diff --git a/.bumpversion.cfg b/.bumpversion.cfg
new file mode 100644
index 00000000..e9169d26
--- /dev/null
+++ b/.bumpversion.cfg
@@ -0,0 +1,28 @@
+[bumpversion]
+current_version = 2.0.0
+commit = False
+tag = False
+parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\.(?P<release>[a-z]+)(?P<dev>\d+))?
+serialize =
+	{major}.{minor}.{patch}.{release}{dev}
+	{major}.{minor}.{patch}
+
+[bumpversion:part:release]
+optional_value = prod
+first_value = dev
+values =
+	dev
+	prod
+
+[bumpversion:part:dev]
+
+[bumpversion:file:./dvp/src/main/python/dlpx/virtualization/VERSION]
+
+[bumpversion:file:./common/src/main/python/dlpx/virtualization/common/VERSION]
+
+[bumpversion:file:./platform/src/main/python/dlpx/virtualization/platform/VERSION]
+
+[bumpversion:file:./libs/src/main/python/dlpx/virtualization/libs/VERSION]
+
+[bumpversion:file:./tools/src/main/python/dlpx/virtualization/_internal/VERSION]
+
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..b7f9d4da
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @ankursarin @fdrozdowski @nhlien93 @crystalplumage
diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
new file mode 100644
index 00000000..a7625c63
--- /dev/null
+++ b/.github/workflows/pre-commit.yml
@@ -0,0 +1,92 @@
+name: Pre-commit actions for Delphix Virtualization SDK
+
+on: [pull_request]
+
+jobs:
+  pytest:
+    name: Test ${{ matrix.package }} on ${{ matrix.os }} using pytest
+    runs-on: ${{ matrix.os }}
+    strategy:
+      max-parallel: 4
+      matrix:
+        python-version: [2.7]
+        os: [ubuntu-latest, macos-latest]
+        package: [common, libs, platform, tools]
+
+    steps:
+      - name: Checkout ${{ matrix.package }} project
+        uses: actions/checkout@v1
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v1
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install ${{ matrix.package }} dependencies
+        working-directory: ${{ matrix.package }}
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt --find-links https://test.pypi.org/simple/dvp-api/
+
+      - name: Install ${{ matrix.package }} project
+        working-directory: ${{ matrix.package }}
+        run: |
+          pip install . --find-links https://test.pypi.org/simple/dvp-api/
+
+      - name: Test ${{ matrix.package }} project with pytest
+        working-directory: ${{ matrix.package }}
+        run: |
+          python -m pytest src/test/python
+
+  lint:
+    name: Lint ${{ matrix.package }}
+
+    runs-on: ubuntu-latest
+    strategy:
+      max-parallel: 4
+      matrix:
+        package: [tools]
+
+    steps:
+      - name: Checkout ${{ matrix.package }}
+        uses: actions/checkout@v1
+
+      - name: Set up Python 2.7
+        uses: actions/setup-python@v1
+        with:
+          python-version: 2.7
+
+      - name: Install flake8
+        run: |
+          python -m pip install --upgrade pip
+          pip install flake8
+
+      - name: Run flake8 on src directory
+        working-directory: ${{ matrix.package }}
+        run: python -m flake8 src/main/python --max-line-length 88
+
+      - name: Run flake8 on test directory
+        working-directory: ${{ matrix.package }}
+        run: python -m flake8 src/test/python --max-line-length 88
+
+  #format:
+    #name: Check format ${{ matrix.package}}
+
+    #runs-on: ubuntu-latest
+    #strategy:
+    #max-parallel: 4
+    #matrix:
+    #package: [common, libs, platform, tools]
+
+    #steps:
+    #- uses: actions/checkout@v1
+
+    #- name: Check src format
+    #uses: lgeiger/black-action@v1.0.1
+    #with:
+    #args: "${{ matrix.package }}/src/main/python -t py27 --check"
+
+    #- name: Check test format
+    #uses: lgeiger/black-action@v1.0.1
+    #with:
+    #args: "${{ matrix.package }}/src/test/python -t py27 --check"
diff --git a/.gitignore b/.gitignore
index 403bc11b..c73fd091 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,9 +2,6 @@
 # Copyright (c) 2019 by Delphix. All rights reserved.
 #
 
-# Non-build Gradle files
-.gradle/
-
 # IntelliJ config files
 .idea/
 
@@ -38,9 +35,3 @@ venv/
 # Python cache
 __pycache__
 
-# Generated protobuf files
-*_pb2.py
-
-# Generated python build files
-Pipfile
-setup.py
diff --git a/.hooksconfig b/.hooksconfig
index 502fdde1..d4f11e7f 100644
--- a/.hooksconfig
+++ b/.hooksconfig
@@ -1,12 +1,16 @@
 [gate]
 	name = virtualization-sdk
 	shortname = sdk
-	slack-url = https://hooks.slack.com/services/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
-	slack-color = 1AD6F5
+	slack-url = https://hooks.slack.com/services/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
+	slack-color = 1AD6F5
 	slack-notify = virt-sdk-pushes
 	approvers = gatekeepers-virtualization-sdk
 	tags-allowed = true
 
 [branch "master"]
-	gate-release-check = skip
 	gate-allowed-issuetypes = 1,3,4,5,10001,10302
+
+[branch "projects/plugin-upgrade"]
+	gate-allowed-issuetypes = 1,3,4,5,10001,10302
+	gate-comment-check = on
+	gate-review-check = on
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..ee661748
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,87 @@
+# Virtualization SDK Contribution Guide
+
+*First of all, thanks for taking the time to contribute to the virtualization-sdk project!*
+
+By following these guidelines you can help us make this project even better.
+
+# Table of Contents
+[Getting Started](#getting-started)
+
+[How to Build the SDK from Source](#how-to-build-the-sdk-from-source)
+
+[Asking for Help](#asking-for-help)
+
+[How to Contribute](#how-to-contribute)
+
+ * [How to Raise Pull Requests](#how-to-raise-pull-requests)
+ * [Code Owners](#code-owners)
+
+[Testing and CI/CD](#testing-and-cicd)
+
+[Coding Guidelines](#coding-guidelines)
+
+ * [Commit Message Format](#commit-message-format)
+
+
+## Getting Started
+The virtualization-sdk is distributed as a Python package called [dvp](https://pypi.org/project/dvp/). Install it in your local development environment so that you can build and upload a plugin.
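+
+For example, a first session with the CLI might look like the sketch below. Only `pip install dvp` is taken directly from this guide; the `dvp` subcommands and flags shown are illustrative, so run `dvp --help` to see what your installed version actually provides:
+
+```
+$ pip install dvp
+$ dvp --help                         # list the available subcommands
+$ dvp build                          # run from a plugin's root directory to build an artifact
+$ dvp upload -e <engine> -u <user>   # upload the built plugin to a Delphix Engine
+```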
+
+
+## How to Build the SDK from Source
+The virtualization-sdk repository hosted on GitHub can be built from source as described below.
+
+### Fork the virtualization-sdk Repository
+
+The first step is to fork the virtualization-sdk repository. Please refer to [Forking a GitHub Repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) for instructions.
+
+### Clone the virtualization-sdk Repository
+
+Once the virtualization-sdk repository is forked, clone the fork into a local copy on your computer. Please refer to [Cloning a Forked Repository](https://help.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository) for instructions.
+
+`git clone git@github.com:your-username/virtualization-sdk.git`
+
+### Development
+
+For development instructions, refer to [README-dev.md](https://github.com/delphix/virtualization-sdk/blob/develop/README-dev.md).
+
+## Asking for Help
+Please raise a GitHub issue with the appropriate tag to ask for help.
+
+## How to Contribute
+
+### How to Raise Pull Requests
+This repository uses the standard GitHub pull request model. Once the changes have been made locally, tested, and committed to the forked repository, a pull request can be raised using the pull request template so that the changes can be reviewed.
+
+Some guidelines for pull requests:
+
+* All pull requests must be based on the current master branch and apply without conflicts.
+* All GitHub Actions checks should succeed. Please refer to [Testing and CI/CD](#testing-and-cicd) for details.
+* Please attempt to limit pull requests to a single commit which resolves one specific issue.
+* Make sure your commit messages are in the correct format as specified in [Commit Message Format](#commit-message-format).
+* When updating a pull request, squash multiple commits into one and perform a rebase. You want all your changes to be included in one commit replayed on top of the master branch of the virtualization-sdk.
+* For large pull requests, consider structuring your changes as a stack of logically independent patches which build on each other. This makes large changes easier to review and approve, which speeds up the merging process.
+* Try to keep pull requests simple. Simple code with comments is much easier to review and approve.
+* Test cases should be provided when appropriate.
+
+Once the pull request has the required approvals from code owners of the repository, a code owner will merge the pull request into the actual virtualization-sdk repository.
+
+### Code Owners
+Code owners, defined by the CODEOWNERS file in the repository, are the gatekeepers of the repository. For a pull request to be merged, it requires approval from at least one code owner.
+
+## Testing and CI/CD
+CI/CD for this repository is managed through GitHub Actions. All the checks need to succeed for a pull request to be merged.
+
+## Coding Guidelines
+### Commit Message Format
+Commit messages for new changes must meet the following guidelines:
+* Every commit message should reference the id of the GitHub issue it addresses along with the issue's title.
+* Every commit message can have an optional description of the issue. Though it is optional, it is highly recommended to provide one to summarize important information about the fix.
+* Each line of the description must be 72 characters or less.
+* Sample commit message to address issue 123 with title "Format of error is incorrect": + + `Fixes #123 Format of error is incorrect` + + `Optional Description of the issue` +* If the commit doesn't address a specific issue, it should include a description of changes. + + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index 637b90ff..00000000 --- a/Pipfile.lock +++ /dev/null @@ -1,20 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "ae4bdd7d4157baab65ae9d0e8389a6011e6b640995372c45ec81fa5d1ddfae9f" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "2.7" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": {}, - "develop": {} -} diff --git a/README-dev.md b/README-dev.md new file mode 100644 index 00000000..61c23ce7 --- /dev/null +++ b/README-dev.md @@ -0,0 +1,141 @@ +# Copyright (c) 2019 by Delphix. All rights reserved. + +# Delphix Virtualization SDK + +This README is for SDK developers. If you are a plugin developer please refer to [README.md](README.md). + +The artifact produced by this repository is a set of Python distributions that make up the SDK. + +## Background + +There are two parts of the SDK that are important to think about separately since they have slightly different workflows. + +1. The `tools` package is the SDK's CLI. This aids in plugin development, testing, and distribution. +2. `common`, `libs`, and `platform` contain what are collectively called the "wrappers". These are vanilla Python classes +that abstract the Virtualization API protobuf messages (published by app-gate) away from plugin developers. These expose +the API plugin developers write against. + +All dependencies of a plugin must be packaged with the plugin including the protobuf messages (`dvp-api`) and the wrappers. +This is done automatically by `dvp build`. + +This is what causes the slightly different workflows in development. Changes to `tools` are completely isolated from the +Delphix Engine and wrappers changes only impact the plugin build. + +## Development + +### Development process + +At a very high level, our development process usually looks like this: + +1. Create a fork of the delphix/virtualization-sdk repository. +2. Clone the forked repository. +3. Make changes to SDK code. Test these changes manually and with [unit tests](#unit-testing). Iterate on this until you have everything working. +4. Bump major/minor/patch/build version depending on the scope of the change. Refer to [versioning section](#versioning) for instructions. +5. Commit your changes. Refer to the [contribution guideline](https://github.com/delphix/virtualization-sdk/blob/develop/CONTRIBUTING.md#commit-message-format) + for commit message format. +6. Make sure the version number of the packages is updated appropriately in your commit. That includes all files mentioned +in `.bumpversion.cfg` and the string in `test_get_version()` inside `test_package_util.py`. +7. Push your changes to a branch in the forked repository. +8. 
Run [blackbox tests](#functional-blackbox-testing) against that branch.
+9. Publish a pull request to the delphix/virtualization-sdk repository once your code is ready for review.
+10. Once the pull request is approved, merge the pull request into the delphix/virtualization-sdk repository.
+
+These steps are described in more detail below.
+
+### Development environment
+Development should be done in a personal virtualenv. To set up the virtual environment:
+
+1. `virtualenv /path/to/env/root`. This should be a Python 2.7 virtualenv.
+2. `source /path/to/env/root/bin/activate`.
+
+### Installing the SDK from source
+To install the SDK, follow these steps:
+
+1. Create a file at `/path/to/env/root/pip.conf` that contains:
+
+   ```
+   [install]
+   index-url=https://pypi.org/simple/
+   extra-index-url=https://test.pypi.org/simple/
+   ```
+
+   One of the SDK dependencies - dvp-api - is currently hosted on [TestPyPI](https://test.pypi.org/project/dvp-api/).
+   By default `pip` looks at pypi.org for packages to install. In order to successfully install the SDK, you have to
+   configure pip to search an additional package repository - test.pypi.org.
+2. Go into one of the package directories (common, dvp, libs, platform, tools) and run the commands below.
+3. Install the package's development dependencies: `pip install -r requirements.txt`.
+4. Install the package itself (use the `-e` flag if you want to install the package in editable mode): `pip install .`.
+
+
+### CLI changes
+
+To better understand how to develop and test `tools` changes, see [tools/README-dev.md](https://github.com/delphix/virtualization-sdk/blob/develop/tools/README-dev.md).
+
+## Versioning
+
+The SDK is shipped as five Python packages that are currently versioned and shipped together: dvp, dvp-common, dvp-libs,
+dvp-platform, and dvp-tools.
+
+The first thing to do is to change the version number of all the packages. Our versioning scheme follows the rules of
+semantic versioning in order to help developers manage their "dependency hell". We use [bump2version](https://github.com/c4urself/bump2version)
+to make the version management of all five packages easier. Semantic versioning rules are the following:
+
+```
+Given a version number MAJOR.MINOR.PATCH, increment the:
+
+    MAJOR version when you make incompatible API changes,
+    MINOR version when you add functionality in a backwards compatible manner, and
+    PATCH version when you make backwards compatible bug fixes.
+```
+Source: https://semver.org/
+
+The version format is MAJOR.MINOR.PATCH for released versions and MAJOR.MINOR.PATCH.devN for development builds.
+For more details see `.bumpversion.cfg` in the root of this repository.
+
+If you want to bump the build number from `1.1.0.dev7` to `1.1.0.dev8`, run `bumpversion dev`.
+
+If you want to bump the major/minor/patch version, run `bumpversion [major|minor|patch]`.
+
+If you want to get rid of the dev label (bump from `1.1.0.dev7` to `1.1.0`), run `bumpversion release`.
+
+## Testing
+
+Currently, there are three types of SDK testing: unit, manual, and functional (blackbox).
+
+### Unit Testing
+
+Go into one of the package directories (common, dvp, libs, platform, tools) and follow these steps:
+
+1. Install the package's development dependencies and the package itself by following the directions in [the SDK installation section](#installing-the-sdk-from-source).
+2. Run unit tests: `python -m pytest src/test/python`.
+
+There's no way to locally run unit tests in all packages with one command.
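+A small shell loop run from the repository root can approximate it, though. This is a sketch, not part of the official tooling; it assumes each package keeps its tests under `src/test/python` and that each package's dependencies are already installed as described above:
+
+```
+# Run the unit tests of every package in sequence, stopping at the first failure.
+for pkg in common dvp libs platform tools; do
+    (cd "$pkg" && python -m pytest src/test/python) || break
+done
+```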
+They will, however, be run automatically through GitHub Actions when you open a pull request. You can always open a draft pull request to trigger these checks before your change is ready for review.
+
+### Manual testing
+
+#### Wrappers: dvp, common, platform, libs
+The only way to manually test new wrappers code is to build a plugin, upload it to a Delphix Engine, and run through
+all the standard workflows. The same workflows will be exercised by functional (blackbox) tests.
+
+### Functional (blackbox) testing
+To run blackbox tests, follow these steps:
+1. Push your code to a branch in the forked repository on GitHub. Let's say the branch is called `my-feature` in a repository called `<your-username>/virtualization-sdk`.
+2. Navigate to the app-gate directory and start tests using `git blackbox`. For a guide on which test suite to use,
+see the next sections.
+
+At a minimum, each pull request should pass the `appdata_python_samples` and `appdata_sanity` tests with a direct or staged plugin.
+See the sections below for a description of each test suite.
+
+#### Blackbox tests targeting wrappers (mostly Delphix Engine workflows)
+* appdata_python_samples (sample plugins from the app-gate):
+`git blackbox -s appdata_python_samples --extra-params="-p virt-sdk-repo=https://github.com/<your-username>/virtualization-sdk.git -p virt-sdk-branch=my-feature"`,
+* appdata_sanity with a direct Python plugin on CentOS 7.3: `git blackbox -s appdata_sanity -c APPDATA_PYTHON_DIRECT_CENTOS73 -a --extra-params="-p virt-sdk-repo=https://github.com/<your-username>/virtualization-sdk.git -p virt-sdk-branch=my-feature"`,
+* appdata_sanity with a staged Python plugin on CentOS 7.3: `git blackbox -s appdata_sanity -c APPDATA_PYTHON_STAGED_CENTOS73 -a --extra-params="-p virt-sdk-repo=https://github.com/<your-username>/virtualization-sdk.git -p virt-sdk-branch=my-feature"`.
+
+#### Blackbox tests targeting the CLI (~80% CLI tests)
+* virtualization_sdk (installs and tests a direct Python plugin on Ubuntu 18):
+`git blackbox -s virtualization_sdk -c APPDATA_SDK_UBUNTU18_DIRECT_CENTOS73 --extra-params="-p virt-sdk-repo=https://github.com/<your-username>/virtualization-sdk.git -p virt-sdk-branch=my-feature"`,
+* virtualization_sdk (installs and tests a staged Python plugin on Ubuntu 18):
+`git blackbox -s virtualization_sdk -c APPDATA_SDK_UBUNTU18_STAGED_CENTOS73 --extra-params="-p virt-sdk-repo=https://github.com/<your-username>/virtualization-sdk.git -p virt-sdk-branch=my-feature"`.
\ No newline at end of file
diff --git a/README.md b/README.md
index 557e852a..9299bf8b 100644
--- a/README.md
+++ b/README.md
@@ -1,203 +1,53 @@
-# Copyright (c) 2019 by Delphix. All rights reserved.
-
 # Delphix Virtualization SDK
 
 This repository contains the Virtualization SDK for building custom data source integrations for the Delphix Dynamic Data Platform.
 
-# Development process
-
-At a very high level, our development process usually looks like this:
-1. Make local changes to SDK and appgate code. Test these changes locally. Iterate on this until you have everything working.
-2. Publish a review for SDK code, and also publish a "provisional" review of appgate code. Address any feedback.
-3. Push the SDK code and publish new SDK builds to our internal servers.
-4. Modify the appgate code to use the newly-published SDK build from artifactory.
-5. Finalize your appgate review.
-6. Push the appgate changes.
+The latest user documentation can be found [here](https://developer.delphix.com).
 
-These steps are described in more detail below.
+## Getting Started
 
-## Local Development
+### Prerequisites
 
-To do local development, we need to generate an SDK build locally.
We also need to get appgate code to use that local build, rather than an official build from artifactory. +- macOS 10.14+, Ubuntu 16.04+, or Windows 10 +- Python 2.7 (Python 3 is not supported) +- Java 7+ +- Delphix Engine 5.3.5.0 or above -### Making a Local SDK build +### Installing -There are two separate components in play here -- a JAR that is used by our appgate code, and a Python distribution that is used by end users and by our blackbox tests. There are separate build procedures for each of these. +To install the latest version of the SDK run: -#### Versioning -The first thing to do is to change the version number. In the root of the SDK codebase, open the `build.gradle` file, and change the version. For almost all cases, this will simply involve incrementing the "build number", which is the three-digit number at the very end of the version. - -#### Building the JAR -In order to build an SDK runtime JAR, navigate to the root directory of the repository, and execute: ``` -./gradlew jar +$ pip install dvp ``` -This will produce the jar under `build/libs`. - -Note that the build not yet do a lot of error checking. So, it is sometimes not immediately apparent if the build did not succeed. Check the timestamp on the `sdk-.jar` file to make sure it's been newly-built. If it's not up to date, you might need to modify the script `bin/build.sh` to produce more output, rerun the jar task, and look at the failure messages. -#### Building the Python distribution +To install a specific version of the SDK run: -To build the Python source distribution, navigate to the root directory of the repository and type: ``` -./gradlew sdist -``` - -The results of this build will be stored in the various `*/build/python-dist` directories. (One each for `common`, `platform`, etc.) - -### Testing a Local SDK build - -There are three levels of testing: -- Unit Testing -- Testing with appgate code -- Testing with qa-gate code - -#### Unit Testing - -Running `./gradlew test` from the top level of the repository will run all SDK unit tests. Smaller sets of tests can be run from inside each directory (`common`, `platform`, etc.) by going into that directory and running `../gradlew test`. - -#### Testing With Appgate Code - -Usually your SDK changes will require corresponding appgate changes. You can test this by importing a "local build" of the SDK JAR into your appgate code, as described below. Once that is done, you can test as you normally would test appgate code: run unit tests, manually test on a VM, run blackbox tests, whatever. - -#### Testing With Qa-Gate Code - -Unfortunately, there is currently no way to point blackbox tests at a local SDK. You must always publish your Python distribution to our Pypi server (details below) before you can run blackbox tests against it. - -### Using a Local SDK Build With Appgate Code - -We need to put the local SDK build somewhere that the appgate code can access it, and we need to actually tell the appgate code to use it. - -#### Making Local SDK Build Available - -This step is easy. Simply copy `build/libs/sdk-.jar` to `appliance/lib` in the `app-gate`. - -Don't forget to check this change into git if you plan on using `git dx-test` or `git appliance-deploy` to test. (Note: you will **not** be pushing this! We'll undo this change later.) - - -#### Using Local SDK Build (For Intellij and Delphix Engine use) - -Delphix Engine and IntelliJ both use gradle to build. So, we have to ensure that the gradle build knows how to find and use our local SDK build. 
This is a two-step process. - - -1. We need to tell gradle to look for jars in the `lib` directory. In order to do that, we will have to add the following code to `appliance/gradle-lib/java-common.gradle`: - - ``` - flatDir { - dirs "${gradleLibDir}/../lib/" - } - ``` - - The above entry has to be added in the list of external repositories. Here is a more complete listing. - - ``` - /* - * External repositories we fetch jars from. This should never include a repository - * that is not managed by Delphix. Third party repos should be mirrored through - * http://artifactory.delphix.com/. - */ - repositories { - /* - * Legacy location for jars that were checked directly into the app-gate. - */ - ivy { - ivyPattern "${gradleLibDir}/../lib/[module]/ivy-[revision].xml" - artifactPattern "${gradleLibDir}/../lib/[module]/[artifact]-[revision].[ext]" - } - ... - ... - flatDir { - dirs "${gradleLibDir}/../lib/" - } - } +$ pip install dvp== ``` -2. We have to tell gradle to actually use our local SDK where applicable. We have two modules that need to see the SDK: `appdata` and `workflow`. So, we have to edit both `appliance/server/appdata/build.gradle` and `appliance/server/workflow/build.gradle`). +To upgrade an existing installation of the SDK run: - We need to add the following line: - ``` - compile name: "sdk-" - ``` - - We also need to remove (or comment out) this line so that gradle will not try to use an artifactory build: - ``` - implementation group: "com.delphix.virtualization", name: "sdk", version: virtualizationSdkVer - ``` - -Once you complete the above two steps, IntelliJ should notice the changes to your build files and rebuild the project. If you don't have the auto-rebuild option turned on, refresh the gradle build. - - -#### Using Local SDK Build (For Eclipse use) - -Eclipse does not use gradle to build, so you have to follow special steps if you're using Eclipse. - -Comment out/remove the following line from `ivy-eclipse-deps.xml`: -``` - ``` - -Add the following entry to `dlpx-app-gate/.classpath`: -``` - +$ pip install dvp --upgrade ``` -## SDK Review and Provisional Appgate review - -Once you're finished with local development and testing, you can publish your final SDK review to reviewboard. - -In addition, it's customary to publish a "provisional" appgate review, so that people can get insight into how the out-for-review SDK changes will actually be used by the appgate. Of course, this review will contain all your temporary local-build changes mentioned above. So, in your review, you'll want to mention that these temporary changes will be reverted before the review is finalized. - -## Pushing and Deploying SDK Code - - -### Publishing - -There are two Gradle tasks that do publishing: `publishDebug` and `publishProd`. They differ in two ways: - -1. They publish the Python distributions to separate repositories on Artifactory (the jar is always published to the same one.). `publishDebug` uploads to `dvp-local-pypi`. This is a special repository that has been setup to test the SDK. It falls back our our production PyPI repository, but artifacts uploaded to `dvp-local-pypi` do not impact production artifacts. This should be used for testing. `publishProd` does upload the Python distributions to our production Artifactory PyPI repository, `delphix-local`. - -2. `publishProd` runs tests, formatting, and linting while `publishDebug` does not. - -NOTE: The external release to `pypi.org` is done outside of the build system. - -#### Setup - -1. 
There are three environment variables that need to be set in order to publish: `ARTIFACTORY_PYPI_USER`, `ARTIFACTORY_PYPI_PASS`, and `ARTIFACTORY_JAR_KEY`. - - `ARTIFACTORY_PYPI_USER` and `ARTIFACTORY_PYPI_PASS` are one set of credentials used to upload the Python distributions to our internal PyPI repositories. The credentials are the same for both internal PyPI repositories mentioned above. - `ARTIFACTORY_JAR_KEY` - - - `ARTIFACTORY_PYPI_USER` and `ARTIFACTORY_PYPI_PASS` is the username/password combo given to you by whoever setup your Artifactory pypi account. If you do not have one, please reach out to the `#appdata-core` channel. These used to upload the Python distributions to our internal PyPI repositories. The credentials are the same for both internal PyPI repositories mentioned above. - - `ARTIFACTORY_JAR_KEY` is your Artifactory API key and is used to upload the jar. It can be retreived from http://artifactory.delphix.com/artifactory/webapp/#/profile. You may have to login. This is different from the PyPI credentials because the artifacts are being uploaded to different repositories on Artifactory. - -2. `twine` needs to be installed. This is a Python package that is used to upload Python distributions. If it's not installed, install it by running `pip install twine`. - - -#### Debug Publishing - -Run `./gradlew publishDebug`. This will build the jar, every Python distribution, and upload them to Artifactory with the Python distributions going to our testing repository, `dvp-local-pypi`. - -You can install `dvp` from this repository with the command `pip install -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-local-pypi/simple dvp==`. +## Reporting a bug -#### Final Publishing +If you run into a problem, please search the [existing issues](https://github.com/delphix/virtualization-sdk/issues) first to ensure the issue hasn't been reported before. Open a new issue only if you haven't found anything similar to your issue. -Once you are absolutely certain all changes have been made run `./gradlew publishProd`. This will run checks, build the jar, create the Python distributions, and upload all of them to Artifactory with the Python distributions going to `delphix-local`. +If the issue is not being tracked, please file an issue using the `Bug Report` issue template [here](https://github.com/delphix/virtualization-sdk/issues/new/choose). -## Using Newly-Deployed SDK Build +## Requesting a feature -Now, we have to go back to our `appgate` code and make it point to the newly-deployed build on artifactory, instead of the local build we used to test. +To request a feature, file a GitHub issue on this repository using the `Feature Request` issue template [here](https://github.com/delphix/virtualization-sdk/issues/new/choose). -First, undo the temporary changes we made earlier: -1. Delete `appliance/lib/sdk-.jar`. Make sure you delete from git as well as just removing from the filesystem. -2. Undo your changes to `appliance/gradle-lib/java-common.gradle` so that the gradle build doesn't look for local jars. -3. Undo your changes to `appliance/server/appdata/build.gradle` and `appliance/server/workflow/build.gradle`. -4. If you are an Eclipse user, undo your changes to `ivy-eclipse-deps.xml` and `dlpx-app-gate/.classpath`. +## Contributing -Next, we need to point to our newly-deployed version: -1. Modify `appliance/gradle.properties` and change `virtualizationSdkVer` to refer to your new version number. -2. 
Modify `ivy-eclipse-deps.xml` and change the `com.delphix.virtualization` line to refer to your new version number. +Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on the process for submitting pull requests to us. -## Finalizing Appgate Review +## License -Once you've got the above changes completed, tested, and checked into git, you can update your appgate review. Now, your review will be ready for final ship-its. +This project is licensed under the Apache 2.0 License - see the [LICENSE](LICENSE) file for details. \ No newline at end of file diff --git a/bin/build.sh b/bin/build.sh deleted file mode 100755 index 6caccd45..00000000 --- a/bin/build.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash -# -# Copyright (c) 2018, 2019 by Delphix. All rights reserved. -# - -SCRIPT_DIR=`dirname $0` -source ${SCRIPT_DIR}/common.sh - -# This script must be executed from the root directory of the virtualization-sdk repo. -ROOT=`git rev-parse --show-toplevel` -cd $ROOT - -mkdir -p build/libs -cd build/libs - -echo "Copying Virtualization SDK binaries from NAS..." -scp -r delphix@support-tools:/nas/engineering/fdrozdowski/virtualization-sdk/bin . - -echo "Preparing Virtualization SDK jar directory..." -JAR_DIRECTORY=virtualization-sdk-jar -mkdir -p ${JAR_DIRECTORY} - -cp bin/six.py ${JAR_DIRECTORY} -cp -r bin/google ${JAR_DIRECTORY} -cp -r bin/enum ${JAR_DIRECTORY} -cp bin/typing.py ${JAR_DIRECTORY} - -mkdir -p ${JAR_DIRECTORY}/dlpx/virtualization/ -touch ${JAR_DIRECTORY}/dlpx/__init__.py -touch ${JAR_DIRECTORY}/dlpx/virtualization/__init__.py - -cp ${ROOT}/common/src/main/proto/dlpx/virtualization/common.proto ${JAR_DIRECTORY}/dlpx/virtualization/common.proto -cp ${ROOT}/platform/src/main/proto/dlpx/virtualization/platform.proto ${JAR_DIRECTORY}/dlpx/virtualization/platform.proto -cp ${ROOT}/libs/src/main/proto/dlpx/virtualization/libs.proto ${JAR_DIRECTORY}/dlpx/virtualization/libs.proto - -cd ${JAR_DIRECTORY} -cp -r ./../bin . - -echo "Compiling protobuf definitions to Java and Python classes..." -bin/protoc-3.6.1-osx-x86_64/bin/protoc -I=. --java_out=. --python_out=. dlpx/virtualization/common.proto dlpx/virtualization/platform.proto dlpx/virtualization/libs.proto - -echo "Copying virtualization-sdk modules to the jar directory..." -rsync -av --progress ${ROOT}/common/src/main/python/dlpx/ dlpx/ > /dev/null -rsync -av --progress ${ROOT}/platform/src/main/python/dlpx/ dlpx/ > /dev/null -rsync -av --progress ${ROOT}/libs/src/main/python/dlpx/ dlpx/ > /dev/null - -echo "Pre-compiling the Python Virtualization Platform protobuf module..." - -# The command below assumes that there's "python" on the PATH and it resolves to Python 2.7 (CPython). -# We compile dlpx/virtualization modules to Jython classes and prepend each file name with dlpx/virtualization. -java -jar bin/jython-standalone-2.7.1.jar -Dcpython_cmd=python -m compileall -f -d dlpx/virtualization dlpx/virtualization -java -jar bin/jython-standalone-2.7.1.jar -Dcpython_cmd=python -m compileall -l -f -d dlpx/virtualization six.py -java -jar bin/jython-standalone-2.7.1.jar -Dcpython_cmd=python -m compileall -l -f -d dlpx/virtualization typing.py - -echo "Compiling Java source files to Java classes..." -javac -d . 
-classpath bin/protobuf-java-3.6.1.jar com/delphix/virtualization/common/*java com/delphix/virtualization/platform/*java com/delphix/virtualization/libs/*java > /dev/null
-
-rm -r bin
-
-VERSION=`cat "${ROOT}/build.gradle" | grep '^\s*version\s*=\s*"*"'| sed -E 's/.*"(.*)".*/\1/g'`
-[ -z "$VERSION" ] && die "Failed to retrieve SDK version from build.gradle."
-
-echo "Creating a Virtualization SDK jar..."
-JAR_FILE_NAME="sdk-$VERSION.jar"
-jar cvf ${JAR_FILE_NAME} . > /dev/null
-mv ${JAR_FILE_NAME} ./..
-
-exit 0
diff --git a/bin/check_version_bump.sh b/bin/check_version_bump.sh
index 37f4ecf2..080cab21 100755
--- a/bin/check_version_bump.sh
+++ b/bin/check_version_bump.sh
@@ -13,15 +13,32 @@ CWD=`pwd`
 
 source ${SCRIPT_DIR}/common.sh
 
-cd ${ROOT}
-
 # Diff 'build.gradle' with origin/master. This works since we don't have any backport branches. Grep that diff
 # for a line that starts with a '+' that signifies an addition in git. The line should then have 'version =' followed
 # by something in quotes that starts with x.y.z where x, y, and z are digits. Anything is allowed to follow that
 # until the quotes end.
-VERSION=`git diff origin/master -- build.gradle | grep '^\+\s*version\s*=\s*"[0-9]\.[0-9]\.[0-9].*"'`
-[[ -z ${VERSION} ]] && die "The SDK version has not been increased. Please increment the version in /build.gradle."
+GRADLE_VERSION_REGEX=".*([0-9]\.[0-9]\.[0-9].*)\""
+GRADLE_VERSION_DIFF=`git diff origin/master -- $ROOT/build.gradle | grep '^\+\s*version\s*=\s*"[0-9]\.[0-9]\.[0-9].*"'`
+
+# Extract the exact version string from the diff.
+if [[ $GRADLE_VERSION_DIFF =~ $GRADLE_VERSION_REGEX ]]; then
+    GRADLE_VERSION=${BASH_REMATCH[1]}
+fi
+
+[[ -z ${GRADLE_VERSION} ]] && die "The SDK version has not been increased. Please increment the version in /build.gradle."
+
+# Unfortunately there are currently two places to specify the version. Validate
+# that the version in tools has been changed.
+TOOLS_VERSION_REGEX=".*([0-9]\.[0-9]\.[0-9].*)"
+TOOLS_VERSION_DIFF=`git diff origin/master -- $ROOT/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg | grep '^\+\s*package_version\s*=\s*[0-9]\.[0-9]\.[0-9].*'`
+
+if [[ $TOOLS_VERSION_DIFF =~ $TOOLS_VERSION_REGEX ]]; then
+    TOOLS_VERSION=${BASH_REMATCH[1]}
+fi
+
+[[ -z $TOOLS_VERSION ]] && die "The SDK version has been increased in /build.gradle but not in /tools/src/main/python/dlpx/virtualization/_internal/settings.cfg. Please increment the version there as well. These versions must match."
 
-cd ${CWD}
+# Validate that the two versions are the same.
+[[ $GRADLE_VERSION != $TOOLS_VERSION ]] && die "The version in /build.gradle ($GRADLE_VERSION) does not match the version in /tools/src/main/python/dlpx/virtualization/_internal/settings.cfg ($TOOLS_VERSION). These versions must match."
 
 exit 0
diff --git a/bin/split_v1.0.0_jar.sh b/bin/split_v1.0.0_jar.sh
new file mode 100755
index 00000000..c0e8a10c
--- /dev/null
+++ b/bin/split_v1.0.0_jar.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+#
+# Copyright (c) 2019 by Delphix. All rights reserved.
+#
+# This script downloads the original 1.0.0 jar and splits it into two jars.
+# The first contains only Java protobuf classes. The second jar contains
+# Python protobuf classes and common, libs, and platform.
+# Both of these are consumed by the Delphix Engine.
+#
+# This script should only need to be run once, ever, to generate these jars. In order
+# to support multiple versions of the Virtualization API, we are now going to ship
+# the wrappers with the plugin itself instead of with the Delphix Engine.
+#
+# However, in order to be backwards compatible with SDK version 1.0.0, we must
+# continue shipping the 1.0.0 wrappers with the Delphix Engine. The Delphix Engine
+# will continue to need the Java protobuf classes of the latest version, but will
+# no longer need the Python classes nor common, libs, and platform, which is why this
+# script will only need to be run once.
+#
+# This script exists primarily to document how the jars were built in case there is
+# an issue and we need to reproduce it.
+#
+
+git rev-parse --is-inside-work-tree || exit 1
+ROOT=`git rev-parse --show-toplevel`
+
+LIBS_DIR=${ROOT}/build/libs
+BUILD_DIR=${ROOT}/build/split-1.0.0/
+EXTRACTED_DIR=${BUILD_DIR}/extracted
+JAVA_DIR=${BUILD_DIR}/java
+PYTHON_DIR=${BUILD_DIR}/python
+
+# Start with a clean directory
+rm -rf ${BUILD_DIR}
+
+mkdir ${BUILD_DIR}
+mkdir ${LIBS_DIR}
+mkdir ${EXTRACTED_DIR}
+mkdir ${JAVA_DIR}
+mkdir ${PYTHON_DIR}
+
+# Download the original 1.0.0 jar that contains both Java and Python files.
+CURRENT_JAR_NAME="sdk-1.0.0.jar"
+JAR_FILE=${BUILD_DIR}/${CURRENT_JAR_NAME}
+wget -P ${BUILD_DIR} http://artifactory.delphix.com/artifactory/virtualization-sdk/com/delphix/virtualization/sdk/1.0.0/${CURRENT_JAR_NAME}
+
+# Extract the com/ directory into the Java directory. These are all the Java protobuf files.
+pushd ${JAVA_DIR}
+jar xvf ${JAR_FILE} com/
+
+# Create a new jar that contains com/*.
+jar cvf ${LIBS_DIR}/api-java-1.0.0.jar .
+
+# Extract all the Python files into their own directory.
+pushd ${PYTHON_DIR}
+jar xvf ${JAR_FILE} dlpx google enum six.py typing.py
+
+# Compile the extracted Python modules to .pyc files.
+python -m compileall -f -d dlpx dlpx/
+python -m compileall -f -d google google/
+python -m compileall -f -d enum enum/
+python -m compileall -f -d six six.py
+python -m compileall -f -d typing typing.py
+
+#
+# Zip the files and base64 encode the zip. This is odd, but since we already import the
+# plugin from a base64 encoded zip this makes it easy to import the wrappers as well.
+#
+zip -r virtualization-sdk-wrappers-1.0.0.zip `find . -name "*.py" -o -name "*.pyc"`
+base64 --input virtualization-sdk-wrappers-1.0.0.zip > virtualization-sdk-wrappers-v1.0.0.txt
+
+# Create a jar with just the text file containing the base64 encoded zip.
+jar cvf ${LIBS_DIR}/wrappers-python-1.0.0.jar virtualization-sdk-wrappers-v1.0.0.txt
+
+popd
+popd
diff --git a/bin/upload.sh b/bin/upload.sh
index 7fbaf3f9..610214c6 100755
--- a/bin/upload.sh
+++ b/bin/upload.sh
@@ -3,24 +3,25 @@
 # Copyright (c) 2019 by Delphix. All rights reserved.
 #
 
-# This script uploads the SDK jar as well as all Python distributions created by the SDK. It assumes that all artifacts
+# This script uploads all Python distributions created by the SDK. It assumes that all artifacts
 # already exist. This is not intended to be a long term solution. Instead, this is a dirty way to abstract away
 # some of this logic. It should instead be rolled directly into the Gradle build and our future CI pipeline.
 #
 # This script can upload to both our internal dev and prod PyPI repositories. It defaults to dev.
 
-SCRIPT_DIR=`dirname $0`
-source ${SCRIPT_DIR}/common.sh
+SCRIPT_DIR=$(dirname "$0")
+# shellcheck source=./common.sh
+source "${SCRIPT_DIR}"/common.sh
 
 USAGE="Usage: upload.sh [--prod]"
 
 # Validate usage is correct and expected environment variables are set.
 if [[ $# -gt 1 ]]; then
-    die $USAGE
+    die "$USAGE"
 elif [[ $# -eq 1 && $1 != "--prod" ]]; then
-    die $USAGE
-elif [[ -z ${ARTIFACTORY_PYPI_USER} || -z ${ARTIFACTORY_PYPI_PASS} || -z ${ARTIFACTORY_JAR_KEY} ]]; then
-    die "ARTIFACTORY_PYPI_USER, ARTIFACTORY_PYPI_PASS, and/or ARTIFACTORY_JAR_KEY environment variables are not set. Set them or pass them in as arguments to upload.sh."
+    die "$USAGE"
+elif [[ -z ${ARTIFACTORY_PYPI_USER} || -z ${ARTIFACTORY_PYPI_PASS} ]]; then
+    die "ARTIFACTORY_PYPI_USER and/or ARTIFACTORY_PYPI_PASS environment variables are not set. Set them or pass them in as arguments to upload.sh."
 fi
 
 # dvp-local-pypi is used for testing and is the default. delphix-local is our internal production PyPI repository and
@@ -32,42 +33,34 @@ else
 fi
 
 # Check early that 'twine' is on the path.
-command -v twine 2>&1 >/dev/null || die "'twine' is either not installed or not on PATH. To install 'twine' run 'pip install twine'"
+command -v twine >/dev/null 2>&1 || die "'twine' is either not installed or not on PATH. To install 'twine' run 'pip install twine'"
 
 # All the file paths need to be relative to the root of the git repo
-ROOT=`git rev-parse --show-toplevel`
+ROOT=$(git rev-parse --show-toplevel)
 
 # Get the SDK version from .bumpversion.cfg in the root of the SDK. This essentially just looks for the
 # 'current_version =' line and pulls the value after it. Nothing too sophisticated and fairly error prone.
-VERSION=`cat "${ROOT}/build.gradle" | grep '^\s*version\s*=\s*"*"'| sed -E 's/.*"(.*)".*/\1/g'`
+VERSION=$(grep '^current_version\s*=\s*' "${ROOT}/.bumpversion.cfg" | sed -E 's/.*=[[:space:]](.*)/\1/g')
 [ -z "$VERSION" ] && die "Failed to retrieve SDK version from .bumpversion.cfg."
 
-echo "Uploading custom build jar..."
-RESPONSE=`curl --silent --write-out "%{http_code}" -H "X-JFrog-Art-Api: ${ARTIFACTORY_JAR_KEY}" -T "${ROOT}/build/libs/sdk-${VERSION}.jar" "http://artifactory.delphix.com/artifactory/virtualization-sdk/com/delphix/virtualization/sdk/${VERSION}/sdk-${VERSION}.jar"`
-
-# The above 'curl' command writes out "${http_code}" so the last three characters of the output will be the HTTP
-# response code. If that response code is not "201", it is a failure so die and then print the response. ${REPONSE%????}
-# prints $REPONSE without the last 4 characters which are the HTTP exit code and an 'n'.
-[ ${RESPONSE: -3} -ne "201" ] && die "Failed to upload ${ROOT}/build/libs/sdk-${VERSION}.jar to artifactory:\n" ${RESPONSE%????}
-
 echo "Uploading 'common' Python distribution..."
-twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/common/build/python-dist/*${VERSION}.tar.gz" > /dev/null
-twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/common/build/python-dist/*${VERSION//-/_}*.whl" > /dev/null
+twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/common/dist/*${VERSION}.tar.gz" > /dev/null
+twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/common/dist/*${VERSION//-/_}*.whl" > /dev/null
 
 echo "Uploading 'platform' Python distribution..."
-twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/platform/build/python-dist/*${VERSION}.tar.gz" > /dev/null -twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/platform/build/python-dist/*${VERSION//-/_}*.whl" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/platform/dist/*${VERSION}.tar.gz" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/platform/dist/*${VERSION//-/_}*.whl" > /dev/null echo "Uploading 'libs' Python distribution..." -twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/libs/build/python-dist/*${VERSION}.tar.gz" > /dev/null -twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/libs/build/python-dist/*${VERSION//-/_}*.whl" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/libs/dist/*${VERSION}.tar.gz" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/libs/dist/*${VERSION//-/_}*.whl" > /dev/null echo "Uploading 'tools' Python distribution..." -twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/tools/build/python-dist/*${VERSION}.tar.gz" > /dev/null -twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/tools/build/python-dist/*${VERSION//-/_}*.whl" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/tools/dist/*${VERSION}.tar.gz" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/tools/dist/*${VERSION//-/_}*.whl" > /dev/null echo "Uploading 'dvp' Python distribution..." -twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/dvp/build/python-dist/*${VERSION}.tar.gz" > /dev/null -twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/dvp/build/python-dist/*${VERSION//-/_}*.whl" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/dvp/dist/*${VERSION}.tar.gz" > /dev/null +twine upload --repository-url ${REPO} -u "${ARTIFACTORY_PYPI_USER}" -p "${ARTIFACTORY_PYPI_PASS}" "${ROOT}/dvp/dist/*${VERSION//-/_}*.whl" > /dev/null exit 0 diff --git a/bin/upload_split_v1.0.0_jar.sh b/bin/upload_split_v1.0.0_jar.sh new file mode 100755 index 00000000..ef69fc99 --- /dev/null +++ b/bin/upload_split_v1.0.0_jar.sh @@ -0,0 +1,35 @@ +#!/bin/sh +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# This script uploads the two jars produced by split_v1.0.0_jar.sh to artifactory. +# + +git rev-parse --is-inside-work-tree || exit 1 +ROOT=`git rev-parse --show-toplevel` +LIBS_DIR=${ROOT}/build/libs + +# +# These urls and names determine how the artifacts are specified with Gradle. Here is how they look in Gradle: +# implementation group: "com.delphix.virtualization.platform", name:"api-java", version:"1.0.0" +# implementation group: "com.delphix.virtualization.sdk", name:"wrappers-python", version:"1.0.0" +# +# These are temporary. 
The Java jar will likely be pulled in directly during the Gradle build since the protobuf +# messages will live in the app gate. The Python jar will be pulled in until we deprecate support for v1.0.0. +# +ARTIFACTORY_API_URL=http://artifactory.delphix.com/artifactory/virtualization-sdk/com/delphix/virtualization/platform/api-java/1.0.0 +ARTIFACTORY_WRAPPERS_URL=http://artifactory.delphix.com/artifactory/virtualization-sdk/com/delphix/virtualization/sdk/wrappers-python/1.0.0 + +API_JAR_NAME=api-java-1.0.0.jar +WRAPPERS_JAR_NAME=wrappers-python-1.0.0.jar + +API_JAR_PATH=${LIBS_DIR}/${API_JAR_NAME} +WRAPPERS_JAR_PATH=${LIBS_DIR}/${WRAPPERS_JAR_NAME} + +[ ! -f ${API_JAR_PATH} ] && echo "${API_JAR_PATH} does not exist. Run ${ROOT}/bin/split_v1.0.0_jar.sh to create it and then run this script again." && exit 1 +[ ! -f ${WRAPPERS_JAR_PATH} ] && echo "${WRAPPERS_JAR_PATH} does not exist. Run ${ROOT}/bin/split_v1.0.0_jar.sh to create it and then run this script again." && exit 1 + +[ -z ${ARTIFACTORY_JAR_KEY} ] && echo "The environment variable 'ARTIFACTORY_JAR_KEY' is not set. Set it and run this script again." && exit 1 + +curl -H "X-JFrog-Art-Api: ${ARTIFACTORY_JAR_KEY}" -T ${API_JAR_PATH} ${ARTIFACTORY_API_URL}/${API_JAR_NAME} +curl -H "X-JFrog-Art-Api: ${ARTIFACTORY_JAR_KEY}" -T ${WRAPPERS_JAR_PATH} ${ARTIFACTORY_WRAPPERS_URL}/${WRAPPERS_JAR_NAME} diff --git a/build.gradle b/build.gradle deleted file mode 100644 index 46cb5fa1..00000000 --- a/build.gradle +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -plugins { - id "com.google.protobuf" version "0.8.7" apply false - id "delphix.python" version "0.0.7" apply false -} - -subprojects { - version = "1.0.0" -} - -def binDir = "${rootProject.projectDir}/bin" - -/* - * The commands below are all temporary commands to help abstract the ad-hoc release process we have today. They - * are only intended to help abstract away this logic until we roll these into the actual build system. - */ - -/* - * The jar that is shipped with the Delphix Engine to support the Virtualization Platform is created by a shell script. - * This task wraps that shell script. - */ -task jar(type: Exec) { - commandLine "${binDir}/build.sh" -} - -/* - * This task wraps a shell script that checks if the version of the SDK has been bumped. Bumping the SDK version is - * currently a manual step that is easy to forget. - */ -task checkVersionBump(type: Exec) { - commandLine "${binDir}/check_version_bump.sh" -} - -// 'check' is ran as part of the pre-checkin checklist. It should validate that the version has been bumped. -task check { - dependsOn 'checkVersionBump' -} - -task buildPython { - dependsOn ':common:sdist' - dependsOn ':common:wheel' - dependsOn ':platform:sdist' - dependsOn ':platform:wheel' - dependsOn ':libs:sdist' - dependsOn ':libs:wheel' - dependsOn ':tools:sdist' - dependsOn ':tools:wheel' - dependsOn ':dvp:sdist' - dependsOn ':dvp:wheel' -} - -task build { - dependsOn 'buildPython' -} - -/* - * This task publishes the final version of the jar and all Python distributions to our internal production PyPI - * repository. This should be executed immediately before a change is pushed. - * - * NOTE: This DOES NOT publish to pypi.org. The external release process is done outside of the build system. 
- */ -task publishProd(type: Exec) { - dependsOn 'check' - dependsOn 'build' - - executable "${binDir}/upload.sh" - args "--prod" -} - -/* - * This task is similar to the above, but instead of publishing to the internal production PyPI repository, it publishes - * to the internal dvp development repository. - * - * This also does not run the full 'check' command. However, it does validate that the version has been bumped. - */ -task publishDebug(type: Exec) { - dependsOn 'checkVersionBump' - dependsOn 'jar' - dependsOn 'buildPython' - - executable "${binDir}/upload.sh" -} diff --git a/common/.gitignore b/common/.gitignore deleted file mode 100644 index 74641651..00000000 --- a/common/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -# Ignore generated Java files -src/main/java/ diff --git a/common/MANIFEST.in b/common/MANIFEST.in index eb9b6989..c39fba12 100644 --- a/common/MANIFEST.in +++ b/common/MANIFEST.in @@ -2,4 +2,5 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # -include LICENSE \ No newline at end of file +include LICENSE +include src/main/python/dlpx/virtualization/common/VERSION diff --git a/common/Pipfile.lock b/common/Pipfile.lock deleted file mode 100644 index 2c3630c6..00000000 --- a/common/Pipfile.lock +++ /dev/null @@ -1,132 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "6ad18e02013aa5f2d09b0aa3e376942da5f67ff5f0fb8b79b867bf032d066de5" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", - "verifySsl": true - } - ] - }, - "default": { - "protobuf": { - "hashes": [ - "sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4", - "sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811", - "sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444", - "sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96", - "sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2", - "sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef", - "sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e", - "sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995", - "sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed", - "sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9", - "sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90", - "sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19", - "sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625", - "sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9", - "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", - "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" - ], - "version": "==3.6.1" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - } - }, - "develop": { - "atomicwrites": { - "hashes": [ - "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", - "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" - ], - "version": "==1.3.0" - }, - "attrs": { - "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - 
"sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" - ], - "version": "==19.1.0" - }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.0'", - "version": "==1.0.2" - }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" - }, - "pathlib2": { - "hashes": [ - "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", - "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7" - ], - "markers": "python_version < '3.6'", - "version": "==2.3.3" - }, - "pluggy": { - "hashes": [ - "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", - "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" - ], - "version": "==0.9.0" - }, - "py": { - "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" - ], - "version": "==1.8.0" - }, - "pytest": { - "hashes": [ - "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523", - "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4" - ], - "version": "==4.3.1" - }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - } - } -} diff --git a/common/build.gradle b/common/build.gradle deleted file mode 100644 index faace22b..00000000 --- a/common/build.gradle +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -plugins { - id "java" - id "com.google.protobuf" - id "delphix.python" -} - -repositories { - mavenCentral() -} - -dependencies { - // Necessary to compile generated java protocol buffer libraries. - compile 'com.google.protobuf:protobuf-java:3.6.1' -} - -protobuf { - - protoc { - artifact = 'com.google.protobuf:protoc:3.6.1' - } - - // This activates other protoc language targets. 
- // https://github.com/google/protobuf-gradle-plugin#default-outputs - generateProtoTasks { - all().each { task -> - task.builtins { - python { - } - } - } - } - generatedFilesBaseDir = "$projectDir/src" -} - -artifacts { - python sdist.distFile -} - -dlpxPython { - sources { - delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" - } - } - - dist { - name = "dvp-common" - } - - packages { - protobuf { - version = "==3.6.1" - } - } - - devPackages { - } - - supportedPythons { - "python2.7" {} - } -} - -/* - * This is a temporary task. 'src/main/java' only contains compiled protobuf classes. Sometimes these get out of date - * and they need to be cleaned up manually. In the long term, everything should probably be built under 'build' so - * the 'clean' task automatically deletes them. In the short term though, this task cleans them up. - */ -task removeProtobufJava(type: Delete) { - delete "${projectDir}/src/main/java" -} - -/* - * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information - * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in - * 'build' so these would be created there, however they still could be created in the 'src' directory if someone runs - * 'setup.py' manually. This is often done during development to install the package for testing. - */ -task removeEggInfo(type: Delete) { - delete "${projectDir}/src/main/python/dvp_common.egg-info" -} - -task wheel(type: SetupPyTask) { - setupPyCommand "bdist_wheel" - distFile String.format("%s-%s-%s-%s-%s.whl", dist.name.get().replace("-", "_"), "$project.version".replace("-", "_"), "py2", "none", "any") - - dependsOn makeSetupPy -} - -clean.dependsOn('removeProtobufJava') -clean.dependsOn('removeEggInfo') - -project.afterEvaluate { - tasks["sdist"].dependsOn tasks["generateProto"] - tasks["test_python2.7"].dependsOn tasks["generateProto"] -} diff --git a/common/lock.dev-requirements.txt b/common/lock.dev-requirements.txt deleted file mode 100644 index bc763e37..00000000 --- a/common/lock.dev-requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -atomicwrites==1.3.0 -attrs==19.1.0 -funcsigs==1.0.2 ; python_version < '3.0' -more-itertools==5.0.0 ; python_version <= '2.7' -pathlib2==2.3.3 ; python_version < '3.6' -pluggy==0.9.0 -py==1.8.0 -pytest==4.3.1 -scandir==1.10.0 ; python_version < '3.5' -six==1.12.0 diff --git a/common/lock.requirements.txt b/common/lock.requirements.txt deleted file mode 100644 index f6490f34..00000000 --- a/common/lock.requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -protobuf==3.6.1 -six==1.12.0 diff --git a/common/requirements.txt b/common/requirements.txt new file mode 100644 index 00000000..28ecde71 --- /dev/null +++ b/common/requirements.txt @@ -0,0 +1,15 @@ +bump2version==0.5.11 +contextlib2==0.6.0.post1 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.0' +importlib-metadata==0.23 ; python_version < '3.8' +more-itertools==5.0.0 ; python_version <= '2.7' +packaging==19.2 +pathlib2==2.3.5 ; python_version < '3.6' +pluggy==0.13.0 +py==1.8.0 +pyparsing==2.4.5 +pytest==4.6.6 +scandir==1.10.0 ; python_version < '3.5' +six==1.13.0 +wcwidth==0.1.7 +zipp==0.6.0 diff --git a/common/setup.py b/common/setup.py new file mode 100644 index 00000000..35d574e2 --- /dev/null +++ 
b/common/setup.py @@ -0,0 +1,18 @@ +import os +import setuptools + +PYTHON_SRC = 'src/main/python' + +install_requires = [ + "dvp-api == 1.1.0", +] + +with open(os.path.join(PYTHON_SRC, 'dlpx/virtualization/common/VERSION')) as version_file: + version = version_file.read().strip() + +setuptools.setup(name='dvp-common', + version=version, + install_requires=install_requires, + package_dir={'': PYTHON_SRC}, + packages=setuptools.find_packages(PYTHON_SRC), +) diff --git a/common/src/main/proto/dlpx/virtualization/common.proto b/common/src/main/proto/dlpx/virtualization/common.proto deleted file mode 100644 index 6606a2c9..00000000 --- a/common/src/main/proto/dlpx/virtualization/common.proto +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2018, 2019 by Delphix. All rights reserved. - */ - -syntax = "proto3"; - -option java_multiple_files = true; - -package com.delphix.virtualization.common; - -message RemoteConnection { - RemoteEnvironment environment = 1; - RemoteUser user = 2; -} - -message RemoteEnvironment { - string name = 1; - string reference = 2; - RemoteHost host = 3; -} - -message RemoteHost { - string name = 1; - string reference = 2; - string binary_path = 3; - string scratch_path = 4; -} - -message RemoteUser { - string name = 1; - string reference = 2; -} - -message LinkedSource { - string guid = 1; - PluginDefinedObject parameters = 2; -} - -message DirectSource { - RemoteConnection connection = 1; - LinkedSource linked_source = 2; -} - -// Stuff that always mounts the entire ZFS filesystem -message SingleEntireMount { - RemoteEnvironment remote_environment = 1; - string mount_path = 2; - string shared_path = 3; -} - -// Stuff that can optionally mount only a subset of the ZFS filesystem -message SingleSubsetMount { - RemoteEnvironment remote_environment = 1; - string mount_path = 2; - string shared_path = 3; // not supported on Windows -} - -message StagedSource { - LinkedSource linked_source = 1; - RemoteConnection source_connection = 2; - SingleEntireMount staged_mount = 3; - RemoteConnection staged_connection = 4; -} - -message VirtualSource { - string guid = 1; - RemoteConnection connection = 2; - repeated SingleSubsetMount mounts = 3; - PluginDefinedObject parameters = 4; -} - -message SourceConfig { - string name = 1; - PluginDefinedObject parameters = 2; -} - -message Repository { - string name = 1; - PluginDefinedObject parameters = 2; -} - -message Snapshot { - PluginDefinedObject parameters = 1; -} - -message SnapshotParameters { - PluginDefinedObject parameters = 1; -} - -message PluginDefinedObject { - string json = 1; -} - -message OwnershipSpec { - int32 uid = 1; - int32 gid = 2; -} diff --git a/common/src/main/python/dlpx/virtualization/common/VERSION b/common/src/main/python/dlpx/virtualization/common/VERSION new file mode 100644 index 00000000..359a5b95 --- /dev/null +++ b/common/src/main/python/dlpx/virtualization/common/VERSION @@ -0,0 +1 @@ +2.0.0 \ No newline at end of file diff --git a/common/src/main/python/dlpx/virtualization/common/_common_classes.py b/common/src/main/python/dlpx/virtualization/common/_common_classes.py index 2cc9622f..c5d71fe3 100644 --- a/common/src/main/python/dlpx/virtualization/common/_common_classes.py +++ b/common/src/main/python/dlpx/virtualization/common/_common_classes.py @@ -2,7 +2,7 @@ # Copyright (c) 2019 by Delphix. All rights reserved. 
 #
-from dlpx.virtualization import common_pb2
+from dlpx.virtualization.api import common_pb2
 from dlpx.virtualization.common.exceptions import IncorrectTypeError
 
 """Classes used for Plugin Operations
diff --git a/common/src/main/python/dlpx/virtualization/common/exceptions.py b/common/src/main/python/dlpx/virtualization/common/exceptions.py
index 932aae4a..d1032a81 100644
--- a/common/src/main/python/dlpx/virtualization/common/exceptions.py
+++ b/common/src/main/python/dlpx/virtualization/common/exceptions.py
@@ -60,10 +60,12 @@ def get_actual_and_expected_type(actual_type, expected_type):
             type(s) that was actually passed in for the parameter. This will
             either take the type and make it a str or join the types as a
             string and put it in brackets.
-        expected_type (Type or List[Type], Dict[Type, Type]):
+        expected_type (Type, List[Type], List[Type1, Type2], or
+            Dict[Type, Type]):
             The type of the parameter that was expected. Or if this is a
-            container then we assume there is one element in it and that type
-            is the expected type of the container.
+            container then we either assume there is one element in it and that
+            type is the expected type of the container, or if the list contains
+            multiple types, then multiple types are expected.
             ie: if expected_type = [str] then the returned expected string
             with be something like "type 'list of str'"
@@ -74,33 +76,48 @@ def get_actual_and_expected_type(actual_type, expected_type):
     def _remove_angle_brackets(type_string):
         return type_string.replace('<', '').replace('>', '')
 
+    def _get_type_name(type_object):
+        if type_object.__module__ != '__builtin__':
+            type_name = '{}.{}'.format(
+                type_object.__module__, type_object.__name__)
+        else:
+            type_name = type_object.__name__
+        return type_name
+
     if isinstance(expected_type, list):
-        if len(expected_type) != 1:
+        """
+        If expected_type length is greater than 1, we can
+        expect 2 cases. Either the list has all the same types
+        (violating the assumption that there is one element in the
+        list, and that type is the expected type of the container)
+        or the list has types that are unique to each other, meaning that
+        multiple types were expected, any one of which is allowed.
+ """ + if len(expected_type) != 1 and len(set(expected_type)) == 1: raise PlatformError('The thrown TypeError should have had a' ' list of size 1 as the expected_type') - single_type = expected_type[0] - if single_type.__module__ != '__builtin__': - type_name = '{}.{}'.format( - single_type.__module__, single_type.__name__) + if len(expected_type) > 1: + for index in range(0, len(expected_type)): + expected_type[index] = _get_type_name(expected_type[index]) + expected_type[index] = _remove_angle_brackets(str(expected_type[index])) + + expected = "any one of the following types: '{}'".format(expected_type) else: - type_name = single_type.__name__ - expected = "type 'list of {}'".format(type_name) + single_type = expected_type[0] + type_name = _get_type_name(single_type) + + expected = "type 'list of {}'".format(type_name) elif isinstance(expected_type, dict): if len(expected_type) != 1: raise PlatformError('The thrown TypeError should have had a' ' dict of size 1 as the expected_type') + key_type = expected_type.keys()[0] value_type = expected_type.values()[0] - if key_type.__module__ != '__builtin__': - key_type_name = '{}.{}'.format( - key_type.__module__, key_type.__name__) - else: - key_type_name = key_type.__name__ - if value_type.__module__ != '__builtin__': - value_type_name = '{}.{}'.format( - value_type.__module__, value_type.__name__) - else: - value_type_name = value_type.__name__ + + key_type_name = _get_type_name(key_type) + value_type_name = _get_type_name(value_type) + expected = "type 'dict of {}:{}'".format( key_type_name, value_type_name) @@ -162,6 +179,4 @@ def __init__( actual, expected, (' if defined', '')[required])) - super(IncorrectTypeError, self).__init__(message) - - + super(IncorrectTypeError, self).__init__(message) \ No newline at end of file diff --git a/common/src/test/java/NotUsed.java b/common/src/test/java/NotUsed.java deleted file mode 100644 index 6608cff8..00000000 --- a/common/src/test/java/NotUsed.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -/** - * Gradle will fail when running the test task if there are not classes in the - * Java test jar. This class is simply here to prevent that from happening. - * If a test is introduced in the future this file will be deleted. 
- */ -public class NotUsed { -} diff --git a/common/src/test/python/dlpx/virtualization/common/test_common_classes.py b/common/src/test/python/dlpx/virtualization/common/test_common_classes.py index 4e2a2862..d0ede0f7 100644 --- a/common/src/test/python/dlpx/virtualization/common/test_common_classes.py +++ b/common/src/test/python/dlpx/virtualization/common/test_common_classes.py @@ -3,7 +3,7 @@ # import pytest -from dlpx.virtualization import common_pb2 +from dlpx.virtualization.api import common_pb2 from dlpx.virtualization.common._common_classes import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) from dlpx.virtualization.common.exceptions import IncorrectTypeError @@ -63,7 +63,7 @@ def test_remote_connection_from_proto_fail(): RemoteConnection.from_proto('') assert err_info.value.message == ( "RemoteConnection's parameter 'connection' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteConnection'.") @@ -115,7 +115,7 @@ def test_remote_environment_from_proto_fail(): RemoteEnvironment.from_proto('') assert err_info.value.message == ( "RemoteEnvironment's parameter 'environment' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteEnvironment'.") @@ -177,7 +177,7 @@ def test_remote_host_from_proto_fail(): RemoteHost.from_proto('') assert err_info.value.message == ( "RemoteHost's parameter 'host' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteHost'.") @@ -220,5 +220,5 @@ def test_remote_user_from_proto_fail(): RemoteUser.from_proto('') assert err_info.value.message == ( "RemoteUser's parameter 'user' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteUser'.") diff --git a/common/src/test/python/dlpx/virtualization/test_common_generated.py b/common/src/test/python/dlpx/virtualization/test_common_generated.py deleted file mode 100644 index b7264066..00000000 --- a/common/src/test/python/dlpx/virtualization/test_common_generated.py +++ /dev/null @@ -1,10 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -from google.protobuf import message - - -def test_import_common(): - from dlpx.virtualization import common_pb2 - assert issubclass(common_pb2.Repository, message.Message) diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000..67a76b31 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,6 @@ +site/ +.DS_Store +build.sh + +# virtualenv directory created by pipenv +.venv/ diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md new file mode 100644 index 00000000..9db47fd6 --- /dev/null +++ b/docs/CONTRIBUTING.md @@ -0,0 +1,102 @@ +# Contributions and Reviews + +This guide will help get you set up to author, edit, and review documentation within our gitlab repo. 
+ +---- + +- [Contributions and Reviews](#contributions-and-reviews) + - [Roles & Tools](#roles-tools) + - [Requirements for Authoring and Editing](#requirements-for-authoring-and-editing) + - [Requirements for Reviewing](#requirements-for-reviewing) + - [Guidelines](#guidelines) + - [Authoring Guidelines](#authoring-guidelines) + - [Reviewing Guidelines](#reviewing-guidelines) + - [Publishing](#publishing) + +---- + +## Roles & Tools + +This section provides a brief overview of the main roles in the git authoring and publishing process. + +**Authors and Editors** + +* Create _markdown_ documentation content +* Commit new/edited documentation to _git_ +* Submit a review with _reviewboard_ +* Push reviews to _gitlab_ + +**Technical Reviewers** + +* Review diffs in _reviewboard_ +* Provide feedback and/or "ship it" + +**Gatekeeper Reviewers** + +* Review diffs in _reviewboard_ +* View changes locally in _mkdocs_ using _reviewboard_ patches +* Provide feedback and/or "ship it" + +### Requirements for Authoring and Editing + +This documentation is written in Markdown. Markdown is an absurdly simple markup language for creating easy to read documents. [Here](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet) is a fairly popular cheat sheet for Markdown basics. You can also find many Markdown GUI tools or integrations. Atom, Sublime Text, and Eclipse all have built-in Markdown editing. Tools like MacDown for Mac may also help. + +In order to create and edit documentation, you will need to be set up with the engineering git stack. Full instructions working with git can be found in the [Engineering Handbook](https://docs.delphix.com/display/EH/Setting+Up+Git). + +The base requirements to make changes to this git repo are: + +* [Install git-utils](https://docs.delphix.com/display/EH/Setting+Up+Git#SettingUpGit-Installgit-utils) (make sure you do this **outside** of your docsdev repo) +* Configure your PATH environment variable with [these instructions](https://gitlab.delphix.com/git/git-utils) +* Configure [git and reviewboard](https://docs.delphix.com/display/EH/Setting+Up+Git#SettingUpGit-ConfigureGit) +* Set up your local dev environment with instructions found [here](https://gitlab.delphix.com/docs/docsdev) + +### Requirements for Reviewing + +Technically, all you need to do a review is access to reviewboard. Since Markdown is easy to read in plain text, you can review changes by simply going to reviewboard and looking at the diff for any document you're assigned to review. Once done you can provide commentary and/or vote to ship it. + +To provide a more thorough review or gatekeeper review, you should [set up your local dev environment](https://gitlab.delphix.com/docs/docsdev) so you can download the diff as a patch and check out the changes visually, or simply use the rbt patch command to sync your local repo up with a review. For example, assuming you already have a local docsdev environment and are reviewing reviewboard ID 99999: + +1. cd ~/\ +2. git checkout -B review-99999 +3. rbt patch 99999 + +These commands created a new branch for your testing called "review-99999", then applied review 99999's changes to your local repo so you can view the changes in mkdocs. + +## Guidelines + +We have two goals: Provide the best documentation we can for our customers, and ensure that the publish process is smooth and intuitive every release. To that end, we have some guidelines for how to create, edit, and review content. + +### Authoring Guidelines + +1. 
Learn Markdown or use a really good IDE. It's easy to use, but there are complex topics like tables, admonitions, links, and images that you may need some practice with. Look at the other docs in the repo for inspiration and tutelage.
+2. Test everything in mkdocs locally. Best practice is to always have mkdocs running in one terminal tab. It auto-refreshes when you make changes, so you can make sure that nothing breaks, and that your content looks good.
+3. Do not create new directories (nav categories) in /docs without working with Jaspal Sumal (jaspal.sumal@delphix.com).
+4. Place all screenshots in the local media/ directory of the category you're editing in. For example, if you're editing a page in docs/Getting_Started, put any screenshots you're going to use in docs/Getting_Started/media.
+5. Use relative links to reference screenshots (./media/image.png) and other pages (../Getting_Started/pagename/).
+6. Beware the .pages file. .pages is a hidden file in every folder that provides page order. Any pages not listed in .pages will be alphabetically ordered _after_ the pages that have been listed. If you have a typo in this file or specify a renamed/deleted page, it will break mkdocs.
+7. Always abide by Engineering requirements for branching, tagging, etc.
+8. Provide verbose and descriptive commit messages.
+9. Submit all changes for review and provide any necessary description on reviewboard when you publish it.
+10. Assign one technical reviewer and one QA reviewer to any change in procedure/technical content. No technical/QA reviewers are necessary for typographical or non-contextual formatting changes.
+11. Pushing requires technical signoff and gatekeeper signoff from the docs team.
+
+### Reviewing Guidelines
+
+1. The diff can usually provide what you need for reviewing changes. However, use mkdocs to review locally whenever possible to ensure good formatting and no breaks to mkdocs.
+2. For minor corrections, leave a general comment on the review and vote to ship it so the author can fix it and push.
+3. For major docs projects (e.g. whole new sections of docs or large batches of changes), coordinate with Jas. It is possible we'd be better off using another approach to review (e.g. track notes via Google Sheets).
+4. If you're a reviewer that is not hooked into reviewboard, and unable to get set up to use it, work with Jas on an alternative approach (e.g. track notes via Google Sheets).
+5. If there are issues in production docs, the current procedure is to post the issue in the #docs channel.
+
+## Publishing
+
+Publishing is currently a manual process that will be automated into the release process at a future point in time. The publishing workflow follows these steps (see the command sketch at the end of this section):
+
+1. After the git repo is frozen, Jas begins review and adjustments.
+2. If there are technical questions or issues, Jas will take them back to engineering for review.
+3. The publish process will run. This process will:
+    * Pull the appropriate branch to a build machine
+    * Run "mkdocs build --clean" to compile the documentation to HTML
+    * Push the documentation to the S3 bucket for the SDK docs
+
+In the future, stage 3 of this process should be automated via a jenkins job and incorporated into the GA release process along with branching/tagging requirements like our app gate.
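+
+For reference, a publish run boils down to a handful of commands. The sketch below mirrors the repo's `docs/build.sh` (trimmed slightly) and assumes AWS credentials are configured under a `delphix` profile:
+
+```
+git fetch
+pipenv run mkdocs build --clean
+aws s3 sync ./site s3://dlpx-virt-sdk-docs --delete --profile delphix
+```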
diff --git a/docs/Pipfile b/docs/Pipfile new file mode 100644 index 00000000..aa02c1f8 --- /dev/null +++ b/docs/Pipfile @@ -0,0 +1,12 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +mkdocs = "*" +mkdocs-material = "*" +markdown-include = "*" +mkdocs-awesome-pages-plugin = "*" + +[dev-packages] diff --git a/docs/Pipfile.lock b/docs/Pipfile.lock new file mode 100644 index 00000000..6b9f6840 --- /dev/null +++ b/docs/Pipfile.lock @@ -0,0 +1,132 @@ +{ + "_meta": { + "hash": { + "sha256": "6766a756dba28084af761664985b55708ca941615fe77db3cafce3ce8c65135d" + }, + "pipfile-spec": 6, + "requires": {}, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "click": { + "hashes": [ + "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", + "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" + ], + "version": "==6.7" + }, + "jinja2": { + "hashes": [ + "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", + "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" + ], + "version": "==2.10" + }, + "livereload": { + "hashes": [ + "sha256:583179dc8d49b040a9da79bd33de59e160d2a8802b939e304eb359a4419f6498", + "sha256:dd4469a8f5a6833576e9f5433f1439c306de15dbbfeceabd32479b1123380fa5" + ], + "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*'", + "version": "==2.5.2" + }, + "markdown": { + "hashes": [ + "sha256:9ba587db9daee7ec761cfc656272be6aabe2ed300fece21208e4aab2e457bc8f", + "sha256:a856869c7ff079ad84a3e19cd87a64998350c2b94e9e08e44270faef33400f81" + ], + "version": "==2.6.11" + }, + "markdown-include": { + "hashes": [ + "sha256:72a45461b589489a088753893bc95c5fa5909936186485f4ed55caa57d10250f" + ], + "index": "pypi", + "version": "==0.5.1" + }, + "markupsafe": { + "hashes": [ + "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" + ], + "version": "==1.0" + }, + "mkdocs": { + "hashes": [ + "sha256:1b4d46cd1cb517cd743358da96a3efc588fd86f81512fb9c28214597b6dc731f", + "sha256:cd7264ea42d76f5bc1a0bd8b0a2c6c6e6be3a8742f5e78f47104a452dbe93600" + ], + "index": "pypi", + "version": "==0.17.5" + }, + "mkdocs-awesome-pages-plugin": { + "hashes": [ + "sha256:87a682fb43b1f416c063645153820074373f774fc6125696dc5005fa742df0df", + "sha256:c39454775b830d7e107178a64155e32254af654818143ba5d1d8331a49b9b48c" + ], + "index": "pypi", + "version": "==1.2.0" + }, + "mkdocs-material": { + "hashes": [ + "sha256:51d3ab7130dc120b1b595868d55ea87a8a5ecec3505dcc29fd6a66ededd99278", + "sha256:78146117b918774f234c23829473eb82bb5955d4562df7501f55a3f8c16d5915" + ], + "index": "pypi", + "version": "==2.9.2" + }, + "pygments": { + "hashes": [ + "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d", + "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc" + ], + "version": "==2.2.0" + }, + "pymdown-extensions": { + "hashes": [ + "sha256:20f2ae1067ab850cab92fcf57487267a7fd1365a7b1e7c5394e1e0778455eec1", + "sha256:7d3fcbb4c5d70a78d1f4c2c7eef02dbe7e1ba08b06cb72e08b3d1027eb77458b" + ], + "version": "==4.12" + }, + "pyyaml": { + "hashes": [ + "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b", + "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf", + "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a", + 
"sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3", + "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1", + "sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1", + "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613", + "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04", + "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f", + "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537", + "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531" + ], + "version": "==3.13" + }, + "six": { + "hashes": [ + "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", + "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + ], + "version": "==1.11.0" + }, + "tornado": { + "hashes": [ + "sha256:5ef073ac6180038ccf99411fe05ae9aafb675952a2c8db60592d5daf8401f803", + "sha256:6d14e47eab0e15799cf3cdcc86b0b98279da68522caace2bd7ce644287685f0a", + "sha256:92b7ca81e18ba9ec3031a7ee73d4577ac21d41a0c9b775a9182f43301c3b5f8e", + "sha256:ab587996fe6fb9ce65abfda440f9b61e4f9f2cf921967723540679176915e4c3", + "sha256:b36298e9f63f18cad97378db2222c0e0ca6a55f6304e605515e05a25483ed51a" + ], + "version": "==4.5.3" + } + }, + "develop": {} +} diff --git a/docs/build.sh b/docs/build.sh new file mode 100755 index 00000000..f256f923 --- /dev/null +++ b/docs/build.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +git fetch +pipenv run mkdocs build --clean +aws s3 sync ./site s3://dlpx-virt-sdk-docs --delete --cache-control "public, max-age=1" --profile delphix +aws s3api put-object-acl --bucket dlpx-virt-sdk-docs --key 404.html --acl public-read --profile delphix diff --git a/docs/docs/.pages b/docs/docs/.pages new file mode 100644 index 00000000..96e7af9b --- /dev/null +++ b/docs/docs/.pages @@ -0,0 +1,8 @@ +arrange: + - index.md + - Getting_Started.md + - Building_Your_First_Plugin + - Versioning_And_Upgrade + - References + - Best_Practices + - Release_Notes diff --git a/docs/docs/Best_Practices/.pages b/docs/docs/Best_Practices/.pages new file mode 100644 index 00000000..a9189905 --- /dev/null +++ b/docs/docs/Best_Practices/.pages @@ -0,0 +1,8 @@ +arrange: + - CLI_Configuration_File.md + - Code_Sharing.md + - Managing_Scripts_For_Remote_Execution.md + - User_Visible_Errors.md + - Sensitive_Data.md + - Unicode_Data.md + - Working_with_Powershell.md diff --git a/docs/docs/Best_Practices/CLI_Configuration_File.md b/docs/docs/Best_Practices/CLI_Configuration_File.md new file mode 100644 index 00000000..c3f4fd82 --- /dev/null +++ b/docs/docs/Best_Practices/CLI_Configuration_File.md @@ -0,0 +1,54 @@ +# CLI Configuration File + +The CLI configuration file can be used to set default values for CLI command options. + +## Location + +The configuration file is located in the user's home directory under `.dvp/config`. + +``` + + └── .dvp + └── config +``` + +Your user's home directory will depend on the operating system, but can be referred to using `~` in Unix-based operating systems or `%UserProfile%` in Windows. + +## Supported Options + +!!! note "Use `default` profile" + + Only the values listed in the `default` profile are used unless they are overridden by values passed in from a command line option with the same name. 
+
+The CLI configuration file supports the following options:
+
+### engine
+Specifies the Delphix Engine to use as part of the [dvp upload](/References/CLI.md#upload) or [dvp download-logs](/References/CLI.md#download-logs) command.
+
+```
+engine = engine.example.com
+```
+
+### user
+Specifies the user for the Delphix Engine, used as part of the [dvp upload](/References/CLI.md#upload) or [dvp download-logs](/References/CLI.md#download-logs) command.
+
+```
+user = admin
+```
+
+### password
+Specifies the password for that user, used as part of the [dvp upload](/References/CLI.md#upload) or [dvp download-logs](/References/CLI.md#download-logs) command.
+
+```
+password = userpassword
+```
+
+### Example
+
+The following example uses all of the supported options for the CLI configuration file:
+```
+[default]
+engine = engine.example.com
+user = admin
+password = userpassword
+```
\ No newline at end of file
diff --git a/docs/docs/Best_Practices/Code_Sharing.md b/docs/docs/Best_Practices/Code_Sharing.md
new file mode 100644
index 00000000..5640e6a2
--- /dev/null
+++ b/docs/docs/Best_Practices/Code_Sharing.md
@@ -0,0 +1,150 @@
+# Code Sharing
+
+All Python modules inside of `srcDir` can be imported just as they would be if the plugin were executing locally. When a plugin operation is executed, `srcDir` is the current working directory, so all imports need to be relative to `srcDir` regardless of the path of the module doing the import.
+
+Please refer to Python's [documentation on modules](https://docs.python.org/2/tutorial/modules.html#modules) to learn more about modules and imports.
+
+## Example
+
+Assume we have the following file structure:
+
+```
+postgres
+├── plugin_config.yml
+├── schema.json
+└── src
+    ├── operations
+    │   ├── __init__.py
+    │   └── discovery.py
+    ├── plugin_runner.py
+    ├── resources
+    │   ├── __init__.py
+    │   ├── execute_sql.sh
+    │   ├── list_installs.sh
+    │   └── list_schemas.sql
+    └── utils
+        ├── __init__.py
+        └── execution_util.py
+```
+
+Any module in the plugin could import `execution_util.py` with `from utils import execution_util`.
+
+!!! warning "Gotcha"
+    Since the platform uses Python 2.7, every directory needs to have an `__init__.py` file in it; otherwise the modules and resources in the folder will not be found at runtime. For more information on `__init__.py` files refer to Python's [documentation on packages](https://docs.python.org/2/tutorial/modules.html#packages).
+
+    Note that the `srcDir` in the plugin config file (`src` in this example) does _not_ need an `__init__.py` file.
+
+Assume `schema.json` contains:
+
+```
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "properties": {
+            "name": { "type": "string" }
+        },
+        "nameField": "name",
+        "identityFields": ["name"]
+    },
+    "sourceConfigDefinition": {
+        "type": "object",
+        "required": ["name"],
+        "additionalProperties": false,
+        "properties": {
+            "name": { "type": "string" }
+        },
+        "nameField": "name",
+        "identityFields": ["name"]
+    }
+}
+```
+
+To keep the code cleaner, this plugin does two things:
+
+1. Splits discovery logic into its own module: `discovery.py`.
+2. Uses two helper functions, `execute_sql` and `execute_shell`, in `utils/execution_util.py` to abstract all remote execution.
+
+### plugin_runner.py
+
+When the platform needs to execute a plugin operation, it always calls into the function decorated by the `entryPoint` object. The rest of the control flow is determined by the plugin.
+In order to split logic, the decorated function must delegate into the appropriate module. Below is an example of `plugin_runner.py` delegating into `discovery.py` to handle repository and source config discovery:
+
+```python
+from operations import discovery
+
+from dlpx.virtualization.platform import Plugin
+
+
+plugin = Plugin()
+
+
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+    return discovery.find_installs(source_connection)
+
+
+@plugin.discovery.source_config()
+def source_config_discovery(source_connection, repository):
+    return discovery.find_schemas(source_connection, repository)
+```
+!!! note
+    `discovery.py` is in the `operations` package so it is imported with `from operations import discovery`.
+
+### discovery.py
+In `discovery.py` the plugin delegates even further to split business logic away from remote execution. `utils/execution_util.py` deals with remote execution and error handling so `discovery.py` can focus on business logic. Note that `discovery.py` still needs to know the format of the return value from each script.
+
+```python
+from dlpx.virtualization import libs
+
+from generated.definitions import RepositoryDefinition, SourceConfigDefinition
+from utils import execution_util
+
+
+def find_installs(source_connection):
+    installs = execution_util.execute_shell(source_connection, 'list_installs.sh')
+
+    # Assume 'installs' is a comma separated list of the names of Postgres installations.
+    install_names = installs.split(',')
+    return [RepositoryDefinition(name=name) for name in install_names]
+
+
+def find_schemas(source_connection, repository):
+    schemas = execution_util.execute_sql(source_connection, repository.name, 'list_schemas.sql')
+
+    # Assume 'schemas' is a comma separated list of the schema names.
+    schema_names = schemas.split(',')
+    return [SourceConfigDefinition(name=name) for name in schema_names]
+```
+!!! note
+    Even though `discovery.py` is in the `operations` package, the import for `execution_util` is still relative to the `srcDir` specified in the plugin config file. `execution_util` is in the `utils` package so it is imported with `from utils import execution_util`.
+
+### execution_util.py
+
+`execution_util.py` has two helper functions, `execute_sql` and `execute_shell`. `execute_sql` takes the name of a SQL script in `resources/` and executes it with `resources/execute_sql.sh`. `execute_shell` takes the name of a shell script in `resources/` and executes it.
\ No newline at end of file
diff --git a/docs/docs/Best_Practices/Managing_Scripts_For_Remote_Execution.md b/docs/docs/Best_Practices/Managing_Scripts_For_Remote_Execution.md
new file mode 100644
index 00000000..ef63e26c
--- /dev/null
+++ b/docs/docs/Best_Practices/Managing_Scripts_For_Remote_Execution.md
@@ -0,0 +1,103 @@
+# Managing Scripts for Remote Execution
+
+To execute a PowerShell, Bash, or Expect script on a remote host, you must provide the script as a string to `run_powershell`, `run_bash`, or `run_expect` respectively. While you can keep these strings as literals in your Python code, best practice is to keep them as resource files in your source directory and access them with `pkgutil`.
+
+[pkgutil](https://docs.python.org/2/library/pkgutil.html) is part of the standard Python library. The function that is applicable to resources is [pkgutil.get_data](https://docs.python.org/2/library/pkgutil.html#pkgutil.get_data).
+
+### Basic Usage
+
+Given the following plugin structure:
+
+```
+├── plugin_config.yml
+├── schema.json
+└── src
+    ├── plugin_runner.py
+    └── resources
+        ├── __init__.py
+        └── get_date.sh
+```
+
+Assume `SnapshotDefinition` is:
+
+```
+"snapshotDefinition": {
+    "type" : "object",
+    "additionalProperties" : false,
+    "properties" : {
+        "name": {"type": "string"},
+        "date": {"type": "string"}
+    }
+}
+```
+
+and `src/resources/get_date.sh` contains:
+
+```
+#!/usr/bin/env bash
+date
+```
+
+If `get_date.sh` is needed in `post_snapshot`, it can be retrieved and executed:
+
+```python
+import pkgutil
+
+from dlpx.virtualization import libs
+from dlpx.virtualization.platform import Plugin
+from dlpx.virtualization.platform.exceptions import UserError
+
+from generated.definitions import SnapshotDefinition
+
+
+plugin = Plugin()
+
+@plugin.linked.post_snapshot()
+def post_snapshot(direct_source, repository, source_config):
+    # Retrieve script contents
+    script_content = pkgutil.get_data('resources', 'get_date.sh')
+
+    # Execute script on remote host
+    response = libs.run_bash(direct_source.connection, script_content)
+
+    # Fail operation if the timestamp couldn't be retrieved
+    if response.exit_code != 0:
+        raise UserError(
+            'Failed to get date',
+            'Make sure the user has the required permissions',
+            '{}\n{}'.format(response.stdout, response.stderr))
+
+    return SnapshotDefinition(name='Snapshot', date=response.stdout)
+```
+
+!!! note "Python's Working Directory"
+    This assumes that `src/` is Python's current working directory. This is the behavior of the Virtualization Platform.
+
+!!! warning "Resources need to be in a Python module"
+    `pkgutil.get_data` cannot retrieve the contents of a resource that is not in a Python package. This means that a resource that is in the first level of your source directory will not be retrievable with `pkgutil`. Resources must be in a subdirectory of your source directory, and that subdirectory must contain an `__init__.py` file.
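+
+On Python 2.7, `pkgutil.get_data` returns the raw file contents as a `str`, so the result can be handed straight to `run_bash` without any decoding. A quick sanity check, as a sketch (run with `src/` as the working directory so that `resources` is importable):
+
+```python
+import pkgutil
+
+# Raw bytes of the script, exactly as stored on disk
+content = pkgutil.get_data('resources', 'get_date.sh')
+assert isinstance(content, str)
+print(content)
+```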
+
+### Multi-level Packages
+
+Given the following plugin structure:
+
+```
+├── plugin_config.yml
+├── schema.json
+└── src
+    ├── plugin_runner.py
+    └── resources
+        ├── __init__.py
+        ├── database
+        │   ├── __init__.py
+        │   └── execute_sql.sh
+        └── platform
+            ├── __init__.py
+            └── get_date.sh
+```
+
+The contents of `src/resources/platform/get_date.sh` can be retrieved with:
+
+```python
+script_content = pkgutil.get_data('resources.platform', 'get_date.sh')
+```
diff --git a/docs/docs/Best_Practices/Sensitive_Data.md b/docs/docs/Best_Practices/Sensitive_Data.md
new file mode 100644
index 00000000..8665649f
--- /dev/null
+++ b/docs/docs/Best_Practices/Sensitive_Data.md
@@ -0,0 +1,112 @@
+# Dealing With Sensitive Data
+
+Often, a plugin will need to handle sensitive user-provided data. The most common example of this is a database password.
+
+Plugins must be careful to handle sensitive data appropriately. Three tips for handling sensitive data are:
+
+1. Tell the Delphix Engine which parts of your data are sensitive.
+2. When passing sensitive data to remote plugin library functions (such as `run_bash`), use environment variables.
+3. Avoid logging, or otherwise writing out, the sensitive data.
+
+Each of these tips is explained below.
+
+# Marking Your Data As Sensitive
+
+Because the Delphix Engine manages the storing and retrieving of plugin-defined data, it needs to know which pieces of data are sensitive. The plugin does this in its [schemas](/References/Glossary.md#schema), by using the special [`password`](/References/Schemas.md#password) keyword.
+
+The following example of a schema defines an object with three properties, one of which is sensitive and tagged with the `password` keyword:
+
+```json
+{
+    "type": "object",
+    "properties": {
+        "db_connectionPort": {"type": "string"},
+        "db_username": {"type": "string"},
+        "db_password": {"type": "string", "format": "password"}
+    }
+}
+```
+
+This tells the Delphix Engine to take special precautions with this password property, as follows:
+
+1. The Delphix Engine will encrypt the password before storing it, and decrypt it only as necessary to pass back to the plugin.
+2. The Delphix Engine will not write this password anywhere (for example, it will not appear in any system logs).
+3. The Delphix Engine's UI and CLI will not display the password.
+4. Clients of the Delphix Engine's public API will not be able to access the password.
+
+# Using Environment Variables For Remote Data Passing
+
+Sometimes, a plugin will need to pass sensitive data to a remote environment. For example, perhaps a database command needs to be run on a [staging environment](/References/Glossary.md#staging-environment), and that database command will need to use a password.
+
+## Example
+Let us take a look at a very simple example where we need to shut down a database called "inventory" on a target environment by using the `db_cmd shutdown inventory` command. This command will ask for a password on `stdin`, and for our example our password is "hunter2".
+
+If we were running this command by hand, it might look like this:
+```bash
+$ db_cmd shutdown inventory
+Connecting to database instance...
+Please enter database password:
+```
+
+At this point, we would type in "hunter2", and the command would proceed to shut down the database.
+
+Since a plugin cannot type in the password by hand, it will do something like this instead:
+
+```bash
+$ echo "hunter2" | db_cmd shutdown inventory
+```
+
+## Don't Do This
+
+First, let us take a look at how **not** to do this!
+Here is a bit of plugin Python code that will run the above command.
+
+```python
+from dlpx.virtualization import libs
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.virtual.stop()
+def my_virtual_stop(virtual_source, repository, source_config):
+    # THIS IS INSECURE! DO NOT DO THIS!
+    full_command = "echo {} | db_cmd shutdown {}".format(password, db_name)
+    libs.run_bash(virtual_source.connection, full_command)
+```
+
+This constructs a Python string containing exactly the desired command from above. However, this is not recommended.
+
+The problem here is that there is a cleartext password in the Python string. But this Python string is not treated as sensitive by the Virtualization Platform. For example, suppose the Virtualization Platform cannot make a connection to the target environment. In that case, it will raise an error containing the Python string, so that people will know what command failed. But, in our example, that would result in the password being part of the cleartext error message.
+
+## Using Environment Variables
+
+The Delphix Engine provides a better way to pass sensitive data to remote bash (or powershell) calls: environment variables. Let us look at a different way to run the same command as above.
+
+```python
+from dlpx.virtualization import libs
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.virtual.stop()
+def my_virtual_stop(virtual_source, repository, source_config):
+    # Use environment variables to pass sensitive data to remote commands
+    environment_vars = {
+        "DATABASE_PASSWORD": password
+    }
+    full_command = "echo $DATABASE_PASSWORD | db_cmd shutdown {}".format(db_name)
+    libs.run_bash(virtual_source.connection, full_command, variables=environment_vars)
+```
+
+!!! note
+    We are no longer putting the cleartext password into the Python command string. Instead, we are instructing the Virtualization Platform to put the password into an environment variable on the target environment. The Python command string merely mentions the name of the environment variable, and does not contain the password itself.
+
+Once the command runs on the target environment, Bash will substitute in the password, and the database shutdown will run as expected.
+
+Unlike with the command string, the Virtualization Platform **does** treat environment variables as sensitive information, and will not include them in error messages or internal logs, etc.
+
+# Don't Write Out Sensitive Data
+
+Plugin writers are strongly advised to never write out unencrypted sensitive data. This is common-sense general advice that applies to all areas of programming, not just for plugins. However, there are a couple of special concerns for plugins.
+
+The Virtualization Platform provides logging capabilities to plugins. The generated logs are unencrypted and not treated as sensitive. Therefore, it is important for plugins to **never log sensitive data**.
+
+In addition, remember that your plugin is not treated as sensitive by the Virtualization Platform. Plugin code is distributed unencrypted, and is viewable in cleartext by Delphix Engine users. Sensitive data such as passwords should never be hard-coded in your plugin code.
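+
+As a rule of thumb, log that an operation happened, never the secret itself. A minimal sketch using Python's standard `logging` module (the logger name and messages here are illustrative):
+
+```python
+import logging
+
+logger = logging.getLogger('my_plugin')
+
+
+def shutdown_database(db_name, password):
+    # Don't: this would write the cleartext password into unencrypted logs.
+    # logger.debug('shutting down %s with password %s', db_name, password)
+
+    # Do: log only the non-sensitive context.
+    logger.debug('shutting down database %s', db_name)
+```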
diff --git a/docs/docs/Best_Practices/Unicode_Data.md b/docs/docs/Best_Practices/Unicode_Data.md
new file mode 100644
index 00000000..06b6f88a
--- /dev/null
+++ b/docs/docs/Best_Practices/Unicode_Data.md
@@ -0,0 +1,29 @@
+# Working with Unicode Data
+
+To use unicode characters in the plugin code, the following lines should be included at the top of the plugin code:
+
+```python
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+```
+
+Otherwise, there may be errors when building the plugin using [dvp build](/References/CLI.md#build) or during the execution of a plugin operation.
+
+## Example
+
+```python
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from dlpx.virtualization.platform import Plugin
+from dlpx.virtualization import libs
+from generated.definitions import RepositoryDefinition
+
+plugin = Plugin()
+
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+    # Create a repository with name ☃
+    command = 'echo ☃'
+    result = libs.run_bash(source_connection, command)
+    return [RepositoryDefinition(name=result.stdout)]
+```
\ No newline at end of file
diff --git a/docs/docs/Best_Practices/User_Visible_Errors.md b/docs/docs/Best_Practices/User_Visible_Errors.md
new file mode 100644
index 00000000..db3476b5
--- /dev/null
+++ b/docs/docs/Best_Practices/User_Visible_Errors.md
@@ -0,0 +1,40 @@
+# User Visible Errors
+
+Plugin authors can choose to fail a plugin operation by raising an exception of type `UserError` with a custom message, action and output for the end user.
+
+## Fields
+
+Field | Type | Description
+----- | ---- | -----------
+message | String | Description of the failure to show the end user.
+action | String | **Optional**. List of actions that the end user could take to fix the problem. If not provided, it defaults to `Contact the plugin author to correct the error.`
+output | String | **Optional**. Output or stack trace from the failure to give the end user more information so that they can self-diagnose. If not provided, it defaults to the stack trace of the failure.
+
+
+## Example
+
+```python
+import pkgutil
+from dlpx.virtualization import libs
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import SourceConfigDefinition
+from dlpx.virtualization.platform.exceptions import UserError
+
+plugin = Plugin()
+
+@plugin.virtual.start()
+def start(virtual_source, repository, source_config):
+    script_content = pkgutil.get_data('resources', 'start_database.sh')
+
+    response = libs.run_bash(virtual_source.connection, script_content)
+
+    # Fail operation if the database could not be started
+    if response.exit_code != 0:
+        raise UserError(
+            'Failed to start the database',
+            'Make sure the user has appropriate permissions',
+            '{}\n{}'.format(response.stdout, response.stderr))
+```
+
+The UI would show the following to the end user if the plugin operation above fails:
+
+![Screenshot](images/UserError_Start.png)
\ No newline at end of file
diff --git a/docs/docs/Best_Practices/Working_with_Powershell.md b/docs/docs/Best_Practices/Working_with_Powershell.md
new file mode 100644
index 00000000..609cffe8
--- /dev/null
+++ b/docs/docs/Best_Practices/Working_with_Powershell.md
@@ -0,0 +1,85 @@
+
+### Error handling in PowerShell
+
+!!! info
+    Commands run via `run_powershell` are executed as a script. The exit code returned by `run_powershell` as part of the `RunPowershellResult` is determined by the exit code from the script.
+
+PowerShell gives you a few ways to handle errors in your scripts:
+
+For scripts or other executables such as sqlcmd, PowerShell will return with exit code 0 even if there is an error, regardless of the value of `$ErrorActionPreference`. The allowable values for `$ErrorActionPreference` are:
+
+    - Continue (default) – Continue even if there is an error.
+    - SilentlyContinue – Acts like Continue, except that errors are not displayed.
+    - Inquire – Prompts the user in case of error.
+    - Stop – Stops execution after the first error.
+
+2. Use exception handling, via traps, try/catch blocks, or if statements, to detect errors and return with non-zero exit codes.
+
+3. Use custom error handling that can be invoked after launching each command in the script to correctly detect errors.
+
+### Examples
+
+The following example shows how the setting of `$ErrorActionPreference` affects the exit code.
+
+In the code below, `ls nothing123` is expected to fail.
+
+```Windows
+ls nothing123
+Write-Host "Test"
+```
+
+Here is the output when the above script runs on a remote host. The script returns `$?` as True even though the `ls` command failed.
+
+```
+PS C:\Users\dtully\test> ./test1.ps1
+ls : Cannot find path 'C:\Users\dtully\test\nothing123' because it does not exist.
+At C:\Users\dtully\test\test1.ps1:1 char:1
++ ls nothing123
++ ~~~~~~~~~~~~~
+    + CategoryInfo          : ObjectNotFound: (C:\Users\dtully\test\nothing123:String) [Get-ChildItem], ItemNotFoundException
+    + FullyQualifiedErrorId : PathNotFound,Microsoft.PowerShell.Commands.GetChildItemCommand
+
+PS C:\Users\dtully\test> Write-Host $?
+True
+```
+Now let's set `$ErrorActionPreference` to `"Stop"`.
+
+```Windows
+$ErrorActionPreference = "Stop"
+ls nothing123
+Write-Host "Test"
+```
+Now when we run the script again, we see that `$?` returns False.
+
+```
+PS C:\Users\dtully\test> ./test1.ps1
+ls : Cannot find path 'C:\Users\dtully\test\nothing123' because it does not exist.
+At C:\Users\dtully\test\test1.ps1:2 char:1
++ ls nothing123
++ ~~~~~~~~~~~~~
+    + CategoryInfo          : ObjectNotFound: (C:\Users\dtully\test\nothing123:String) [Get-ChildItem], ItemNotFoundException
+    + FullyQualifiedErrorId : PathNotFound,Microsoft.PowerShell.Commands.GetChildItemCommand
+
+PS C:\Users\dtully\test> Write-Host $?
+False
+```
+
+The following example shows how you can use the function `verifySuccess` to detect whether the previous command failed and, if it did, print an error message and exit with code 1.
+
+```Windows
+function die {
+    Write-Error "Error: $($args[0])"
+    exit 1
+}
+
+function verifySuccess {
+    if (!$?)
+    {
+        die "$($args[0])"
+    }
+}
+
+Write-Output "I'd rather be in Hawaii"
+verifySuccess "WRITE_OUTPUT_FAILED"
+
+& "C:\Program Files\Delphix\scripts\myscript.ps1"
+verifySuccess "MY_SCRIPT_FAILED"
+```
diff --git a/docs/docs/Best_Practices/images/UserError_Start.png b/docs/docs/Best_Practices/images/UserError_Start.png
new file mode 100644
index 00000000..9a0e62fe
Binary files /dev/null and b/docs/docs/Best_Practices/images/UserError_Start.png differ
diff --git a/docs/docs/Building_Your_First_Plugin/.pages b/docs/docs/Building_Your_First_Plugin/.pages
new file mode 100644
index 00000000..0d70c50c
--- /dev/null
+++ b/docs/docs/Building_Your_First_Plugin/.pages
@@ -0,0 +1,6 @@
+arrange:
+  - Overview.md
+  - Initial_Setup.md
+  - Discovery.md
+  - Data_Ingestion.md
+  - Provisioning.md
diff --git a/docs/docs/Building_Your_First_Plugin/Data_Ingestion.md b/docs/docs/Building_Your_First_Plugin/Data_Ingestion.md
new file mode 100644
index 00000000..87c04dfa
--- /dev/null
+++ b/docs/docs/Building_Your_First_Plugin/Data_Ingestion.md
@@ -0,0 +1,292 @@
+# Data Ingestion
+
+## How Does Delphix Ingest Data?
+
+As [previously](Discovery.md) discussed, the Delphix Engine uses the [discovery](/References/Glossary.md#discovery) process to learn about datasets that live on a [source environment](/References/Glossary.md#source-environment). In this section we will learn how the Delphix Engine uses a two-step process to ingest a dataset.
+
+### Linking
+
+The first step is called [linking](/References/Glossary.md#linking). This is simply the creation of a new dataset on the Delphix Engine, which is associated with the dataset on the source environment. This new linked dataset is called a [dSource](/References/Glossary.md#dsource).
+
+### Syncing
+
+Immediately after linking, the new dSource is [synced](/References/Glossary.md#syncing) for the first time. Syncing is a process by which data from the source environment is copied onto the Delphix Engine. Subsequent syncs may then be periodically performed in order to keep the dSource up-to-date.
+
+The details of how this is done vary significantly from plugin to plugin. For example, some plugins will simply copy files from the filesystem. Other plugins might contact a DBMS and instruct it to send backup or replication streams. There are many possibilities here, but they all break down into two main strategies that the plugin author can choose from: direct and staging.
+
+With the [direct](/References/Glossary.md#direct-linkingsyncing) strategy, the plugin is not in charge
+of the data copying. Instead the Delphix Engine directly pulls raw data from the source environment.
+The plugin merely provides the location of the data. This is a very simple strategy, and is also
+quite limiting.
+
+For our first plugin, we will be using the more flexible [staging](/References/Glossary.md#staged-linkingsyncing) strategy. With this strategy, the Delphix Engine uses NFS for Unix environments (or iSCSI on Windows environments) to mount storage onto a [staging environment](/References/Glossary.md#staging-environment). Our plugin will then be in full control of how to get data from the source environment onto this storage mount.
+
+With the staging strategy, there are two types of syncs: sync and resync. A `sync` is used to ingest incremental changes, while a `resync` is used to re-ingest all the data for the dSource. For databases, this could mean re-ingesting from a full database backup to reset the dSource. A `sync` and a `resync` execute the same plugin operations and are differentiated by a boolean flag in the [snapshot_parameters](/References/Classes.md#snapshotparametersdefinition) argument passed into [linked.pre_snapshot](/References/Plugin_Operations.md#staged-linked-source-pre-snapshot) and [linked.post_snapshot](/References/Plugin_Operations.md#staged-linked-source-post-snapshot).
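+As a sketch of how a plugin that does differentiate between the two might branch on that flag (our plugin will not; the `resync` attribute name follows the snapshot parameters definition linked above):
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.linked.pre_snapshot()
+def linked_pre_snapshot(staged_source, repository, source_config, snapshot_parameters):
+    if snapshot_parameters.resync:
+        # Initial ingestion or a manual "Resynchronize dSource":
+        # re-ingest everything, e.g. from a full backup.
+        pass
+    else:
+        # Routine, policy-driven sync: ingest only incremental changes.
+        pass
+```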
+A regular `sync` is the default and is executed as part of policy-driven syncs. A `resync` is only executed during initial ingestion or if the Delphix user manually starts one. The customer can manually trigger a `resync` via the UI by selecting the dSource, going to more options, and selecting **Resynchronize dSource**. ![Screenshot](images/Resync.png)
+
+!!! tip "Gotcha"
+    Although it is not common, it is entirely possible that the staging environment is the same as the source environment. Be careful not to assume otherwise in your plugins.
+
+### Our Syncing Strategy
+
+For our purposes here in this intro plugin, we will use a simple strategy. We won't do anything with the resync snapshot parameter, and will simply copy files from the filesystem on the source environment onto the NFS mount on the staging environment. We will do this by running the Unix tool `rsync` from our staging environment, and rely on passwordless SSH to connect to the source environment.
+
+!!! info
+    This plugin is assuming that `rsync` is installed on the staging host, and that the staging
+    host user is able to SSH into the source host without having to type in a password. A more
+    full-featured plugin would test these assumptions, usually as part of discovery.
+
+In the special case mentioned above, where the staging environment is the same as the source environment, we could likely do something more efficient. However, for simplicity's sake, we won't do that here.
+
+## Defining Your Linked Source Data Format
+
+In order to successfully do the required copying, plugins might need to get some information from the end user. In our case, we need to tell `rsync` how to access the files. This means we need to know the source environment's IP address (or domain name), the username we need to connect as, and finally the location where the files live.
+
+Again, we will be using a JSON schema to define the data format. The user will be presented with a UI that lets them provide all the information our schema specifies.
+
+Open up `schema.json` in your editor/IDE. Locate the `linkedSourceDefinition` and replace it with the following schema:
+```json
+"linkedSourceDefinition": {
+    "type": "object",
+    "additionalProperties": false,
+    "required": ["sourceAddress", "username", "mountLocation"],
+    "properties": {
+        "sourceAddress": {
+            "type": "string",
+            "prettyName": "Host from which to copy",
+            "description": "IP or FQDN of host from which to copy"
+        },
+        "username": {
+            "type": "string",
+            "prettyName": "Username on Source Host",
+            "description": "Username for making SSH connection to source host"
+        },
+        "mountLocation": {
+            "type": "string",
+            "format": "unixpath",
+            "prettyName": "Mount Location on Staging Host",
+            "description": "Where to mount storage onto the staging host while syncing"
+        }
+    }
+},
+```
+
+!!! info
+    As will be explained later, this schema will be used to generate Python code.
+    All names in the autogenerated Python code will use `lower_case_with_underscores` as attribute names, as per Python variable naming conventions. That is, if we were to use `mountLocation` as the schema property name, it would be called `mount_location` in the generated Python code.
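+As a rough illustration (the helper function below is invented purely for this example), the properties above would be accessed like this once the classes are generated:
+
+```python
+def describe_linked_source(staged_source):
+    # 'sourceAddress' and 'mountLocation' from the schema surface as
+    # snake_case attributes on the generated parameters object.
+    return "{} mounted at {}".format(staged_source.parameters.source_address,
+                                     staged_source.parameters.mount_location)
+```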
+
+With this schema, the user will be required to provide the source username, the source's IP address, and the staging mount location as part of the linking process.
+
+
+## Implementing Syncing in Your Plugin
+
+There are three things we must do to implement syncing. First, we need to tell the Delphix Engine
+where to mount storage onto the staging environment. Next, we need to actually do the work of copying
+data onto that mounted storage. Finally, we need to generate any snapshot-related data.
+
+### Mount Specification
+
+Before syncing can begin, the Delphix Engine needs to mount some storage onto the staging host.
+Since different plugins can have different requirements about where exactly this mount lives, it is
+up to the plugin to specify this location. As mentioned above, our simple plugin will get this
+location from the user.
+
+Open up the `plugin_runner.py` file and find the `linked_mount_specification` function (which was generated by `dvp init`). Replace it with the following code:
+```python
+@plugin.linked.mount_specification()
+def linked_mount_specification(staged_source, repository):
+    mount_location = staged_source.parameters.mount_location
+    mount = Mount(staged_source.staged_connection.environment, mount_location)
+    return MountSpecification([mount])
+```
+
+Let's take this line-by-line to see what's going on here.
+
+```python
+@plugin.linked.mount_specification()
+```
+This [decorator](/References/Glossary.md#decorator) announces that the following function
+is the code that handles the `mount_specification` operation. This is what allows the Delphix
+Engine to know which function to call when it's time to learn where to mount. Every operation
+definition will begin with a similar decorator.
+
+```python
+def linked_mount_specification(staged_source, repository):
+```
+This begins a Python function definition. We chose to call it `linked_mount_specification`, but we
+could have chosen any name at all. This function accepts two arguments, one giving information about
+the linked source, and one giving information about the associated repository.
+
+```python
+    mount_location = staged_source.parameters.mount_location
+```
+
+The `staged_source` input argument contains an attribute called `parameters`. This in turn contains
+all of the properties defined by the `linkedSourceDefinition` schema. So, in our case, that means
+it will contain attributes called `source_address`, `username`, and `mount_location`. Note how any attribute defined in `camelCase` in the schema is converted to `variable_with_underscores`. This line
+simply retrieves the user-provided mount location and saves it in a local variable.
+
+```python
+    mount = Mount(staged_source.staged_connection.environment, mount_location)
+```
+
+This line constructs a new object from the [Mount class](/References/Classes.md#mount). This class
+holds details about how Delphix Engine storage is mounted onto remote environments. Here, we
+create a mount object that says to mount onto the staging environment, at the location specified
+by the user.
+
+```python
+    return MountSpecification([mount])
+```
+
+On the line just before this one, we created an object that describes a *single* mount. Now, we
+must return a full [mount specification](/References/Glossary.md#mount-specification). In general,
+a mount specification is a collection of mounts. But, in our case, we just have one single mount.
+Therefore, we use an array with only one item in it -- namely, the one single mount object we
+created just above.
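+If a plugin ever did need more than one mount, it could hypothetically return several `Mount` objects in its specification. Ours does not, so treat the sketch below as illustrative only; `extra_mount_location` is an invented property that would have to exist in the `linkedSourceDefinition` schema:
+
+```python
+from dlpx.virtualization.platform import Mount, MountSpecification, Plugin
+
+plugin = Plugin()
+
+@plugin.linked.mount_specification()
+def linked_mount_specification(staged_source, repository):
+    environment = staged_source.staged_connection.environment
+    primary = Mount(environment, staged_source.parameters.mount_location)
+    # 'extra_mount_location' is hypothetical and for illustration only.
+    extra = Mount(environment, staged_source.parameters.extra_mount_location)
+    return MountSpecification([primary, extra])
+```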
+
+
+### Data Copying
+
+As explained [here](/References/Workflows.md#linked-source-sync), the Delphix Engine will always run the plugin's `preSnapshot` operation just before taking a snapshot of the dSource. That means our `preSnapshot` operation has to get the NFS share into the desired state. For us, that is the time to do our data copy.
+
+Unlike the previous operations we've seen so far, the pre-snapshot operation will not be autogenerated by `dvp init`.
+So, we will need to add one ourselves. Open up the `plugin_runner.py` file.
+
+First, we'll add new import lines near the top of the file, so that we can use Delphix's platform libraries and raise user-visible errors (explained below).
+```python
+from dlpx.virtualization import libs
+from dlpx.virtualization.platform.exceptions import UserError
+```
+
+
+Next, we'll add a new function:
+
+```python
+@plugin.linked.pre_snapshot()
+def copy_data_from_source(staged_source, repository, source_config, snapshot_parameters):
+    stage_mount_path = staged_source.mount.mount_path
+    data_location = "{}@{}:{}".format(staged_source.parameters.username,
+                                      staged_source.parameters.source_address,
+                                      source_config.path)
+
+    rsync_command = "rsync -r {} {}".format(data_location, stage_mount_path)
+
+    result = libs.run_bash(staged_source.staged_connection, rsync_command)
+
+    if result.exit_code != 0:
+        raise UserError(
+            "Could not copy files.",
+            "Ensure that passwordless SSH works for {}.".format(staged_source.parameters.source_address),
+            result.stderr)
+```
+
+Let's walk through this function and see what's going on.
+
+```python
+    stage_mount_path = staged_source.mount.mount_path
+```
+
+The `staged_source` argument contains information about the current mount location. Here we save that
+to a local variable for convenience.
+
+```python
+    data_location = "{}@{}:{}".format(staged_source.parameters.username,
+                                      staged_source.parameters.source_address,
+                                      source_config.path)
+```
+
+This code creates a Python string that represents the location of the data that we want to ingest.
+This is in the form `<username>@<host>:<path>`. For example, `jdoe@sourcehost.mycompany.com:/bin`. As
+before with `mountLocation`, we have defined our schemas such that these three pieces of information
+are provided by the user. Here we're just putting them into a format that `rsync` will understand.
+
+```python
+    rsync_command = "rsync -r {} {}".format(data_location, stage_mount_path)
+```
+
+This line builds the actual Bash command that we'll be running on the staging host. This will look something like `rsync -r user@host:/source/path /staging/mount/path`.
+
+```python
+    result = libs.run_bash(staged_source.staged_connection, rsync_command)
+```
+
+This is an example of a [platform library](/References/Glossary.md#platform-libraries) function, where we ask the Virtualization Platform
+to do some work on our behalf. In this case, we're asking the platform to run our Bash command on the
+staging environment. For full details on the `run_bash` platform library function and others, see this [reference](/References/Platform_Libraries.md).
+
+```python
+    if result.exit_code != 0:
+        raise UserError(
+            "Could not copy files.",
+            "Ensure that passwordless SSH works for {}.".format(staged_source.parameters.source_address),
+            result.stderr)
+```
+Finally, we check to see if our Bash command actually worked. If not, we raise an error
+describing one possible problem for the user to investigate. For more details on raising user-visible errors, see this [reference](/Best_Practices/User_Visible_Errors.md).
+
+
+### Saving Snapshot Data
+
+Whenever the Delphix Engine takes a [snapshot](/References/Glossary.md#snapshot) of a dSource or VDB,
+the plugin has the chance to save any information it likes alongside that snapshot. Later, if the
+snapshot is ever used to provision a new VDB, the plugin can use the previously-saved information
+to help get the new VDB ready for use.
+
+The format of this data is controlled by the plugin's `snapshotDefinition` schema. In our case, we
+don't have any data we need to save. So, there's not much to do here. We will not modify the blank
+schema that was created by `dvp init`.
+
+We do still need to provide a Python function for the engine to call, but we don't have to do much.
+In fact, the default implementation that was generated by `dvp init` will work just fine for our purposes:
+
+```python
+@plugin.linked.post_snapshot()
+def linked_post_snapshot(staged_source,
+                         repository,
+                         source_config,
+                         snapshot_parameters):
+    return SnapshotDefinition()
+```
+
+The only thing this code is doing is creating a new object using our (empty) snapshot
+definition, and returning that new empty object.
+
+
+## How to Link and Sync in the Delphix Engine
+
+Let's try it out and make sure this works!
+
+**Prerequisites**
+
+ - You should already have a repository and source config set up from the previous page.
+
+ - You can optionally set up a new staging environment. Or, you can simply re-use your source
+ environment for staging.
+
+**Procedure**
+
+!!! note
+    Recall that, for simplicity's sake, this plugin requires that passwordless SSH is set up between
+    your staging and source environments. You may want to verify this before continuing.
+
+1. As before, use `dvp build` and `dvp upload` to get your latest plugin changes installed onto
+the Delphix Engine.
+
+2. Go to **Manage > Environments**, select your **source** environment, and then go to the **Databases** tab. Find **Repository for our First Plugin**, and your source config underneath it.
+
+3. From your source config, click **Add dSource**. This will begin the linking process. The first
+screen you see should ask for the properties that you recently added to your `linkedSourceDefinition`. ![Screenshot](images/LinkingWizard.png)
+
+4. Walk through the remainder of the screens and hit **Submit**. This will kick off the initial link and first sync.
+
+5. You can confirm that your new dSource was added successfully by going to **Manage > Datasets**.
+
+After you have finished entering this information, the initial sync process will begin. This is what will call your pre-snapshot operation, thus copying data.
+
+!!! warning "Gotcha"
+    Manually creating a dSource sets your plugin's linked source schema in stone, and you will have to recreate the dSource in order to modify your schema. We will cover how to deal with this correctly later, in the [upgrade section](/Versioning_And_Upgrade/Upgrade.md). For now, if you need to change your plugin's linked source schema, you will have to first delete any dSources you have manually added.
diff --git a/docs/docs/Building_Your_First_Plugin/Discovery.md b/docs/docs/Building_Your_First_Plugin/Discovery.md
new file mode 100644
index 00000000..67eb9e2a
--- /dev/null
+++ b/docs/docs/Building_Your_First_Plugin/Discovery.md
@@ -0,0 +1,252 @@
+# Discovery
+
+## What is Discovery?
+
+In order to ingest data from a source environment, the Delphix Engine first needs to learn information about the data: Where does it live? How can it be accessed? What is it called?
+
+[Discovery](/References/Glossary.md#discovery) is the process by which the Delphix Engine learns about remote data. Discovery can be either:
+
+- [automatic](/References/Glossary.md#automatic-discovery) — where the plugin finds the remote data on its own
+- [manual](/References/Glossary.md#manual-discovery) — where the user tells us about the remote data
+
+For our first plugin, we will be using a mix of these two techniques.
+
+## Source Configs and Repositories
+
+### What are Source Configs and Repositories?
+
+A [source config](/References/Glossary.md#source-config) is a collection of information that Delphix uses to represent a dataset. Different plugins will have different ideas about what a "dataset" is (an entire database? a set of config files? an application?). For our first plugin, it is simply a directory tree on the filesystem of the remote environment.
+
+A [repository](/References/Glossary.md#repository) represents what you might call "data dependencies" -- anything installed on the remote host that the dataset depends on. For example, if you are working with a Postgres database, then your repository will represent an installation of a particular version of the Postgres DBMS. In this plugin, we do not have any special dependencies, except for the simple existence of the Unix system on which the directory lives.
+
+We will be using automatic discovery for our repositories, and manual discovery for our source configs. This is the default configuration that is created by `dvp init`, so there is nothing further we need to do here.
+
+### Defining Your Data Formats
+Because each plugin will have different ideas about what a repository or source config represents, different plugins will have different sets of information that they need to collect and store.
+
+Delphix needs to know the format of this information. How many pieces of information are collected? What are they called? Are they strings? Numbers?
+
+For our first plugin, we do not need a lot of information. We use no special information about our repositories (except some way for the user to identify them). For source configs, all we need to know is the path to the directory from which we will be ingesting data.
+
+The plugin needs to describe all of this to the Delphix Engine, and it does so using [schemas](/References/Glossary.md#schema). Recall that when we ran `dvp init`, a file full of bare-bones schemas was created. As we build up our first plugin, we will be augmenting these schemas to serve our needs.
+
+#### Repository Schema
+Open up the `schema.json` file in your editor/IDE and locate `repositoryDefinition`. It should look like this:
+
+```json
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "properties": {
+            "name": { "type": "string" }
+        },
+        "nameField": "name",
+        "identityFields": ["name"]
+    }
+}
+```
+
+Since we do not have any special dependencies, we can just leave it as-is.
+
+For detailed information about exactly how repository schemas work, see [the reference page](/References/Schemas.md).
+
+In brief, what we are doing here is saying that each of our repositories will have a single property called `name`, which will be used both as a unique identifier and as the user-visible name of the repository.
+
+#### Source Config Schema
+
+For source configs, the bare-bones schema is not going to be good enough.
+Recall that for us, a source config represents a directory tree on a remote environment.
+
+Locate the `sourceConfigDefinition` inside the `schema.json` file and modify the definition so it looks like this:
+
+```json
+"sourceConfigDefinition": {
+    "type": "object",
+    "required": ["name", "path"],
+    "additionalProperties": false,
+    "properties": {
+        "name": {
+            "type": "string",
+            "prettyName": "Dataset Name",
+            "description": "User-visible name for this dataset"
+        },
+        "path": {
+            "type": "string",
+            "format": "unixpath",
+            "prettyName": "Path",
+            "description": "Full path to data location on the remote environment"
+        }
+    },
+    "nameField": "name",
+    "identityFields": ["path"]
+},
+```
+
+Now we have two properties: `name`, which serves as the user-visible name of the source config, and `path`, which tells us where the data lives on the remote host. Note that we are using `path` as the unique identifier.
+
+Because we are using manual discovery, the end user is going to be responsible for filling in values for `name` and `path`. So, we have added some things to our schema that we did not need for repositories.
+
+The `prettyName` and `description` entries will be used by the UI to tell the user what these fields mean.
+
+Because we set `additionalProperties` to `false`, users will be prevented from supplying properties other than `name` and `path`.
+
+Finally, we have specified that the `path` property must be a well-formatted Unix path. This allows the UI to enforce that the format is correct before the user is allowed to proceed. (Note this only enforces the format, and does not actually check to see if the path really exists on some remote environment!)
+
+Refer to the reference page for [Schemas](/References/Schemas.md) for more details about these entries, and for other things that you can do in these schemas.
+
+## Implementing Discovery in Your Plugin
+
+### About Python Code
+
+As described in the overview section, plugins customize the behavior of the Delphix Engine by providing Python code. Each customizable piece of behavior is called a "plugin operation". The plugin provides separate Python functions for each of the operations that it wants to customize.
+
+Right now, we are concerned with discovery. There are two customizable operations related to automatic discovery, one for repositories and one for source configs. In both cases, the job of the Python function is to automatically collect whatever information the schemas (see above) require, and to return that information to the Delphix Engine. The Delphix Engine will run these customized operations whenever a new environment is added, or when an existing environment is rediscovered.
+
+### A Look at the Generated Code
+Recall that the `dvp init` command we ran created a file called `src/plugin_runner.py`. Open this file in your editor/IDE. You will see that this file already contains a bunch of Python code. Let's take a look at the first three blocks of code in this file.
+
+```python
+from dlpx.virtualization.platform import Mount, MountSpecification, Plugin
+
+from generated.definitions import (
+    RepositoryDefinition,
+    SourceConfigDefinition,
+    SnapshotDefinition,
+)
+```
+These `import` lines make certain functionality available to our Python code. Some of this functionality will be used just below, as we implement discovery; the rest will be used later on, as we implement ingestion and provisioning. You'll add more `import`s over time to unlock more functionality.
+
+```python
+plugin = Plugin()
+```
+
+This line creates the Python object on which we will define all of our plugin's operations. We have the ability to do this because of the `Plugin` import above.
+
+This object is stored in a variable we have elected to call `plugin`. We are free to call this variable anything we want, so long as we also change the `entryPoint` line in the `plugin_config.yml` file. For this example, we will just leave it as `plugin`.
+
+```python
+#
+# Below is an example of the repository discovery operation.
+#
+# NOTE: The decorators are defined on the 'plugin' object created above.
+#
+# Mark the function below as the operation that does repository discovery.
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+    #
+    # This is an object generated from the repositoryDefinition schema.
+    # In order to use it locally you must run the 'build -g' command provided
+    # by the SDK tools from the plugin's root directory.
+    #
+    return [RepositoryDefinition(name='1e87dc30-3cdb-4f0a-9634-07ce017d20d1')]
+```
+
+This is our first [plugin operation](/References/Plugin_Operations.md). In this case, it's defining what will happen when the Delphix Engine wants to discover repositories on an environment. Let's take a look at this code line-by-line.
+
+```python
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+```
+
+This begins the definition of a function called `repository_discovery`.
+
+We are using a Python [decorator](/References/Glossary.md#decorator) which signals to the Delphix Engine that this is the function which should be called when it is time to do repository discovery. The actual name of the function doesn't matter here. Note that we are using our `plugin` variable here as part of the decorator.
+
+The Delphix Engine will pass us information about the source environment in an argument called `source_connection`.
+
+!!! warning
+    The name of this input argument matters. That is, you'll always need to have an argument called
+    `source_connection` here. Each plugin operation has its own set of required argument names. For
+    details on which arguments apply to which operations, see the [reference section](/References/Plugin_Operations.md).
+
+```python
+    return [RepositoryDefinition(name='1e87dc30-3cdb-4f0a-9634-07ce017d20d1')]
+```
+
+This creates and returns a Python object that corresponds to the format defined by our repository schema. Because our repository schema defines exactly one string property called `name`, this Python object has a single property called `name`.
+
+Notice that the code generator has filled in the value of `name` with a random string. This results in a plugin operation that works, but which will not be very helpful for the user. We'll change this later.
+
+
+The rest of the file contains more plugin operations, and we'll be modifying them later.
+
+
+### Repository Discovery
+
+Now, we need to modify the provided [repository discovery](/References/Plugin_Operations.md#repository-discovery) operation. This operation will examine a remote environment, find any repositories, and return information about them to the Delphix Engine.
+
+As a reminder, our only external dependency on the remote environment is simply the existence of a filesystem. Since every Unix host has a filesystem, that means we will have exactly one repository per remote environment. Therefore, our repository discovery operation can be very simple.
+
+In fact, as we saw above, the default-generated `repository_discovery` function does almost exactly what we want -- it returns one single repository for any Unix host that it is asked to work with. The only problem is that it uses an unhelpful name. That's really easy to change!
+
+Replace or modify `repository_discovery` so it looks like this:
+
+```python
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+    repository = RepositoryDefinition('Repository for our First Plugin')
+    return [repository]
+```
+
+!!! tip
+    Be careful to always use consistent indentation in Python code!
+
+
+### Source Config Discovery
+
+For source configs, we will rely solely on manual discovery. Therefore, the user will tell us which directories they want to ingest from. We still have to define a source config discovery operation -- it just won't need to do much.
+
+The job of this operation is to return only source configs associated with the given `repository`. This function will be called once per repository. In our case, that means it will only be called once.
+
+Because we want to supply **no** automatically-discovered source configs, this function should simply return an empty list.
+
+In fact, `dvp init` has already generated a function for us that does exactly this.
+
+```python
+@plugin.discovery.source_config()
+def source_config_discovery(source_connection, repository):
+    return []
+```
+
+If we wanted to do automatic discovery of source configs, we'd modify this function. But, for our purposes now, the existing code is fine and we don't need to change anything.
+
+
+
+## How to Run Discovery in the Delphix Engine
+
+Let us make sure discovery works!
+
+1. Run the `dvp build` command, as before. This will build the plugin, with all of the new changes, and create an artifact.
+
+2. Run `dvp upload -e <engine> -u <user>`, as before. This will get all the new changes onto the Delphix Engine.
+
+3. Once the new plugin is uploaded, add a remote Unix environment to your engine. To do this, go to **Manage > Environments**, choose **Add Environment** from the menu, answer the questions, and **Submit**. (If you already have an environment set up, you can just refresh it instead).
+
+    To keep an eye on this discovery process, you may need to open the **Actions** tab on the UI. If any errors happen, they will be reported here.
+
+4. After the automatic discovery process completes, go to the **Databases** tab. You will see an entry for **Repository for our First Plugin**. This is the repository you created in your Python code.
+
+![Screenshot](images/PostDiscovery.png)
+
+Notice that it says *No databases found on installation*. This is because we chose not to do automatic source config discovery.
+
+However, because we have allowed manual source config discovery, you can add your own entries by clicking the plus sign (**Add Database**). Complete the information in the Add Database dialog and click Add.
+
+![Screenshot](images/AddDatabase.png)
+
+This should all look familiar. It is precisely what we defined in our source config schema. As expected, there are two entries, one for our `name` property, and one for `path`.
+
+For example, in the above screenshot, we are specifying that we want to sync the `/bin` directory
+from the remote host, and we want to call it `Binaries`. You can pick any directory and name that
+you want.
+
+Once you have added one or more source configs, you will be able to sync. This is covered on the next page.
+
+
+!!! warning
+    Once you have automatically or manually created source configs, you will not be allowed to modify your plugin's source config schema. We will cover how to deal with this later in the [upgrade section](/Versioning_And_Upgrade/Upgrade.md). For now, if you need to change your plugin's source config schema:
+
+    - You will have to delete any source configs you have manually added.
+    - Delete the plugin and its corresponding objects (dSources, Virtual Sources, etc.) if the source configs were automatically discovered.
diff --git a/docs/docs/Building_Your_First_Plugin/Initial_Setup.md b/docs/docs/Building_Your_First_Plugin/Initial_Setup.md
new file mode 100644
index 00000000..5012daa4
--- /dev/null
+++ b/docs/docs/Building_Your_First_Plugin/Initial_Setup.md
@@ -0,0 +1,114 @@
+# Initial Setup
+
+Before we begin writing plugin code, we will need to do some setup work. We will be using the `dvp` tool, which is described in the [Getting Started](/Getting_Started.md) section.
+
+The quoted examples in this section assume you're working on a Unix-like system.
+
+## Sanity check
+
+First, a reminder that it's highly recommended that you develop your plugin in a [virtual environment](https://virtualenv.pypa.io/en/latest/).
+
+Next, make sure you have a Delphix Engine ready to use, as described in the [Prerequisites](Overview.md#prerequisites) section on the previous page.
+
+Finally, let's quickly make sure that `dvp` is working! Type `dvp -h` and you should see something like the following:
+```
+(venv)$ dvp -h
+Usage: dvp [OPTIONS] COMMAND [ARGS]...
+
+  The tools of the Delphix Virtualization SDK that help develop, build, and
+  upload a plugin.
+
+Options:
+  --version      Show the version and exit.
+  -v, --verbose  Enable verbose mode. Can be repeated up to three times for
+                 increased verbosity.
+  -q, --quiet    Enable quiet mode. Can be repeated up to three times for
+                 increased suppression.
+  -h, --help     Show this message and exit.
+
+Commands:
+  build          Build the plugin code and generate upload artifact file...
+  download-logs  Download plugin logs from a target Delphix Engine to a...
+  init           Create a plugin in the root directory.
+  upload         Upload the generated upload artifact (the plugin JSON
+                 file)...
+```
+
+If this looks good, you are ready to begin!
+
+If, instead, you see something like the following, go back to [Getting Started](/Getting_Started.md) and make sure you set everything up correctly before continuing.
+```
+(venv)$ dvp
+-bash: dvp: command not found
+```
+
+## Creating a Bare Plugin
+
+To start, we will create a new directory where our new plugin code will live.
+```
+(venv)$ mkdir first_plugin
+(venv)$ cd first_plugin
+```
+
+Now that we are in our new plugin directory, we can use the `dvp` tool to create a plugin for us. This plugin will be a mere skeleton -- it will not do anything useful until we modify it in the subsequent pages.
+
+```
+(venv) first_plugin$ dvp init -n first_plugin -s STAGED -t UNIX
+```
+
+The `-n` argument here means "plugin name." We are using the name `first_plugin`.
+
+The `-s` argument tells which syncing strategy we want to use.
+
+The `-t` argument tells which host platform our plugin supports.
+
+You can type `dvp init -h` for more information about the options available.
+
+After running this command, you should see that files have been created for you:
+
+```
+(venv) first_plugin$ ls
+plugin_config.yml    schema.json    src
+```
+
+These files are described below:
+
+File | Description
+--------------------|----------------------
+`plugin_config.yml` | The [plugin config](/References/Glossary.md#plugin-config) file, which provides a list of plugin properties
+`schema.json` | Contains [schemas](/References/Glossary.md#schema) which provide custom datatype definitions
+`src/plugin_runner.py` | A Python file which will eventually contain code that handles plugin [operations](/References/Glossary.md#operation)
+
+
+Open these files in your editor/IDE and take a look at them. At this point they will not have a lot of content, but we will add to them as we go through the next few pages.
+
+
+## Building The New Plugin
+
+The new files we created above have to get [built](/References/Glossary.md#building) to produce a single [artifact](/References/Glossary.md#artifact). This is done with the `dvp` tool.
+
+```
+(venv) first_plugin$ dvp build
+```
+
+After the build, you should see that the build process has created a new file called `artifact.json`.
+```
+(venv) first_plugin$ ls
+artifact.json    plugin_config.yml    schema.json    src
+```
+
+## Uploading The New Plugin
+
+Now, using the `dvp` tool, we can upload the artifact to our Delphix Engine.
+
+```
+(venv) first_plugin$ dvp upload -e engine.company.com -u admin
+```
+
+The `-e` argument specifies the engine on which to install the plugin, and the `-u` argument gives the Delphix Engine user.
+
+You will be prompted for a password.
+
+Once the upload is finished, you can verify the installation from the **Manage > Toolkits** screen in the Delphix Engine UI.
+
+![Screenshot](images/PostUpload.png)
\ No newline at end of file
diff --git a/docs/docs/Building_Your_First_Plugin/Overview.md b/docs/docs/Building_Your_First_Plugin/Overview.md
new file mode 100644
index 00000000..29fb0650
--- /dev/null
+++ b/docs/docs/Building_Your_First_Plugin/Overview.md
@@ -0,0 +1,59 @@
+# Overview
+
+In the following few pages, we will walk through an example of making a simple, working plugin.
+
+Our plugin will virtualize simple directory trees on Unix systems. The actual contents of these directories could be anything: configuration files, documents, image libraries, etc. Our plugin will not care about the contents, and will simply treat each dataset as a directory tree full of files.
+
+## Data Flow in the Delphix Engine
+Here is a brief overview of how data moves through the Delphix Engine.
+
+### Ingestion
+It all begins with Delphix ingesting data: copying some data from what we call a [source environment](/References/Glossary.md#source-environment) onto the Delphix Engine.
+
+Plugins can use either of two basic strategies to do this copying:
+
+ - [direct linking](/References/Glossary.md#direct-linking), where the Delphix Engine pulls data directly from the source environment.
+ - [staged linking](/References/Glossary.md#staged-linking), where the plugin is responsible for pulling data from the source environment.
+
+Our plugin will use the staged linking strategy.
+
+With staged linking, Delphix exposes and mounts storage to a [staging environment](/References/Glossary.md#staging-environment). This would be an NFS share for Unix environments and iSCSI disks for Windows environments. You can use either the source environment or a different environment for staging. We will write our plugin to handle both approaches.
+
+Once Delphix mounts the storage share onto the staging environment, the plugin needs to arrange for the relevant data to be copied from the source environment onto the storage share, which is backed by Delphix Engine storage.
+
+When this initial copy is complete, Delphix will take a snapshot of the backing storage.
+
+This same basic operation is repeated on each subsequent sync: Delphix mounts the storage share, the plugin copies data onto it, then Delphix snapshots the result.
+
+### Provisioning
+**Provisioning** is when you take a Delphix Engine snapshot and create a virtual dataset from it.
+
+First the snapshot is cloned onto the Delphix Engine, then this newly-cloned data is mounted as a virtual dataset onto a **target environment**. While this new virtual dataset gets updated by its end users, the original snapshot is persistent. You can use it in a few ways:
+
+ - Provision other virtual datasets from it
+ - Rewind the virtual dataset back to the state it represents
+ - Create a physical database from it in what we call V2P: Virtual to Physical
+
+## Parts of a Plugin
+A plugin consists of three main parts. We will cover them briefly here, and then fill in more details later in the tutorial.
+
+### Plugin Config
+Plugin config is where the plugin describes itself to the Delphix Engine. What is the plugin called? What version of the plugin is being used? What type(s) of environments does the plugin work with? What features does the plugin offer?
+
+### Plugin Operations
+The plugin will need to provide operations. These are Python functions, each of which implements one small piece of functionality. This is how the plugin customizes Delphix behavior to work with the kind of dataset you're building the plugin for. One operation will handle setting up a newly-configured virtual dataset. Another will handle copying data from a source environment, and so on.
+
+Later we'll provide examples for our first plugin. See [Plugin Operations](/References/Plugin_Operations.md) for full details on the operations that are available, which are required, and what each one is required to do.
+
+### Schemas
+As part of normal operations, plugins need to generate and access certain pieces of information in order to do their job. For example, plugins that work with Postgres might need to know which port number to connect to, or which credentials to use.
+
+Defining your plugin's schemas will enable it to give the Delphix Engine the details it needs to run the operations we've built into it. Different datasets can have very different needs. The [schemas](/References/Schemas.md) you provide for your plugin will tell Delphix how to operate with your dataset.
+
+## Prerequisites
+To complete the tutorial that follows, make sure you check off the things on this list:
+
+- Download the SDK and get it working.
+- Have a running Delphix Engine, version 6.0.2.0 or above.
+- Add at least one Unix host (preferably three) to the Delphix Engine as remote environments.
+- Have a tool at hand for editing text files (mostly Python and JSON). A simple text editor will work fine, or you can use a full-fledged IDE.
diff --git a/docs/docs/Building_Your_First_Plugin/Provisioning.md b/docs/docs/Building_Your_First_Plugin/Provisioning.md
new file mode 100644
index 00000000..349c46ba
--- /dev/null
+++ b/docs/docs/Building_Your_First_Plugin/Provisioning.md
@@ -0,0 +1,156 @@
+# Provisioning
+
+## What is Provisioning?
+
+Once Delphix has a [snapshot](/References/Glossary.md#snapshot) of a dataset (for example, of a dSource), it is possible to quickly clone that snapshot to create a new [virtual dataset](/References/Glossary.md#virtual-dataset). This new virtual dataset will be made available for use on a [target environment](/References/Glossary.md#target-environment). This process is called [provisioning](/References/Glossary.md#provisioning).
+
+## Our Provisioning Strategy
+For many plugins, there is a lot of work that needs to be done before a newly-provisioned virtual dataset can be made useful. For example, it might need to be registered with a running DBMS. Or, maybe some data inside the dataset needs to be changed so it behaves properly on the target environment.
+
+In our case, however, there is very little to do. All we really require is that the files in the virtual dataset are accessible at some path on the target environment. Since the Delphix Engine takes care of mounting the data, we only need to worry about controlling *where* that data is mounted.
+
+## Defining our Provision-Related Data Formats
+
+We have already seen four custom data formats: for repositories, source configs, snapshots and
+linked sources. The final one is used for [virtual sources](/References/Glossary.md#virtual-source).
+
+Recall that, for our plugin, a VDB is just a directory full of files. There is no special
+procedure needed to enable it, no DBMS to coordinate with, etc. All we need to do is make the files
+available on the target environment.
+
+So, the only question for the user is "Where should these files live?"
+
+Open up `schema.json`, locate the `virtualSourceDefinition` section, and change it to look like this:
+
+```json
+"virtualSourceDefinition": {
+    "type": "object",
+    "additionalProperties" : false,
+    "required": ["mountLocation"],
+    "properties" : {
+        "mountLocation": {
+            "type": "string",
+            "format": "unixpath",
+            "prettyName": "Mount Location on Target Host",
+            "description": "Where to mount VDB onto the target host"
+        }
+    }
+},
+```
+
+This should look familiar from the source config schema that we did earlier. We only have one
+property, and it represents the mount location on the target environment.
+
+## Implementing Provisioning
+
+There are numerous ways for a plugin to customize the provisioning process.
+For our example plugin, we just need to do a few things:
+
+1. Tell Delphix where to mount the virtual dataset.
+2. Create a `sourceConfig` to represent each newly-provisioned virtual dataset.
+3. Modify an existing `sourceConfig`, if necessary, when the virtual dataset is refreshed or rewound.
+4. Construct snapshot-related data any time a snapshot is taken of the virtual dataset.
+
+### Controlling Mounting
+
+As we saw previously with linked sources, we need to tell Delphix where to mount the dataset. Open
+up `plugin_runner.py` and find the `plugin.virtual.mount_specification` decorator. Change that function so that
+it looks like this:
+
+```python
+@plugin.virtual.mount_specification()
+def vdb_mount_spec(virtual_source, repository):
+    mount_location = virtual_source.parameters.mount_location
+    mount = Mount(virtual_source.connection.environment, mount_location)
+    return MountSpecification([mount])
+```
+
+As we did with linked sources, we just look up what the user told us, and then package that up
+and return it to Delphix.
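+Optionally, a plugin could also sanity-check the user's input here and fail with a friendly message. Our simple plugin does not need this (the schema's `unixpath` format already constrains the value in the UI), so treat the following purely as a sketch of combining `UserError` with a provisioning operation:
+
+```python
+from dlpx.virtualization.platform import Mount, MountSpecification, Plugin
+from dlpx.virtualization.platform.exceptions import UserError
+
+plugin = Plugin()
+
+@plugin.virtual.mount_specification()
+def vdb_mount_spec(virtual_source, repository):
+    mount_location = virtual_source.parameters.mount_location
+    # Illustrative check only; the "unixpath" format in the schema already
+    # enforces a well-formed path before the user can submit the wizard.
+    if not mount_location.startswith("/"):
+        raise UserError(
+            "Invalid mount location '{}'".format(mount_location),
+            "Provide an absolute Unix path for the mount location.")
+    mount = Mount(virtual_source.connection.environment, mount_location)
+    return MountSpecification([mount])
+```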
+
+### Creating a Source Config for a new VDB
+
+Just like we saw earlier with [linked datasets](/References/Glossary.md#linked-dataset), each virtual dataset will need its own source config so that the Delphix Engine can interact with it. Our plugin is in charge of creating that source config at provision time.
+
+As a reminder, here is what our schema looks like for source configs:
+
+```json
+"sourceConfigDefinition": {
+    "type": "object",
+    "required": ["name", "path"],
+    "additionalProperties": false,
+    "properties": {
+        "name": {
+            "type": "string",
+            "prettyName": "Dataset Name",
+            "description": "User-visible name for this dataset"
+        },
+        "path": {
+            "type": "string",
+            "format": "unixpath",
+            "prettyName": "Path",
+            "description": "Full path to data location on the remote environment"
+        }
+    },
+    "nameField": "name",
+    "identityFields": ["path"]
+},
+```
+
+Thus, for each newly-cloned virtual dataset, we create a new source config object with a name and a
+path. This is done by the `configure` plugin operation.
+
+In addition to generating a new source config, the configure operation is also tasked with getting
+the newly-cloned dataset ready for use on the target environment. What this means exactly will vary
+from plugin to plugin. For our simple plugin, the dataset does not require any setup work, and so we
+only have to worry about the source config.
+
+Find the `plugin.virtual.configure` decorator and change the function to look like this:
+
+```python
+@plugin.virtual.configure()
+def configure_new_vdb(virtual_source, snapshot, repository):
+    mount_location = virtual_source.parameters.mount_location
+    name = "VDB mounted at {}".format(mount_location)
+    return SourceConfigDefinition(path=mount_location, name=name)
+```
+
+### Modifying a Source Config after Rewind or Refresh
+
+Just as a new VDB might need to be configured, a refreshed or rewound VDB might need to be
+"reconfigured" to handle the new post-refresh (or post-rewind) state of the VDB. So, just as there
+is a `configure` operation, there is also a `reconfigure` operation.
+
+The main difference between the two is that `configure` must *create* a source config, but
+`reconfigure` needs to *modify* a pre-existing source config.
+
+In our simple plugin, there is no special work to do at reconfigure time, and there is no reason
+to modify anything about the source config. We just need to write a `reconfigure` operation that
+returns the existing source config without making any changes. Find the `plugin.virtual.reconfigure` decorator and modify the function as follows:
+
+```python
+@plugin.virtual.reconfigure()
+def reconfigure_existing_vdb(virtual_source, repository, source_config, snapshot):
+    return source_config
+```
+
+### Saving Snapshot Data
+
+As with our linked sources, we don't actually have anything we need to save when VDB snapshots are
+taken. And, again, `dvp init` has created a post-snapshot operation that will work just fine for us without modification:
+
+```python
+@plugin.virtual.post_snapshot()
+def virtual_post_snapshot(virtual_source, repository, source_config):
+    return SnapshotDefinition()
+```
+
+## How to Provision in the Delphix Engine
+
+Finally, let us try it out to make sure provisioning works!
+
+1. Again, use `dvp build` and `dvp upload` to get your new changes onto your Delphix Engine.
+2. Click **Manage > Datasets**.
+3. Select the dSource you created on the previous page.
+You should see at least one snapshot, and maybe more than one if you have manually taken a snapshot, or if you have a snapshot policy in place. Select one of these snapshots and click the **Provision vFiles** icon.
+4. This will open the Provision VDB wizard. Complete the steps and select **Submit**.
+   During VDB provisioning, you will have to provide the data required by your virtual source schema. In our case, that means you will be asked to provide a value for `mountLocation`. You will also be asked to choose a target environment on which the new VDB will live. After the wizard finishes, you will see a job appear in the **Actions** tab on the right-hand side of the screen. When that job completes, your new VDB should be ready.
+5. To ensure everything has worked correctly, log in to your target environment. From there, you can examine the directory you specified as the `mountLocation`. What you should see is a copy of the directory that you linked to with your dSource.
\ No newline at end of file
diff --git a/docs/docs/Building_Your_First_Plugin/images/AddDatabase.png b/docs/docs/Building_Your_First_Plugin/images/AddDatabase.png
new file mode 100644
index 00000000..dd78f79c
Binary files /dev/null and b/docs/docs/Building_Your_First_Plugin/images/AddDatabase.png differ
diff --git a/docs/docs/Building_Your_First_Plugin/images/LinkingWizard.png b/docs/docs/Building_Your_First_Plugin/images/LinkingWizard.png
new file mode 100644
index 00000000..8f8ea4c3
Binary files /dev/null and b/docs/docs/Building_Your_First_Plugin/images/LinkingWizard.png differ
diff --git a/docs/docs/Building_Your_First_Plugin/images/PostDiscovery.png b/docs/docs/Building_Your_First_Plugin/images/PostDiscovery.png
new file mode 100644
index 00000000..bc8b4ddc
Binary files /dev/null and b/docs/docs/Building_Your_First_Plugin/images/PostDiscovery.png differ
diff --git a/docs/docs/Building_Your_First_Plugin/images/PostUpload.png b/docs/docs/Building_Your_First_Plugin/images/PostUpload.png
new file mode 100644
index 00000000..ff269487
Binary files /dev/null and b/docs/docs/Building_Your_First_Plugin/images/PostUpload.png differ
diff --git a/docs/docs/Building_Your_First_Plugin/images/Resync.png b/docs/docs/Building_Your_First_Plugin/images/Resync.png
new file mode 100644
index 00000000..114b686c
Binary files /dev/null and b/docs/docs/Building_Your_First_Plugin/images/Resync.png differ
diff --git a/docs/docs/Getting_Started.md b/docs/docs/Getting_Started.md
new file mode 100644
index 00000000..0879a0e8
--- /dev/null
+++ b/docs/docs/Getting_Started.md
@@ -0,0 +1,70 @@
+# Getting Started
+The Virtualization SDK is a Python package on [PyPI](https://pypi.org/project/dvp/). Install it in your local development environment so that you can build and upload a plugin.
+
+The SDK consists of three parts:
+
+- The `dlpx.virtualization.platform` module
+- The `dlpx.virtualization.libs` module
+- A CLI
+
+The platform and libs modules expose objects and methods needed to develop a plugin. The CLI is used to build and upload a plugin.
+
+## Requirements
+
+- macOS 10.14+, Ubuntu 16.04+, or Windows 10
+- Python 2.7 (Python 3 is not supported)
+- Java 7+
+- Delphix Engine 6.0.2.0 or above
+
+## Installation
+To install the latest version of the SDK, run:
+
+```
+$ pip install dvp
+```
+
+!!! tip "Use a Virtual Environment"
+    We highly recommend that you develop plugins inside of a virtual environment.
+    To learn more about virtual environments, refer to [Virtualenv's documentation](https://virtualenv.pypa.io/en/latest/).
+
+    The virtual environment needs to use Python 2.7. This is configured when creating the virtualenv:
+
+    ```$ virtualenv -p /path/to/python2.7/binary ENV```
+
+To install a specific version of the SDK, run:
+
+```
+$ pip install dvp==<version>
+```
+
+To upgrade an existing installation of the SDK, run:
+
+```
+$ pip install dvp --upgrade
+```
+
+!!! note "API Build Version"
+    The version of the SDK defines the version of the Virtualization Platform API your plugin will be built against.
+
+## Basic Usage
+
+Our [CLI reference](/References/CLI.md) describes each command, provides examples, and includes a help section.
+
+To build your plugin:
+
+```
+$ dvp build -c <path to plugin config> -a <path to artifact>
+```
+
+This will generate an upload artifact at `<path to artifact>`. That file can then be uploaded with:
+
+```
+$ dvp upload -e <engine address> -u <Delphix Engine user> -a <path to artifact>
+```
+
+You will be prompted for the Delphix Engine user's password.
+
+You can also use a [CLI Configuration File](/Best_Practices/CLI_Configuration_File.md) to set default values for [CLI](/References/CLI.md) command options.
+
+## Questions?
+
+If you have questions, bug reports, or feature requests, reach out to us via the [Virtualization SDK GitHub repository](https://github.com/delphix/virtualization-sdk/).
\ No newline at end of file
diff --git a/docs/docs/References/.pages b/docs/docs/References/.pages
new file mode 100644
index 00000000..c304fd9b
--- /dev/null
+++ b/docs/docs/References/.pages
@@ -0,0 +1,12 @@
+arrange:
+  - CLI.md
+  - Plugin_Config.md
+  - Decorators.md
+  - Plugin_Operations.md
+  - Schemas.md
+  - Schemas_and_Autogenerated_Classes.md
+  - Platform_Libraries.md
+  - Logging.md
+  - Workflows.md
+  - Classes.md
+  - Glossary.md
diff --git a/docs/docs/References/CLI.md b/docs/docs/References/CLI.md
new file mode 100644
index 00000000..349220ba
--- /dev/null
+++ b/docs/docs/References/CLI.md
@@ -0,0 +1,204 @@
+# CLI
+
+The CLI is installed with the SDK. To install the SDK, refer to the [Getting Started](/Getting_Started.md) section. You can also use a [CLI Configuration File](/Best_Practices/CLI_Configuration_File.md) to set default values for CLI command options.
+
+## Help
+Every command, including the CLI itself, has a `-h` flag that prints the help menu.
+
+#### Examples
+Get the CLI's help menu.
+
+```
+$ dvp -h
+Usage: dvp [OPTIONS] COMMAND [ARGS]...
+
+  The tools of the Delphix Virtualization SDK that help develop, build, and
+  upload a plugin.
+
+Options:
+  --version      Show the version and exit.
+  -v, --verbose  Enable verbose mode. Can be repeated up to three times for
+                 increased verbosity.
+
+  -q, --quiet    Enable quiet mode. Can be repeated up to three times for
+                 increased suppression.
+
+  -h, --help     Show this message and exit.
+
+Commands:
+  build          Build the plugin code and generate upload artifact file...
+  download-logs  Download plugin logs from a target Delphix Engine to a...
+  init           Create a plugin in the root directory.
+  upload         Upload the generated upload artifact (the plugin JSON
+                 file)...
+```
+
+
+Get the `build` command's help menu.
+```
+$ dvp build -h
+Usage: dvp build [OPTIONS]
+
+  Build the plugin code and generate upload artifact file using the
+  configuration provided in the plugin config file.
+
+Options:
+  -c, --plugin-config FILE    Set the path to plugin config file. This file
+                              contains the configuration required to build the
+                              plugin.
[default: plugin_config.yml]
+  -a, --upload-artifact FILE  Set the upload artifact.The upload artifact file
+                              generated by build process will be writtento
+                              this file and later used by upload command.
+                              [default: artifact.json]
+  -g, --generate-only         Only generate the Python classes from the schema
+                              definitions. Do not do a full build or create an
+                              upload artifact.  [default: False]
+  -h, --help                  Show this message and exit.
+```
+
+## Verbosity
+
+To change the verbosity of the CLI, pass up to three `-v` flags (to increase) or `-q` flags (to decrease) how much is printed to the console. This is an option on the CLI itself and can be used with any command.
+
+|Option|Output|
+|:----:|:---:|
+|-qqq|None|
+|-qq|Critical|
+|-q|Error|
+|-v|Info|
+|-vv|Debug|
+|-vvv|All|
+
+
+### Examples
+Print everything to the console.
+
+```
+$ dvp -vvv build
+```
+
+Print nothing to the console.
+
+```
+$ dvp -qqq build
+```
+
+## Commands
+
+### init
+#### Description
+Create a plugin in the root directory. The plugin will be valid but have no functionality.
+
+#### Options
+|Option                     |Description|Required|Default           |
+|------|-----------|:--------:|:-------:|
+|-r,
--root-dir
DIRECTORY|Set the plugin root directory.|N|`os.cwd()`| +|-n,
--plugin-name
TEXT|Set the name of the plugin that will be used to identify it.|N|id| +|-s,
--ingestion-strategy
[DIRECT\|STAGED]|Set the ingestion strategy of the plugin. A "direct" plugin ingests without a staging server while a "staged" plugin requires a staging server.|N|`DIRECT`| +|-t,
--host-type
[UNIX\|WINDOWS]|Set the host platform supported by the plugin.|N|`UNIX`| + + +#### Examples +Create a `UNIX` plugin in the current working directory with the `DIRECT` ingestion strategy. Here the name of the plugin will be equal to the id that is generated. + +``` +$ dvp init +``` + +Create a `UNIX` plugin in the current working directory with the `DIRECT` ingestion strategy and use `postgres` as the display name. + +``` +$ dvp init -n postgres +``` + +Create a `UNIX` plugin called `mongodb` in a custom location with the `STAGED` ingestion strategy. + +``` +$ dvp init -n mongodb -s STAGED -r /our/plugin/directory +``` + +Create a `WINDOWS` plugin called `mssql` in the current working directory with the `DIRECT` ingestion strategy. + +``` +$ dvp init -n mssql -t WINDOWS +``` + +*** +### build +#### Description +Build the plugin code and generate upload artifact file using the configuration provided in the plugin config file. + +#### Options +|Option                            |Description|Required|Default                 | +|------|-----------|:--------:|:-------:| +|-c,
--plugin-config
FILE|Set the path to the plugin config file. This file contains the configuration required to build the plugin.|N|`plugin_config.yml`|
+|-a,
--upload-artifact
FILE|Set the upload artifact. The upload artifact file generated by the build process will be written to this file and later used by the upload command.|N|`artifact.json`|
+|-g,
--generate-only|Only generate the Python classes from the schema definitions. Do not do a full build or create an upload artifact.|N|`False`|
+
+#### Examples
+Do a full build of the plugin and write the upload artifact to `./artifact.json`.
+
+This assumes the current working directory contains a plugin config file named `plugin_config.yml`.
+
+```
+$ dvp build
+```
+
+Do a partial build and just generate the Python classes from the schema definitions.
+
+This assumes the current working directory contains a plugin config file named `plugin_config.yml`.
+
+```
+$ dvp build -g
+```
+
+Do a full build of a plugin and write the artifact file to a custom location.
+
+```
+$ dvp build -c config.yml -a build/artifact.json
+```
+***
+### upload
+#### Description
+Upload the generated upload artifact (the plugin JSON file) to a target Delphix Engine. The upload artifact should be the file created by the build command; the upload will fail if the file is not readable or valid.
+#### Options
+
+|Option                            |Description|Required|Default                       |
+|-------|-----------|:--------:|:-------:|
+|-e,
--delphix-engine
TEXT|Upload plugin to the provided engine. This should be either the hostname or IP address.|Y|None| +|-u,
--user
TEXT|Authenticate to the Delphix Engine with the provided user.|Y|None| +|-a,
--upload-artifact FILE|Path to the upload artifact that was generated through build.|N|`artifact.json`| +|--wait|Block and wait for the upload job to finish on the Delphix Engine.|N|None| +|--password
TEXT|Authenticate using the provided password. If omitted, the password will be requested through a secure prompt.|N|None|
+
+
+#### Examples
+Upload artifact `build/artifact.json` to `engine.example.com` using the user `admin`. Since the password option is omitted, a secure password prompt is used instead.
+
+```
+$ dvp upload -a build/artifact.json -e engine.example.com -u admin
+Password:
+```
+
+***
+### download-logs
+#### Description
+Download plugin logs from a Delphix Engine to a local directory.
+#### Options

+|Option                            |Description|Required|Default                 |
+|-------|-----------|:--------:|:-------:|
+|-e,
--delphix-engine
TEXT|Download plugin logs from the provided Delphix engine. This should be either the hostname or IP address.|Y|None| +|-c,
--plugin-config FILE|Set the path to plugin config file. This file contains the plugin name to download logs for.|N|`plugin_config.yml`| +|-u,
--user
TEXT|Authenticate to the Delphix Engine with the provided user.|Y| None | +|-d,
--directory DIRECTORY|Specify the directory to which the plugin logs will be downloaded.|N|`os.cwd()`|
+|--password
TEXT|Authenticate using the provided password. If omitted, the password will be requested through a secure prompt.|N| None |
+
+
+#### Examples
+Download plugin logs from `engine.example.com` using the user `admin`. Since the password option is omitted, a secure password prompt is used instead.
+
+```
+$ dvp download-logs -e engine.example.com -u admin
+Password:
+```
diff --git a/docs/docs/References/Classes.md b/docs/docs/References/Classes.md
new file mode 100644
index 00000000..91928087
--- /dev/null
+++ b/docs/docs/References/Classes.md
@@ -0,0 +1,223 @@
+# Classes
+
+## DirectSource
+
+Represents a Linked Source object and its properties when using a [Direct Linking](Glossary.md#direct-linking) strategy.
+
+```python
+from dlpx.virtualization.platform import DirectSource
+
+direct_source = DirectSource(guid, connection, parameters)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+guid | String | Unique identifier for the source.
+connection | [RemoteConnection](#remoteconnection) | Connection for the source environment.
+parameters | [LinkedSourceDefinition](Schemas_and_Autogenerated_Classes.md#linkedsourcedefinition-class) | User input as per the [LinkedSource Schema](Schemas_and_Autogenerated_Classes.md#linkedsourcedefinition-schema).
+
+## StagedSource
+
+Represents a Linked Source object and its properties when using a [Staged Linking](Glossary.md#staged-linking) strategy.
+
+```python
+from dlpx.virtualization.platform import StagedSource
+
+staged_source = StagedSource(guid, source_connection, parameters, mount, staged_connection)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+guid | String | Unique identifier for the source.
+source_connection | [RemoteConnection](#remoteconnection) | Connection for the source environment.
+parameters | [LinkedSourceDefinition](Schemas_and_Autogenerated_Classes.md#linkedsourcedefinition-class) | User input as per the [LinkedSource Schema](Schemas_and_Autogenerated_Classes.md#linkedsourcedefinition-schema).
+mount | [Mount](#mount) | Mount point associated with the source.
+staged_connection | [RemoteConnection](#remoteconnection) | Connection for the staging environment.
+
+## VirtualSource
+
+Represents a Virtual Source object and its properties.
+
+```python
+from dlpx.virtualization.platform import VirtualSource
+
+virtual_source = VirtualSource(guid, connection, parameters, mounts)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+guid | String | Unique identifier for the source.
+connection | [RemoteConnection](#remoteconnection) | Connection for the source environment.
+parameters | [VirtualSourceDefinition](Schemas_and_Autogenerated_Classes.md#virtualsourcedefinition-class) | User input as per the [VirtualSource Schema](Schemas_and_Autogenerated_Classes.md#virtualsourcedefinition-schema).
+mounts | list[[Mount](#mount)] | Mount points associated with the source.
+
+## RemoteConnection
+
+Represents a connection to a source.
+
+```python
+from dlpx.virtualization.common import RemoteConnection
+
+connection = RemoteConnection(environment, user)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+environment | [RemoteEnvironment](#remoteenvironment) | Environment for the connection.
+user | [RemoteUser](#remoteuser) | User for the connection.
+
+## Status
+
+An enum used to represent the state of a linked or virtual source and whether it is functioning as expected.
+
+```python
+from dlpx.virtualization.platform import Status
+
+status = Status.ACTIVE
+```
+
+### Values
+
+Value | Description
+----- | -----------
+ACTIVE | Source is healthy and functioning as expected.
+INACTIVE | Source is not functioning as expected.
+
+## Mount
+
+Represents a mount exported and mounted to a remote host.
+
+```python
+from dlpx.virtualization.platform import Mount
+
+mount = Mount(remote_environment, mount_path)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+remote_environment | [RemoteEnvironment](#remoteenvironment) or [Reference](Schemas/#reference) | The environment on which the data set will be mounted.
+mount_path | String | The path on the remote host that has the mounted data set.
+shared_path | String | **Optional.** The path of the subdirectory of the data set to mount to the remote host.
+
+
+## OwnershipSpecification
+
+Represents how to set the ownership for a data set. This only applies to Unix hosts.
+
+```python
+from dlpx.virtualization.platform import OwnershipSpecification
+
+ownership_specification = OwnershipSpecification(uid, gid)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+uid | Integer | The user id to set the ownership of the data set to.
+gid | Integer | The group id to set the ownership of the data set to.
+
+## MountSpecification
+
+Represents properties for the mount associated with an exported data set.
+
+```python
+from dlpx.virtualization.platform import MountSpecification
+
+mount_specification = MountSpecification([mount], ownership_specification)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+mounts | list[[Mount](#mount)] | The list of mounts to export the data sets to.
+ownership_specification | [OwnershipSpecification](#ownershipspecification) | **Optional.** Controls the ownership attributes for the data set. It defaults to the environment user of the remote environment if it is not specified.
+
+## SnapshotParametersDefinition
+
+User-provided parameters for the snapshot operation. It includes a boolean property named `resync` that can be used to indicate to the plugin whether or not to initiate a full ingestion of the dSource. The parameters are only set during a manual snapshot. When using a sync policy, `resync` defaults to `false`.
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.linked.pre_snapshot()
+def linked_pre_snapshot(staged_source, repository, source_config, snapshot_parameters):
+    if snapshot_parameters.resync:
+        print(snapshot_parameters.resync)
+```
+
+> This class will be generated during build and is located with the autogenerated classes. As it is passed into the operation, importing it is not necessary.
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+resync | Boolean | Determines if this snapshot should ingest the dSource from scratch.
+
+## RemoteEnvironment
+
+Represents a remote environment.
+
+```python
+from dlpx.virtualization.common import RemoteEnvironment
+
+environment = RemoteEnvironment(name, reference, host)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+name | String | Name of the environment.
+reference | String | Unique identifier for the environment.
+host | [RemoteHost](#remotehost) | Host that belongs to the environment.
+
+
+## RemoteHost
+
+Represents a remote host, which can be Unix or Windows.
+
+```python
+from dlpx.virtualization.common import RemoteHost
+
+host = RemoteHost(name, reference, binary_path, scratch_path)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+name | String | Host address.
+reference | String | Unique identifier for the host.
+binary_path | String | Path to the Delphix-provided binaries on the host (such as `dlpx_db_exec` and `dlpx_pfexec`), which are part of the toolkit pushed to the remote host. This property is only available for Unix hosts.
+scratch_path | String | Path to the scratch directory on the host.
+
+## RemoteUser
+
+Represents a user on a remote host.
+
+```python
+from dlpx.virtualization.common import RemoteUser
+
+user = RemoteUser(name, reference)
+```
+
+### Fields
+
+Field | Type | Description
+----- | ---- | -----------
+name | String | User name.
+reference | String | Unique identifier for the user.
\ No newline at end of file
diff --git a/docs/docs/References/Decorators.md b/docs/docs/References/Decorators.md
new file mode 100644
index 00000000..20158783
--- /dev/null
+++ b/docs/docs/References/Decorators.md
@@ -0,0 +1,53 @@
+# Decorators
+
+The Virtualization SDK exposes decorators to annotate the functions that correspond to each [Plugin Operation](Plugin_Operations.md).
+The example below first instantiates a `Plugin()` object, which can then be used to tag plugin operations.
+
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+# Initialize a plugin object
+plugin = Plugin()
+
+# Use the decorator to annotate the function that corresponds to the "Virtual Source Start" Plugin Operation
+@plugin.virtual.start()
+def my_start(virtual_source, repository, source_config):
+    print "running start"
+```
+
+!!! info
+    Decorators exposed by the Virtualization SDK are Python function calls and need parentheses `()` appended at the end.
+
+Assuming the object is named `plugin`, as above, the table below lists the corresponding decorators for each plugin operation.
+
+Plugin Operation | Decorator
+---------------- | --------
+[Repository Discovery](Plugin_Operations.md#repository-discovery) | `@plugin.discovery.repository()`
+[Source Config Discovery](Plugin_Operations.md#source-config-discovery) | `@plugin.discovery.source_config()`
+[Direct Linked Source Pre-Snapshot](Plugin_Operations.md#direct-linked-source-pre-snapshot) | `@plugin.linked.pre_snapshot()`
+[Direct Linked Source Post-Snapshot](Plugin_Operations.md#direct-linked-source-post-snapshot) | `@plugin.linked.post_snapshot()`
+[Staged Linked Source Pre-Snapshot](Plugin_Operations.md#staged-linked-source-pre-snapshot) | `@plugin.linked.pre_snapshot()`
+[Staged Linked Source Post-Snapshot](Plugin_Operations.md#staged-linked-source-post-snapshot) | `@plugin.linked.post_snapshot()`
+[Staged Linked Source Start-Staging](Plugin_Operations.md#staged-linked-source-start-staging) | `@plugin.linked.start_staging()`
+[Staged Linked Source Stop-Staging](Plugin_Operations.md#staged-linked-source-stop-staging) | `@plugin.linked.stop_staging()`
+[Staged Linked Source Status](Plugin_Operations.md#staged-linked-source-status) | `@plugin.linked.status()`
+[Staged Linked Source Worker](Plugin_Operations.md#staged-linked-source-worker) | `@plugin.linked.worker()`
+[Staged Linked Source Mount Specification](Plugin_Operations.md#staged-linked-source-mount-specification) | `@plugin.linked.mount_specification()`
+[Virtual Source Configure](Plugin_Operations.md#virtual-source-configure) | `@plugin.virtual.configure()`
+[Virtual Source Unconfigure](Plugin_Operations.md#virtual-source-unconfigure) | `@plugin.virtual.unconfigure()`
+[Virtual Source Reconfigure](Plugin_Operations.md#virtual-source-reconfigure) | `@plugin.virtual.reconfigure()`
+[Virtual Source Start](Plugin_Operations.md#virtual-source-start) | `@plugin.virtual.start()`
+[Virtual Source Stop](Plugin_Operations.md#virtual-source-stop) | `@plugin.virtual.stop()`
+[Virtual Source Pre-Snapshot](Plugin_Operations.md#virtual-source-pre-snapshot) | `@plugin.virtual.pre_snapshot()`
+[Virtual Source Post-Snapshot](Plugin_Operations.md#virtual-source-post-snapshot) | `@plugin.virtual.post_snapshot()`
+[Virtual Source Mount Specification](Plugin_Operations.md#virtual-source-mount-specification) | `@plugin.virtual.mount_specification()`
+[Virtual Source Status](Plugin_Operations.md#virtual-source-status) | `@plugin.virtual.status()`
+[Repository Data Migration](Plugin_Operations.md#repository-data-migration) | `@plugin.upgrade.repository(migration_id)`
+[Source Config Data Migration](Plugin_Operations.md#source-config-data-migration) | `@plugin.upgrade.source_config(migration_id)`
+[Linked Source Data Migration](Plugin_Operations.md#linked-source-data-migration) | `@plugin.upgrade.linked_source(migration_id)`
+[Virtual Source Data Migration](Plugin_Operations.md#virtual-source-data-migration) | `@plugin.upgrade.virtual_source(migration_id)`
+[Snapshot Data Migration](Plugin_Operations.md#snapshot-data-migration) | `@plugin.upgrade.snapshot(migration_id)`
+
+!!! warning
+    A plugin should only implement the **direct** operations or the **staged** operations, based on the [plugin type](Glossary.md#plugin-type).
diff --git a/docs/docs/References/Glossary.md b/docs/docs/References/Glossary.md
new file mode 100644
index 00000000..56e6e0d2
--- /dev/null
+++ b/docs/docs/References/Glossary.md
@@ -0,0 +1,123 @@
+# Glossary
+
+
+## Artifact
+A single file that is the result of a [build](#building). It is this artifact that is distributed to users and installed onto engines.
+
+## Automatic Discovery
+[Discovery](#discovery) that is done by the Delphix Engine itself (with help from a plugin), with no need for the end user to provide any information.
+
+## Building
+The process of creating an [artifact](#artifact) from the collection of files that make up the plugin's source code.
+
+## Data Migration
+A Python function which is called as part of the upgrade process. It handles transforming data from an older format to a newer format. More details [here](/Versioning_And_Upgrade/Upgrade.md#data-migrations).
+
+## Data Migration ID
+Each data migration is tagged with a unique ID. This allows the Delphix Engine to know which data migrations need to be run, in which order, when upgrading to a new plugin version. More details [here](/Versioning_And_Upgrade/Upgrade.md#data-migrations).
+
+## Decorator
+A Python construct which is used by plugins to "tag" certain functions, so that the Delphix Engine knows which function corresponds to which plugin operation.
+
+## Direct Linking
+A strategy that involves data being ingested directly from the source environment onto the Delphix Engine, without the assistance of a staging environment.
+
+## Discovery
+The process by which the Delphix Engine learns about how a particular environment can be used for ingesting or virtualizing datasets.
+
+## dSource
+See [Linked Dataset](#linked-dataset)
+
+## Environment
+A remote system that the Delphix Engine can interact with. An environment can be used as a [source](#source-environment), [staging](#staging-environment) or [target](#target-environment) environment (or any combination of those). For example, a Linux machine that the Delphix Engine can connect to is an environment.
+
+## Environment User
+A set of user credentials that the Delphix Engine can use to interact with an [Environment](#environment). For example, a username and password to log in to a Linux machine.
+
+## Linked Dataset
+A dataset on the Delphix Engine which holds an ingested copy of a pre-existing external dataset from a source environment. A linked dataset is often called a [dSource](#dsource).
+
+## Linked Source
+An object on the Delphix Engine that holds information related to a [linked dataset](#linked-dataset).
+
+## Linking
+The process by which the Delphix Engine connects a new [dSource](#dsource) to a pre-existing dataset on a source environment.
+
+## Logging
+Logging is when a plugin writes out some human-readable information to a log file. The log file can then be examined, typically in order to debug a problem with the plugin.
+
+## Plugin Config
+A [YAML](#yaml) file containing a list of plugin properties: What is the plugin's name? What version of the plugin is this? Etc. More details [here](Plugin_Config.md).
+
+## Manual Discovery
+[Discovery](#discovery) which the end user does by manually entering the necessary information into the Delphix Engine.
+
+## Mount Specification
+A collection of information, provided by the plugin, which gives all the details about how and where [virtual datasets](#virtual-dataset) should be mounted onto [target environments](#target-environment). This term is often shortened to "Mount Spec".
+
+## Password Properties
+In [schemas](#schema), any string property can be tagged with `"format": "password"`. This will let the Delphix Engine know that the property contains sensitive information. Any such values will only be stored in encrypted format, and the UI will not display the values on screen.
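+
+For example, a schema property tagged this way might look like the following sketch (the property name `dbPassword` is illustrative, not prescribed by the SDK):
+
+```json
+{
+    "properties": {
+        "dbPassword": {
+            "type": "string",
+            "format": "password"
+        }
+    }
+}
+```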
+
+## Platform Libraries
+A set of Python functions that are provided by the Virtualization Platform. Plugins use these library functions to request that the Virtualization Platform do some task on behalf of the plugin. For example, running a Bash command on an environment, or making a log entry.
+
+## Plugin
+A tool that customizes the Delphix Engine so it knows how to interact with a particular kind of dataset.
+
+## Plugin Operation
+A piece of functionality provided by a plugin in order to customize Delphix Engine behavior to work with a particular kind of dataset. A plugin operation is implemented as a Python function.
+For example, a MySQL plugin might provide an operation called "stop" which knows how to stop a MySQL database.
+
+## Provisioning
+The process of making a virtual copy of a dataset and making it available for use on a target environment.
+
+## Replication
+Delphix allows end users to replicate data objects between Delphix Engines by creating a replication profile. Data objects that belong to a plugin can also be part of the replication profile. Refer to the [Delphix Engine Documentation](https://docs.delphix.com/docs/) for more details.
+
+## Repository
+Information that represents a set of dependencies that a dataset requires in order to be functional. For example, a particular Postgres database might require an installed Postgres 9.6 DBMS, and so its associated repository would contain all the information required to interact with that DBMS.
+
+## Schema
+A formal description of a data type. Plugins use JSON format for their [schemas](Schemas_and_Autogenerated_Classes.md#schemas-and-autogenerated-classes).
+
+## Snapshot
+A point-in-time read-only copy of a dataset. A snapshot includes associated metadata represented by the [SnapshotDefinition Schema](Schemas_and_Autogenerated_Classes.md#snapshotdefinition).
+
+## Snapshot Parameter
+User-provided parameters for the snapshot operation. Currently, the only such property is `resync`.
+
+## Source Config
+A collection of information that the Delphix Engine needs to interact with a dataset (whether [linked](#linked-dataset) or [virtual](#virtual-dataset)) on an [environment](#environment).
+
+## Source Environment
+An [environment](#environment) containing data that is ingested by the Delphix Engine.
+
+## Staged Linking
+A strategy where a [staging environment](#staging-environment) is used to coordinate the ingestion of data into a [dSource](#dsource).
+
+## Staging Environment
+An [environment](#environment) used by the Delphix Engine to coordinate ingestion from a [source environment](#source-environment).
+
+## Syncing
+The process by which the Delphix Engine ingests data from a dataset on a [source environment](#source-environment) into a [dSource](#dsource). Syncing always happens immediately after [linking](#linking), and typically is done periodically thereafter.
+
+## Target Environment
+An [environment](#environment) on which Delphix-provided virtualized datasets can be used.
+
+## Upgrade Operation
+A special plugin operation that takes data produced by an older version of a plugin, and transforms it into the format expected by the new version of the plugin.
+
+## VDB
+See [Virtual Dataset](#virtual-dataset)
+
+## Version
+A string identifier that is unique for every public release of a plugin.
+
+## Virtual Dataset
+A dataset that has been cloned from a snapshot, and whose data is stored on the Delphix Engine.
A virtual dataset is made available for use by mounting it to a [target environment](#target-environment). A virtual dataset is often called a "VDB".
+
+## Virtual Source
+An object on the Delphix Engine that holds information related to a [virtual dataset](#virtual-dataset).
+
+## YAML
+YAML is a simple language often used for configuration files. Plugins define their [plugin config](#plugin-config) using YAML.
diff --git a/docs/docs/References/Logging.md b/docs/docs/References/Logging.md
new file mode 100644
index 00000000..d4e09439
--- /dev/null
+++ b/docs/docs/References/Logging.md
@@ -0,0 +1,194 @@
+# Logging
+
+## What is logging?
+
+The Virtualization Platform keeps plugin-specific log files. A plugin can, at any point in any of its [plugin operations](Glossary.md#plugin-operation), write out some text to its log file(s). These log files can be examined later, typically to try to debug a problem with the plugin.
+
+## Overview
+
+The Virtualization Platform integrates with Python's built-in [logging framework](https://docs.python.org/2/library/logging.html). A special [Handler](https://docs.python.org/2/library/logging.html#handler-objects) is exposed by the platform at `dlpx.virtualization.libs.PlatformHandler`. This handler needs to be added to the Python logger your plugin creates. Logging statements made through Python's logging framework will then be routed to the platform.
+
+## Basic Setup
+Below is the absolute minimum needed to set up logging for the platform. Please refer to Python's [logging documentation](https://docs.python.org/2/library/logging.html) and the [example below](#example) to better understand how it can be customized.
+
+```python
+import logging
+
+from dlpx.virtualization.libs import PlatformHandler
+
+# Get the root logger.
+logger = logging.getLogger()
+logger.addHandler(PlatformHandler())
+
+# The root logger's default level is logging.WARNING.
+# Without the line below, logging statements of levels
+# lower than logging.WARNING will be suppressed.
+logger.setLevel(logging.DEBUG)
+```
+
+!!! note "Logging Setup"
+    Python's logging framework is global. Setup only needs to happen once, but where it happens is important. Any logging statements that occur before the `PlatformHandler` is added will not be logged by the platform.
+
+    It is highly recommended that the logging setup is done in the plugin's entry point module before any operations are run.
+
+!!! warning "Add the PlatformHandler to the root logger"
+    Loggers in Python have a hierarchy and all loggers are children of a special logger called the "root logger". Logging hierarchy is not always intuitive and depends on how modules are structured.
+
+    To avoid this complexity, add the `PlatformHandler` to the root logger. The root logger can be retrieved with `logging.getLogger()`.
+
+
+## Usage
+Once the `PlatformHandler` has been added to the logger, logging is done with Python's [Logger](https://docs.python.org/2/library/logging.html#logger-objects) object. Below is a simple example including the basic setup code used above:
+
+```python
+import logging
+
+from dlpx.virtualization.libs import PlatformHandler
+
+logger = logging.getLogger()
+logger.addHandler(PlatformHandler())
+
+# The root logger's default level is logging.WARNING.
+# Without the line below, logging statements of levels
+# lower than logging.WARNING will be suppressed.
+logger.setLevel(logging.DEBUG)
+
+logger.debug('debug')
+logger.info('info')
+logger.error('error')
+```
+
+### Example
+Imagine you notice that your plugin is taking a very long time to do discovery. Everything works; it just takes much longer than expected. You'd like to figure out why.
+
+!!! info
+    Refer to [Managing Scripts for Remote Execution](/Best_Practices/Managing_Scripts_For_Remote_Execution.md) for how remote scripts can be stored and retrieved.
+
+Suppose your plugin has a source config discovery operation that looks like this (code is abbreviated to be easier to follow):
+```python
+import pkgutil
+
+from dlpx.virtualization import libs
+from dlpx.virtualization.platform import Plugin
+
+from generated.definitions import RepositoryDefinition
+
+
+plugin = Plugin()
+
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+    return [RepositoryDefinition('Logging Example')]
+
+
+@plugin.discovery.source_config()
+def source_config_discovery(source_connection, repository):
+    version_result = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_db_version.sh'))
+    users_result = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_db_users.sh'))
+    db_results = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_databases.sh'))
+    status_result = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_database_statuses.sh'))
+
+    # Return an empty list for simplicity. In reality
+    # something would be done with the results above.
+    return []
+
+```
+
+To narrow down where the time is being spent, add logging, like this:
+```python
+import logging
+import pkgutil
+
+from dlpx.virtualization import libs
+from dlpx.virtualization.platform import Plugin
+
+from generated.definitions import RepositoryDefinition
+
+# This should probably be defined in its own module outside
+# of the plugin's entry point file. It is here for simplicity.
+def _setup_logger():
+    # This will log the time, level, filename, line number, and log message.
+    log_message_format = '[%(asctime)s] [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s'
+    log_message_date_format = '%Y-%m-%d %H:%M:%S'
+
+    # Create a custom formatter. This will help with diagnosability.
+    formatter = logging.Formatter(log_message_format, datefmt=log_message_date_format)
+
+    platform_handler = libs.PlatformHandler()
+    platform_handler.setFormatter(formatter)
+
+    logger = logging.getLogger()
+    logger.addHandler(platform_handler)
+
+    # By default the root logger's level is logging.WARNING.
+    logger.setLevel(logging.DEBUG)
+
+
+# Set up the logger.
+_setup_logger()
+
+# logging.getLogger(__name__) is the conventional way to get a logger in Python.
+# It returns a new logger per module and will be a child of the root logger.
+# Since we set up the root logger, nothing else needs to be done to set this
+# one up.
+logger = logging.getLogger(__name__)
+
+
+plugin = Plugin()
+
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+    return [RepositoryDefinition('Logging Example')]
+
+@plugin.discovery.source_config()
+def source_config_discovery(source_connection, repository):
+    logger.debug('About to get DB version')
+    version_result = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_db_version.sh'))
+    logger.debug('About to get DB users')
+    users_result = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_db_users.sh'))
+    logger.debug('About to get databases')
+    db_results = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_databases.sh'))
+    logger.debug('About to get DB statuses')
+    status_result = libs.run_bash(source_connection, pkgutil.get_data('resources', 'get_database_statuses.sh'))
+    logger.debug('Done collecting data')
+
+    # Return an empty list for simplicity. In reality
+    # something would be done with the results above.
+    return []
+```
+
+When you look at the log file, perhaps you'll see something like this:
+
+```
+[Worker-360|JOB-315|ENVIRONMENT_DISCOVER(UNIX_HOST_ENVIRONMENT-5)] [2019-04-30 12:10:42] [DEBUG] [python_runner.py:44] About to get DB version
+[Worker-360|JOB-316|DB_SYNC(APPDATA_CONTAINER-21)] [2019-04-30 12:19:35] [DEBUG] [python_runner.py:49] About to get DB users
+[Worker-325|JOB-280|ENVIRONMENT_REFRESH(UNIX_HOST_ENVIRONMENT-5)] [DEBUG] [plugin_runner.py:51] About to get databases
+[Worker-326|JOB-281|SOURCES_DISABLE(UNIX_HOST_ENVIRONMENT-5)] [DEBUG] [plugin_runner.py:53] About to get DB statuses
+```
+
+You can see that it only takes a few seconds for us to do each of our data collection steps, with the exception of getting the users, which takes over 13 minutes!
+
+We now know that our slowdown has something to do with how our bash script is collecting all the users. Logging has gotten us a lot closer to figuring out the problem.
+
+## How to retrieve logs
+
+Download a support bundle by going to **Help** > **Support Logs** and selecting **Download**. The logs will be in the support bundle under `log/mgmt_log/plugin_log/`.
+
+## Logging Levels
+
+Python has a number of [preset logging levels](https://docs.python.org/2/library/logging.html#logging-levels) and allows for custom ones as well. Since logging on the Virtualization Platform uses the `logging` framework, log statements of all levels are supported.
+
+However, the Virtualization Platform will map all logging levels into three files: `debug.log`, `info.log`, and `error.log` in the following way:
+
+|Python Logging Level|Logging File|
+|:------------------:|:-----------:|
+|DEBUG| debug.log|
+|INFO| info.log|
+|WARN| error.log|
+|WARNING| error.log|
+|ERROR| error.log|
+|CRITICAL| error.log|
+
+As is the case with the `logging` framework, logging statements are hierarchical: logging statements made at the `logging.DEBUG` level will be written only to `debug.log`, while logging statements made at the `logging.ERROR` level will be written to `debug.log`, `info.log`, and `error.log`.
+
+## Sensitive data
+
+Remember that logging data means writing that data out in cleartext. Make sure you never log any data that could be secret or sensitive (passwords, etc.).
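+
+As a minimal sketch of the idea (the `connect` helper and variable names are illustrative, not part of the SDK), log around secrets rather than the secrets themselves:
+
+```python
+# Fine: logs connection metadata only.
+logger.info('Connecting to database %s on port %s', db_name, port)
+
+# Never do this: the password would be written to the log file in cleartext.
+# logger.info('Connecting with password %s', password)
+
+connection = connect(db_name, port, password)  # hypothetical helper
+logger.info('Connection established')
+```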
For more details, please see our section on [sensitive data](/Best_Practices/Sensitive_Data.md).
diff --git a/docs/docs/References/Platform_Libraries.md b/docs/docs/References/Platform_Libraries.md
new file mode 100644
index 00000000..ddd42d4f
--- /dev/null
+++ b/docs/docs/References/Platform_Libraries.md
@@ -0,0 +1,192 @@
+# Platform Libraries
+A set of functions that plugins can use for tasks such as executing commands on remote hosts.
+
+## run_bash
+
+Executes a bash command on a remote Unix host.
+
+### Signature
+
+`def run_bash(remote_connection, command, variables=None, use_login_shell=False, check=False)`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+remote_connection | [RemoteConnection](Classes.md#remoteconnection) | Connection associated with the remote host to run the command on.
+command | String | Command to run on the host.
+variables | dict[String, String] | **Optional**. Environment variables to set when running the command.
+use_login_shell | boolean | **Optional**. Whether to use a login shell.
+check | boolean | **Optional**. Whether or not to raise an exception if the `exit_code` in the `RunBashResponse` is non-zero.
+
+### Returns
+An object of `RunBashResponse`
+
+Field | Type | Description
+----- | ---- | -----------
+exit_code | Integer | Exit code from the command.
+stdout | String | Stdout from the command.
+stderr | String | Stderr from the command.
+
+### Examples
+
+Calling bash with an inline command.
+
+```python
+from dlpx.virtualization import libs
+
+command = "echo 'Hi' >> /tmp/debug.log"
+variables = {"var": "val"}
+
+response = libs.run_bash(connection, command, variables)
+
+print response.exit_code
+print response.stdout
+print response.stderr
+```
+
+Using parameters to construct a bash command.
+
+```python
+from dlpx.virtualization import libs
+
+name = virtual_source.parameters.username
+port = virtual_source.parameters.port
+command = "mysqldump -u {} -p {}".format(name, port)
+
+response = libs.run_bash(connection, command)
+```
+
+Running a bash script that is saved in a directory.
+
+```python
+import pkgutil
+
+from dlpx.virtualization import libs
+
+script_content = pkgutil.get_data('resources', 'get_date.sh')
+
+# Execute script on remote host
+response = libs.run_bash(direct_source.connection, script_content)
+```
+For more information, please see the [Managing Scripts for Remote Execution](/Best_Practices/Managing_Scripts_For_Remote_Execution.md) section.
+
+## run_expect
+
+Executes an Expect (Tcl) command or script on a remote Unix host.
+
+### Signature
+
+`def run_expect(remote_connection, command, variables=None)`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+remote_connection | [RemoteConnection](Classes.md#remoteconnection) | Connection associated with the remote host to run the command on.
+command | String | Expect (Tcl) command to run.
+variables | dict[String, String] | **Optional**. Environment variables to set when running the command.
+
+### Returns
+An object of `RunExpectResponse`
+
+Field | Type | Description
+----- | ---- | -----------
+exit_code | Integer | Exit code from the command.
+stdout | String | Stdout from the command.
+stderr | String | Stderr from the command.
+
+### Example
+
+Calling expect with an inline command.
+
+```python
+from dlpx.virtualization import libs
+
+command = "puts 'Hi'"
+variables = {"var": "val"}
+
+response = libs.run_expect(connection, command, variables)
+
+print response.exit_code
+print response.stdout
+print response.stderr
+```
+
+## run_powershell
+
+Executes a powershell command on a remote Windows host.
+
+### Signature
+
+`def run_powershell(remote_connection, command, variables=None, check=False)`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+remote_connection | [RemoteConnection](Classes.md#remoteconnection) | Connection associated with the remote host to run the command on.
+command | String | Command to run on the remote host.
+variables | dict[String, String] | **Optional**. Environment variables to set when running the command.
+check | boolean | **Optional**. Whether or not to raise an exception if the `exit_code` in the `RunPowershellResponse` is non-zero.
+
+### Returns
+An object of `RunPowershellResponse`
+
+Field | Type | Description
+----- | ---- | -----------
+exit_code | Integer | Exit code from the command.
+stdout | String | Stdout from the command.
+stderr | String | Stderr from the command.
+
+### Example
+
+Calling powershell with an inline command.
+
+```python
+from dlpx.virtualization import libs
+
+command = "Write-Output 'Hi'"
+variables = {"var": "val"}
+
+response = libs.run_powershell(connection, command, variables)
+
+print response.exit_code
+print response.stdout
+print response.stderr
+```
+
+## run_sync
+
+Copies files from the remote source host directly into the dSource, without involving a staging host.
+
+### Signature
+
+`def run_sync(remote_connection, source_directory, rsync_user=None, exclude_paths=None, sym_links_to_follow=None)`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+remote_connection | [RemoteConnection](Classes.md#remoteconnection) | Connection associated with the remote host to run the command on.
+source_directory | String | Directory of files to be synced.
+rsync_user | String | **Optional**. User who has access to the directory to be synced.
+exclude_paths | list[String] | **Optional**. Paths to be excluded.
+sym_links_to_follow | list[String] | **Optional**. Symbolic links to follow, if any.
+
+### Returns
+
+None
+
+### Example
+
+```python
+from dlpx.virtualization import libs
+
+source_directory = "sourceDirectory"
+rsync_user = "rsyncUser"
+exclude_paths = ["/path1", "/path2"]
+sym_links_to_follow = ["/path3", "/path4"]
+
+libs.run_sync(connection, source_directory, rsync_user, exclude_paths, sym_links_to_follow)
+```
diff --git a/docs/docs/References/Plugin_Config.md b/docs/docs/References/Plugin_Config.md
new file mode 100644
index 00000000..7295fc79
--- /dev/null
+++ b/docs/docs/References/Plugin_Config.md
@@ -0,0 +1,73 @@
+# Plugin Config
+The plugin config is a [YAML](https://yaml.org/) file that marks the root of a plugin and defines metadata about the plugin and its structure. The config file is read at build time to generate the upload artifact.
+
+The name of the file can be specified during the build. By default, the build looks for `plugin_config.yml` in the current working directory.
+
+## Fields
+
+|Field Name|Required|Type|Description|
+|----------|:------:|:--:|-----------|
+|id|Y|string|The unique id of the plugin in a valid UUID format.|
+|name|N|string|The display name of the plugin. This will be used in the UI.
If it is not specified, the name will be equal to the id.|
+|externalVersion|N|string|The plugin's [external version](/Versioning_And_Upgrade/Versioning.md#external-version). This is a freeform string. If it is not supplied, the build number is used as an external version.|
+|buildNumber|Y|string|The plugin's [build number](/Versioning_And_Upgrade/Versioning.md#build-number). This string must conform to the format described [here](/Versioning_And_Upgrade/Versioning.md#build-number-format-rules).|
+|hostTypes|Y|list|The host type that the plugin supports. Either `UNIX` or `WINDOWS`.|
+|schemaFile|Y|string|The path to the JSON file that contains the [plugin's schema definitions](Schemas.md).

This path can be absolute or relative to the directory containing the plugin config file.| +|srcDir|Y|string|The path to the directory that contains the source code for the plugin. During execution of a plugin operation, this directory will be the current working directory of the Python interpreter. Any modules or resources defined outside of this directory will be inaccessible at runtime.

This path can be absolute or relative to the directory containing the plugin config file.| +|entryPoint|Y|string|A fully qualified Python symbol that points to the `dlpx.virtualization.platform.Plugin` object that defines the plugin.

It must be in the form `importable.module:object_name` where `importable.module` is in `srcDir`.|
+|manualDiscovery|N|boolean|True if the plugin supports manual discovery of source config objects. The default value is `true`.|
+|pluginType|Y|enum|The ingestion strategy of the plugin. Can be either `STAGED` or `DIRECT`.|
+|language|Y|enum|Must be `PYTHON27`.|
+|defaultLocale|N|enum|The locale to be used by the plugin if the Delphix user does not specify one. Plugin messages will be displayed in this locale by default. The default value is `en-us`.|
+|rootSquashEnabled|N|boolean|This dictates whether "root squash" is enabled on NFS mounts for the plugin (i.e. whether the `root` user on remote hosts is prevented from accessing the NFS mounts). Setting this to `false` allows processes usually run as `root`, like Docker daemons, access to the NFS mounts. The default value is `true`. This field only applies to Unix hosts.|
+
+## Example
+Assume the following basic plugin structure:
+
+```
+├── plugin_config.yml
+├── schema.json
+└── src
+    └── mongo_runner.py
+```
+
+`mongo_runner.py` contains:
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+
+mongodb = Plugin()
+```
+
+This is a valid plugin config for the plugin:
+
+```yaml
+id: 7cf830f2-82f3-4d5d-a63c-7bbe50c22b32
+name: MongoDB
+hostTypes:
+  - UNIX
+entryPoint: mongo_runner:mongodb
+srcDir: src/
+schemaFile: schema.json
+pluginType: DIRECT
+language: PYTHON27
+buildNumber: 0.1.0
+```
+This is a valid plugin config for the plugin with `manualDiscovery` set to `false` and an `externalVersion` set:
+
+```yaml
+id: 7cf830f2-82f3-4d5d-a63c-7bbe50c22b32
+name: MongoDB
+hostTypes:
+  - UNIX
+entryPoint: mongo_runner:mongodb
+srcDir: src/
+schemaFile: schema.json
+manualDiscovery: false
+pluginType: DIRECT
+language: PYTHON27
+externalVersion: "MongoDB 1.0"
+buildNumber: "1"
+```
diff --git a/docs/docs/References/Plugin_Operations.md b/docs/docs/References/Plugin_Operations.md
new file mode 100644
index 00000000..588ad992
--- /dev/null
+++ b/docs/docs/References/Plugin_Operations.md
@@ -0,0 +1,1310 @@
+# Plugin Operations
+
+## Summary
+!!! warning
+    If a Plugin Operation is **Required** and is not present, the corresponding Delphix Engine Operation will fail when invoked. The plugin can still be built and uploaded to the Delphix Engine.
+
+!!! warning
+    For each operation, the argument names must match exactly. For example, the Repository Discovery
+    operation must have a single argument named `source_connection`.
+
+
+Plugin Operation | **Required** | Decorator | Delphix Engine Operations
+---------------- | -------- | --------- | -------------------------
+[Repository
Discovery](#repository-discovery) | **Yes** |`discovery.repository()` | [Environment Discovery](Workflows.md#environment-discovery-refresh)
[Environment Refresh](Workflows.md#environment-discovery-refresh) +[Source Config
Discovery](#source-config-discovery) | **Yes** |`discovery.source_config()` | [Environment Discovery](Workflows.md#environment-discovery-refresh)
[Environment Refresh](Workflows.md#environment-discovery-refresh) +[Direct Linked Source
Pre-Snapshot](#direct-linked-source-pre-snapshot) | **No** | `linked.pre_snapshot()` | [Linked Source Sync](Workflows.md#linked-source-sync) +[Direct Linked Source
Post-Snapshot](#direct-linked-source-post-snapshot) | **Yes** | `linked.post_snapshot()` | [Linked Source Sync](Workflows.md#linked-source-sync) +[Staged Linked Source
Pre-Snapshot](#staged-linked-source-pre-snapshot) | **No** | `linked.pre_snapshot()` | [Linked Source Sync](Workflows.md#linked-source-sync) +[Staged Linked Source
Post-Snapshot](#staged-linked-source-post-snapshot) | **Yes** | `linked.post_snapshot()` | [Linked Source Sync](Workflows.md#linked-source-sync) +[Staged Linked Source
Start-Staging](#staged-linked-source-start-staging) | **No** | `linked.start_staging()` | [Linked Source Enable](Workflows.md#linked-source-enable) +[Staged Linked Source
Stop-Staging](#staged-linked-source-stop-staging) | **No** | `linked.stop_staging()` | [Linked Source Disable](Workflows.md#linked-source-disable)
[Linked Source Delete](Workflows.md#linked-source-delete) +[Staged Linked Source
Status](#staged-linked-source-status) | **No** |`linked.status()` | N/A +[Staged Linked Source
Worker](#staged-linked-source-worker) | **No** |`linked.worker()` | N/A +[Staged Linked Source
Mount Specification](#staged-linked-source-mount-specification) | **Yes** | `linked.mount_specification()` | [Linked Source Sync](Workflows.md#linked-source-sync)
[Linked Source Enable](Workflows.md#linked-source-enable) +[Virtual Source
Configure](#virtual-source-configure) | **Yes** | `virtual.configure()` | [Virtual Source Provision](Workflows.md#virtual-source-provision)
[Virtual Source Refresh](Workflows.md#virtual-source-refresh) +[Virtual Source
Unconfigure](#virtual-source-unconfigure) | **No** | `virtual.unconfigure()` | [Virtual Source Refresh](Workflows.md#virtual-source-refresh)
[Virtual Source Delete](Workflows.md#virtual-source-delete) +[Virtual Source
Reconfigure](#virtual-source-reconfigure) | **Yes** | `virtual.reconfigure()` | [Virtual Source Rollback](Workflows.md#virtual-source-rollback)
[Virtual Source Enable](Workflows.md#virtual-source-enable) +[Virtual Source
Start](#virtual-source-start) | **No** | `virtual.start()` | [Virtual Source Start](Workflows.md#virtual-source-start) +[Virtual Source
Stop](#virtual-source-stop) | **No** | `virtual.stop()` | [Virtual Source Stop](Workflows.md#virtual-source-stop) +[Virtual Source
Pre-Snapshot](#virtual-source-pre-snapshot) | **No** | `virtual.pre_snapshot()` | [Virtual Source Snapshot](Workflows.md#virtual-source-snapshot) +[Virtual Source
Post-Snapshot](#virtual-source-post-snapshot) | **Yes** | `virtual.post_snapshot()` | [Virtual Source Snapshot](Workflows.md#virtual-source-snapshot) +[Virtual Source
Mount Specification](#virtual-source-mount-specification) | **Yes** | `virtual.mount_specification()` | [Virtual Source Enable](Workflows.md#virtual-source-enable)
[Virtual Source Provision](Workflows.md#virtual-source-provision)
[Virtual Source Refresh](Workflows.md#virtual-source-refresh)
[Virtual Source Rollback](Workflows.md#virtual-source-rollback)
[Virtual Source Start](Workflows.md#virtual-source-start) +[Virtual Source
Status](#virtual-source-status) | **No** | `virtual.status()` | [Virtual Source Enable](Workflows.md#virtual-source-enable)
+[Repository Data Migration](#repository-data-migration) | **No** | `upgrade.repository(migration_id)` | [Upgrade](Workflows.md#upgrade)
+[Source Config Data Migration](#source-config-data-migration) | **No** | `upgrade.source_config(migration_id)` | [Upgrade](Workflows.md#upgrade)
+[Linked Source Data Migration](#linked-source-data-migration) | **No** | `upgrade.linked_source(migration_id)` | [Upgrade](Workflows.md#upgrade)
+[Virtual Source Data Migration](#virtual-source-data-migration) | **No** | `upgrade.virtual_source(migration_id)` | [Upgrade](Workflows.md#upgrade)
+[Snapshot Data Migration](#snapshot-data-migration) | **No** | `upgrade.snapshot(migration_id)` | [Upgrade](Workflows.md#upgrade)
+
+
+## Repository Discovery
+
+Discovers the set of [repositories](Glossary.md#repository) for a plugin on an [environment](Glossary.md#environment). For a DBMS, this can correspond to the set of binaries installed on a Unix host.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Environment Refresh](Workflows.md#environment-discovery-refresh)
+* [Environment Discovery](Workflows.md#environment-discovery-refresh)
+
+### Signature
+
+`def repository_discovery(source_connection)`
+
+### Decorator
+
+`discovery.repository()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+source_connection | [RemoteConnection](Classes.md#remoteconnection) | The connection associated with the remote environment to run repository discovery on.
+
+### Returns
+
+A list of [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) objects.
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import RepositoryDefinition
+
+plugin = Plugin()
+
+@plugin.discovery.repository()
+def repository_discovery(source_connection):
+    # Initialize the object, filling in all required fields
+    repository = RepositoryDefinition(installPath="/usr/bin/install")
+    # Set any additional non-required properties
+    repository.version = "1.2.3"
+    # Return one single repository
+    return [repository]
+```
+
+> The above example assumes a [Repository Schema](Schemas_and_Autogenerated_Classes.md#repositorydefinition-schema) defined as:
+
+```json
+{
+    "type": "object",
+    "additionalProperties": false,
+    "required": ["installPath"],
+    "properties": {
+        "installPath": { "type": "string" },
+        "version": { "type": "string" }
+    },
+    "identityFields": ["installPath"],
+    "nameField": "installPath"
+}
+```
+
+
+## Source Config Discovery
+
+Discovers the set of [source configs](Glossary.md#source-config) for a plugin for a [repository](Glossary.md#repository). For a DBMS, this can correspond to the set of unique databases running under a particular installation on a Unix host.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Environment Refresh](Workflows.md#environment-discovery-refresh)
+* [Environment Discovery](Workflows.md#environment-discovery-refresh)
+
+### Signature
+
+`def source_config_discovery(source_connection, repository)`
+
+### Decorator
+
+`discovery.source_config()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+source_connection | [RemoteConnection](Classes.md#remoteconnection) | The connection to the remote environment that corresponds to the repository.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository to discover source configs for.
+
+### Returns
+A list of [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) objects.
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import SourceConfigDefinition
+
+plugin = Plugin()
+
+@plugin.discovery.source_config()
+def source_config_discovery(source_connection, repository):
+    source_config = SourceConfigDefinition(name="my_name", port=1000)
+    return [source_config]
+```
+
+> The above example assumes a [Source Config Schema](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-schema) defined as:
+
+```json
+{
+    "type": "object",
+    "additionalProperties": false,
+    "required": ["name"],
+    "properties": {
+        "name": { "type": "string" },
+        "port": { "type": "number" }
+    },
+    "identityFields": ["name"],
+    "nameField": "name"
+}
+```
+
+## Direct Linked Source Pre-Snapshot
+
+Sets up a [dSource](Glossary.md#dsource) to ingest data. Only applies when using a [Direct Linking](Glossary.md#direct-linking) strategy.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Linked Source Sync](Workflows.md#linked-source-sync)
+
+### Signature
+
+`def linked_pre_snapshot(direct_source, repository, source_config)`
+
+### Decorator
+
+`linked.pre_snapshot()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+direct_source | [DirectSource](Classes.md#directsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.linked.pre_snapshot()
+def linked_pre_snapshot(direct_source, repository, source_config):
+    pass
+```
+
+## Direct Linked Source Post-Snapshot
+
+Captures metadata from a [dSource](Glossary.md#dsource) once data has been ingested. Only applies when using a [Direct Linking](Glossary.md#direct-linking) strategy.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Linked Source Sync](Workflows.md#linked-source-sync)
+
+### Signature
+
+`def linked_post_snapshot(direct_source, repository, source_config)`
+
+### Decorator
+
+`linked.post_snapshot()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+direct_source | [DirectSource](Classes.md#directsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+[SnapshotDefinition](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-class)
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import SnapshotDefinition
+
+plugin = Plugin()
+
+@plugin.linked.post_snapshot()
+def linked_post_snapshot(direct_source, repository, source_config):
+    snapshot = SnapshotDefinition()
+    snapshot.transaction_id = 1000
+    return snapshot
+```
+
+> The above example assumes a [Snapshot Schema](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-schema) defined as:
+
+```json
+{
+    "type": "object",
+    "additionalProperties": false,
+    "properties": {
+        "transactionId": { "type": "integer" }
+    }
+}
+```
+
+## Staged Linked Source Pre-Snapshot
+
+Sets up a [dSource](Glossary.md#dsource) to ingest data. Only applies when using a [Staged Linking](Glossary.md#staged-linking) strategy.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Linked Source Sync](Workflows.md#linked-source-sync)
+
+### Signature
+
+`def linked_pre_snapshot(staged_source, repository, source_config, snapshot_parameters)`
+
+### Decorator
+
+`linked.pre_snapshot()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+staged_source | [StagedSource](Classes.md#stagedsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+snapshot_parameters | [SnapshotParametersDefinition](Classes.md#snapshotparametersdefinition) | The snapshot parameters.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.linked.pre_snapshot()
+def linked_pre_snapshot(staged_source, repository, source_config, snapshot_parameters):
+    pass
+```
+
+## Staged Linked Source Post-Snapshot
+
+Captures metadata from a [dSource](Glossary.md#dsource) once data has been ingested. Only applies when using a [Staged Linking](Glossary.md#staged-linking) strategy.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Linked Source Sync](Workflows.md#linked-source-sync)
+
+### Signature
+
+`def linked_post_snapshot(staged_source, repository, source_config, snapshot_parameters)`
+
+### Decorator
+
+`linked.post_snapshot()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+staged_source | [StagedSource](Classes.md#stagedsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+snapshot_parameters | [SnapshotParametersDefinition](Classes.md#snapshotparametersdefinition) | The snapshot parameters.
+
+### Returns
+[SnapshotDefinition](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-class)
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import SnapshotDefinition
+
+plugin = Plugin()
+
+@plugin.linked.post_snapshot()
+def linked_post_snapshot(staged_source, repository, source_config, snapshot_parameters):
+    snapshot = SnapshotDefinition()
+    if snapshot_parameters.resync:
+        snapshot.transaction_id = 1000
+    else:
+        snapshot.transaction_id = 10
+    return snapshot
+```
+
+> The above example assumes a [Snapshot Schema](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-schema) defined as:
+
+```json
+{
+    "type": "object",
+    "additionalProperties": false,
+    "properties": {
+        "transactionId": { "type": "integer" }
+    }
+}
+```
+
+## Staged Linked Source Start-Staging
+
+Sets up a [Staging Source](Glossary.md#staging-source) to ingest data. Only applies when using a [Staged Linking](Glossary.md#staged-linking) strategy.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Linked Source Enable](Workflows.md#linked-source-enable)
+
+### Signature
+
+`def start_staging(staged_source, repository, source_config)`
+
+### Decorator
+
+`linked.start_staging()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+staged_source | [StagedSource](Classes.md#stagedsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.linked.start_staging()
+def start_staging(staged_source, repository, source_config):
+    pass
+```
+
+
+## Staged Linked Source Stop-Staging
+
+Quiesces a [Staging Source](Glossary.md#staging-source) to pause ingestion. Only applies when using a [Staged Linking](Glossary.md#staged-linking) strategy.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Linked Source Disable](Workflows.md#linked-source-disable)
+* [Linked Source Delete](Workflows.md#linked-source-delete)
+
+### Signature
+
+`def stop_staging(staged_source, repository, source_config)`
+
+### Decorator
+
+`linked.stop_staging()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+staged_source | [StagedSource](Classes.md#stagedsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.linked.stop_staging()
+def stop_staging(staged_source, repository, source_config):
+    pass
+```
+
+## Staged Linked Source Status
+
+Determines the status of a [Staging Source](Glossary.md#staging-source) to show end users whether it is healthy or not. Only applies when using a [Staged Linking](Glossary.md#staged-linking) strategy.
+
+### Required / Optional
+**Optional.**
+If not implemented, the platform assumes that the status is `Status.ACTIVE`.
+
+### Delphix Engine Operations
+
+N/A
+
+### Signature
+
+`def linked_status(staged_source, repository, source_config)`
+
+### Decorator
+
+`linked.status()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+staged_source | [StagedSource](Classes.md#stagedsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+[Status](Classes.md#status)
+`Status.ACTIVE` if the plugin operation is not implemented.
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from dlpx.virtualization.platform import Status
+
+plugin = Plugin()
+
+@plugin.linked.status()
+def linked_status(staged_source, repository, source_config):
+    return Status.ACTIVE
+```
+
+## Staged Linked Source Worker
+
+Monitors the status of a [Staging Source](Glossary.md#staging-source) at a regular interval. It can be used to fix up any errors on staging if it is not functioning as expected. Only applies when using a [Staged Linking](Glossary.md#staged-linking) strategy.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+N/A
+
+### Signature
+
+`def worker(staged_source, repository, source_config)`
+
+### Decorator
+
+`linked.worker()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+staged_source | [StagedSource](Classes.md#stagedsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.linked.worker()
+def worker(staged_source, repository, source_config):
+    pass
+```
+
+## Staged Linked Source Mount Specification
+
+Returns the configuration for the mounts associated with the data in the staged source. The `ownership_specification` is optional. If it is not specified, the platform will default the ownership settings to the environment user used for the Delphix operation.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Linked Source Sync](Workflows.md#linked-source-sync)
+* [Linked Source Enable](Workflows.md#linked-source-enable)
+
+### Signature
+
+`def linked_mount_specification(staged_source, repository)`
+
+### Decorator
+
+`linked.mount_specification()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+staged_source | [StagedSource](Classes.md#stagedsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+
+### Returns
+[MountSpecification](Classes.md#mountspecification)
+
+### Example
+
+!!! info
+    `ownership_specification` only applies to Unix hosts.
+
+```python
+from dlpx.virtualization.platform import Plugin
+from dlpx.virtualization.platform import Mount
+from dlpx.virtualization.platform import MountSpecification
+from dlpx.virtualization.platform import OwnershipSpecification
+
+plugin = Plugin()
+
+@plugin.linked.mount_specification()
+def linked_mount_specification(staged_source, repository):
+    mount = Mount(staged_source.staged_connection.environment, "/some/path")
+    ownership_spec = OwnershipSpecification(repository.uid, repository.gid)
+
+    return MountSpecification([mount], ownership_spec)
+```
+
+## Virtual Source Configure
+
+Configures the data in a particular snapshot to be usable on a target environment. For database data files, this may mean recovering from a crash-consistent format or backup. For application files, this may mean reconfiguring XML files or rewriting hostnames and symlinks.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Provision](Workflows.md#virtual-source-provision)
+* [Virtual Source Refresh](Workflows.md#virtual-source-refresh)
+
+### Signature
+
+`def configure(virtual_source, snapshot, repository)`
+
+### Decorator
+
+`virtual.configure()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+snapshot | [SnapshotDefinition](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-class) | The snapshot of the data set to configure.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+
+### Returns
+[SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class)
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import SourceConfigDefinition
+
+plugin = Plugin()
+
+@plugin.virtual.configure()
+def configure(virtual_source, snapshot, repository):
+    source_config = SourceConfigDefinition(name="config_name")
+    return source_config
+```
+
+> The above example assumes a [Source Config Schema](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-schema) defined as:
+
+```json
+{
+    "type": "object",
+    "required": ["name"],
+    "additionalProperties": false,
+    "properties": {
+        "name": { "type": "string" }
+    },
+    "identityFields": ["name"],
+    "nameField": "name"
+}
+```
+
+## Virtual Source Unconfigure
+
+Quiesces the virtual source on a target environment. For database data files, this may mean shutting down and unregistering a database on a host.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Refresh](Workflows.md#virtual-source-refresh)
+* [Virtual Source Delete](Workflows.md#virtual-source-delete)
+
+### Signature
+
+`def unconfigure(virtual_source, repository, source_config)`
+
+### Decorator
+
+`virtual.unconfigure()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.virtual.unconfigure()
+def unconfigure(virtual_source, repository, source_config):
+    pass
+```
+
+## Virtual Source Reconfigure
+
+Re-configures the data for a virtual source to point to the data in a prior snapshot for the virtual source. For database data files, this may mean recovering from a crash-consistent format or backup of a new snapshot. For application files, this may mean reconfiguring XML files or rewriting hostnames and symlinks.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Rollback](Workflows.md#virtual-source-rollback)
+* [Virtual Source Enable](Workflows.md#virtual-source-enable)
+
+### Signature
+
+`def reconfigure(virtual_source, repository, source_config, snapshot)`
+
+### Decorator
+
+`virtual.reconfigure()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+snapshot | [SnapshotDefinition](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-class) | The snapshot of the data set to configure.
+
+### Returns
+[SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class)
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import SourceConfigDefinition
+
+plugin = Plugin()
+
+@plugin.virtual.reconfigure()
+def reconfigure(virtual_source, repository, source_config, snapshot):
+    return SourceConfigDefinition(name="updated_config_name")
+```
+
+> The above example assumes a [Source Config Schema](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-schema) defined as:
+
+```json
+{
+    "type": "object",
+    "required": ["name"],
+    "additionalProperties": false,
+    "properties": {
+        "name": { "type": "string" }
+    },
+    "identityFields": ["name"],
+    "nameField": "name"
+}
+```
+
+## Virtual Source Start
+
+Executed whenever the data should be placed in a "running" state.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Start](Workflows.md#virtual-source-start)
+
+### Signature
+
+`def start(virtual_source, repository, source_config)`
+
+### Decorator
+
+`virtual.start()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.virtual.start()
+def start(virtual_source, repository, source_config):
+    pass
+```
+
+## Virtual Source Stop
+
+Executed whenever the data needs to be shut down.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Stop](Workflows.md#virtual-source-stop)
+
+### Signature
+
+`def stop(virtual_source, repository, source_config)`
+
+### Decorator
+
+`virtual.stop()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.virtual.stop()
+def stop(virtual_source, repository, source_config):
+    pass
+```
+
+## Virtual Source Pre-Snapshot
+
+Prepares the virtual source for taking a snapshot of the data.
+
+### Required / Optional
+**Optional.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Snapshot](Workflows.md#virtual-source-snapshot)
+
+### Signature
+
+`def virtual_pre_snapshot(virtual_source, repository, source_config)`
+
+### Decorator
+
+`virtual.pre_snapshot()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+None
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.virtual.pre_snapshot()
+def virtual_pre_snapshot(virtual_source, repository, source_config):
+    pass
+```
+
+## Virtual Source Post-Snapshot
+
+Captures metadata after a snapshot.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Snapshot](Workflows.md#virtual-source-snapshot)
+
+### Signature
+
+`def virtual_post_snapshot(virtual_source, repository, source_config)`
+
+### Decorator
+
+`virtual.post_snapshot()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source.
+
+### Returns
+[SnapshotDefinition](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-class)
+
+### Example
+
+```python
+from dlpx.virtualization.platform import Plugin
+from generated.definitions import SnapshotDefinition
+
+plugin = Plugin()
+
+@plugin.virtual.post_snapshot()
+def virtual_post_snapshot(virtual_source, repository, source_config):
+    snapshot = SnapshotDefinition()
+    snapshot.transaction_id = 1000
+    return snapshot
+```
+
+> The above example assumes a [Snapshot Schema](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-schema) defined as:
+
+```json
+{
+    "type": "object",
+    "additionalProperties": false,
+    "properties": {
+        "transactionId": { "type": "integer" }
+    }
+}
+```
+
+## Virtual Source Mount Specification
+
+Returns the configuration for the mounts associated with the data in the virtual source.
+The `ownership_specification` is optional. If it is not specified, the platform will default the ownership settings to the environment user used for the Delphix operation.
+
+### Required / Optional
+**Required.**
+
+### Delphix Engine Operations
+
+* [Virtual Source Enable](Workflows.md#virtual-source-enable)
+* [Virtual Source Provision](Workflows.md#virtual-source-provision)
+* [Virtual Source Refresh](Workflows.md#virtual-source-refresh)
+* [Virtual Source Rollback](Workflows.md#virtual-source-rollback)
+* [Virtual Source Start](Workflows.md#virtual-source-start)
+
+### Signature
+
+`def virtual_mount_specification(virtual_source, repository)`
+
+### Decorator
+
+`virtual.mount_specification()`
+
+### Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation.
+repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source.
+
+### Returns
+[MountSpecification](Classes.md#mountspecification)
+
+### Example
+
+!!! info
+    `ownership_specification` only applies to Unix hosts.
+
+```python
+from dlpx.virtualization.platform import Plugin
+from dlpx.virtualization.platform import Mount
+from dlpx.virtualization.platform import MountSpecification
+from dlpx.virtualization.platform import OwnershipSpecification
+
+plugin = Plugin()
+
+@plugin.virtual.mount_specification()
+def virtual_mount_specification(virtual_source, repository):
+    mount = Mount(virtual_source.connection.environment, "/some/path")
+    ownership_spec = OwnershipSpecification(repository.uid, repository.gid)
+
+    return MountSpecification([mount], ownership_spec)
+```
+
+
+## Virtual Source Status
+
+Determines the status of a [Virtual Source](Glossary.md#virtual-source) to show end users whether it is healthy or not.
+
+### Required / Optional
+**Optional.**
+If not implemented, the platform assumes that the status is `Status.ACTIVE`. + +### Delphix Engine Operations + +* [Virtual Source Enable](Workflows.md#virtual-source-enable) + +### Signature + +`def virtual_status(virtual_source, repository, source_config)` + +### Decorator + +`virtual.status()` + +### Arguments + +Argument | Type | Description +-------- | ---- | ----------- +virtual_source | [VirtualSource](Classes.md#virtualsource) | The source associated with this operation. +repository | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class) | The repository associated with this source. +source_config | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class) | The source config associated with this source. + +### Returns +[Status](Classes.md#status)
+`Status.ACTIVE` if the plugin operation is not implemented. + +### Example +```python +from dlpx.virtualization.platform import Plugin +from dlpx.virtualization.platform import Status + +plugin = Plugin() + +@plugin.virtual.status() +def virtual_status(virtual_source, repository, source_config): + return Status.ACTIVE +``` + + +## Repository Data Migration + +A Repository [Data Migration](Glossary.md#data-migration) migrates repository data from an older [schema](Glossary.md#schema) format to an updated schema format. + +### Required / Optional +**Optional.**
+
+!!! warning
+    You must ensure that all repository data will match your updated repository schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more repository data migrations.
+
+### Delphix Engine Operations
+
+* [Upgrade](Workflows.md#upgrade)
+
+### Signature
+
+`def migrate_repository(old_repository)`
+
+### Decorator
+
+`upgrade.repository(migration_id)`
+
+### Decorator Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+migration_id | String | The ID of this migration. An ID is a string containing one or more positive integers separated by periods. Each ID must be unique. More details [here](/Versioning_And_Upgrade/Upgrade.md#rules-for-data-migrations).
+
+### Function Arguments
+Argument | Type | Description
+-------- | ---- | -----------
+old_repository | Dictionary | The plugin-specific data associated with a repository that conforms to the previous schema.
+
+!!! warning
+    The function argument `old_repository` is a Python dictionary, where each property name appears exactly as described in the previous repository schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](Schemas_and_Autogenerated_Classes.md) based on the schema.
+
+
+### Returns
+Dictionary
+A migrated version of the `old_repository` input that must conform to the updated repository schema. + +### Example +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.upgrade.repository("2019.12.15") +def add_new_flag_to_repo(old_repository): + new_repository = dict(old_repository) + new_repository["useNewFeature"] = False + return new_repository +``` + +## Source Config Data Migration + +A Source Config [Data Migration](Glossary.md#data-migration) migrates source config data from an older [schema](Glossary.md#schema) format to an updated schema format. + +### Required / Optional +**Optional.**
+
+!!! warning
+    You must ensure that all source config data will match your source config schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more source config data migrations.
+
+### Delphix Engine Operations
+
+* [Upgrade](Workflows.md#upgrade)
+
+### Signature
+
+`def migrate_source_config(old_source_config)`
+
+### Decorator
+
+`upgrade.source_config(migration_id)`
+
+### Decorator Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+migration_id | String | The ID of this migration. An ID is a string containing one or more positive integers separated by periods. Each ID must be unique. More details [here](/Versioning_And_Upgrade/Upgrade.md#rules-for-data-migrations).
+
+### Function Arguments
+Argument | Type | Description
+-------- | ---- | -----------
+old_source_config | Dictionary | The plugin-specific data associated with a source config that conforms to the previous schema.
+
+!!! warning
+    The function argument `old_source_config` is a Python dictionary, where each property name appears exactly as described in the previous source config schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](Schemas_and_Autogenerated_Classes.md) based on the schema.
+
+
+### Returns
+Dictionary
+A migrated version of the `old_source_config` input that must conform to the updated source config schema.
+
+### Example
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.upgrade.source_config("2019.12.15")
+def add_new_flag_to_source_config(old_source_config):
+    new_source_config = dict(old_source_config)
+    new_source_config["useNewFeature"] = False
+    return new_source_config
+```
+
+## Linked Source Data Migration
+
+A Linked Source [Data Migration](Glossary.md#data-migration) migrates linked source data from an older [schema](Glossary.md#schema) format to an updated schema format.
+
+### Required / Optional
+**Optional.**
+
+!!! warning
+    You must ensure that all linked source data will match your linked source schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more linked source data migrations.
+
+### Delphix Engine Operations
+
+* [Upgrade](Workflows.md#upgrade)
+
+### Signature
+
+`def migrate_linked_source(old_linked_source)`
+
+### Decorator
+
+`upgrade.linked_source(migration_id)`
+
+### Decorator Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+migration_id | String | The ID of this migration. An ID is a string containing one or more positive integers separated by periods. Each ID must be unique. More details [here](/Versioning_And_Upgrade/Upgrade.md#rules-for-data-migrations).
+
+### Function Arguments
+Argument | Type | Description
+-------- | ---- | -----------
+old_linked_source | Dictionary | The plugin-specific data associated with a linked source that conforms to the previous schema.
+
+!!! warning
+    The function argument `old_linked_source` is a Python dictionary, where each property name appears exactly as described in the previous linked source schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](Schemas_and_Autogenerated_Classes.md) based on the schema.
+
+
+### Returns
+Dictionary
+A migrated version of the `old_linked_source` input that must conform to the updated linked source schema.
+
+### Example
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.upgrade.linked_source("2019.12.15")
+def add_new_flag_to_dsource(old_linked_source):
+    new_linked_source = dict(old_linked_source)
+    new_linked_source["useNewFeature"] = False
+    return new_linked_source
+```
+
+## Virtual Source Data Migration
+
+A Virtual Source [Data Migration](Glossary.md#data-migration) migrates virtual source data from an older [schema](Glossary.md#schema) format to an updated schema format.
+
+### Required / Optional
+**Optional.**
+
+!!! warning
+    You must ensure that all virtual source data will match your virtual source schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more virtual source data migrations.
+
+### Delphix Engine Operations
+
+* [Upgrade](Workflows.md#upgrade)
+
+### Signature
+
+`def migrate_virtual_source(old_virtual_source)`
+
+### Decorator
+
+`upgrade.virtual_source(migration_id)`
+
+### Decorator Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+migration_id | String | The ID of this migration. An ID is a string containing one or more positive integers separated by periods. Each ID must be unique. More details [here](/Versioning_And_Upgrade/Upgrade.md#rules-for-data-migrations).
+
+### Function Arguments
+Argument | Type | Description
+-------- | ---- | -----------
+old_virtual_source | Dictionary | The plugin-specific data associated with a virtual source that conforms to the previous schema.
+
+!!! warning
+    The function argument `old_virtual_source` is a Python dictionary, where each property name appears exactly as described in the previous virtual source schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](Schemas_and_Autogenerated_Classes.md) based on the schema.
+
+
+### Returns
+Dictionary
+A migrated version of the `old_virtual_source` input that must conform to the updated virtual source schema.
+
+### Example
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.upgrade.virtual_source("2019.12.15")
+def add_new_flag_to_vdb(old_virtual_source):
+    new_virtual_source = dict(old_virtual_source)
+    new_virtual_source["useNewFeature"] = False
+    return new_virtual_source
+```
+
+## Snapshot Data Migration
+
+A Snapshot [Data Migration](Glossary.md#data-migration) migrates snapshot data from an older [schema](Glossary.md#schema) format to an updated schema format.
+
+### Required / Optional
+**Optional.**
+
+!!! warning
+    You must ensure that all snapshot data will match your snapshot schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more snapshot data migrations.
+
+### Delphix Engine Operations
+
+* [Upgrade](Workflows.md#upgrade)
+
+### Signature
+
+`def migrate_snapshot(old_snapshot)`
+
+### Decorator
+
+`upgrade.snapshot(migration_id)`
+
+### Decorator Arguments
+
+Argument | Type | Description
+-------- | ---- | -----------
+migration_id | String | The ID of this migration. An ID is a string containing one or more positive integers separated by periods. Each ID must be unique. More details [here](/Versioning_And_Upgrade/Upgrade.md#rules-for-data-migrations).
+
+### Function Arguments
+Argument | Type | Description
+-------- | ---- | -----------
+old_snapshot | Dictionary | The plugin-specific data associated with a snapshot that conforms to the previous schema.
+
+!!! warning
+    The function argument `old_snapshot` is a Python dictionary, where each property name appears exactly as described in the previous snapshot schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](Schemas_and_Autogenerated_Classes.md) based on the schema.
+
+
+### Returns
+Dictionary
+A migrated version of the `old_snapshot` input that must conform to the updated snapshot schema.
+
+### Example
+```python
+from dlpx.virtualization.platform import Plugin
+
+plugin = Plugin()
+
+@plugin.upgrade.snapshot("2019.12.15")
+def add_new_flag_to_snapshot(old_snapshot):
+    new_snapshot = dict(old_snapshot)
+    new_snapshot["useNewFeature"] = False
+    return new_snapshot
+```
diff --git a/docs/docs/References/Schemas.md b/docs/docs/References/Schemas.md
new file mode 100644
index 00000000..c2a4ad54
--- /dev/null
+++ b/docs/docs/References/Schemas.md
@@ -0,0 +1,629 @@
+# Schemas
+
+## About Schemas
+
+Any time a plugin needs to store its own data, or needs to ask the user for data, the Delphix Engine needs to be told about the format of that data:
+
+* What is the set of data needed, and what should each piece be called?
+* What is the type of each piece of data: Strings? Integers? Booleans?
+
+Plugins use [schemas](Glossary.md#schema) to describe the format of such data. Once a schema is defined, it is used in three ways:
+
+1. It tells the Delphix Engine how to store the data for later use.
+2. It is used to autogenerate a custom user interface, and to validate user inputs.
+3. It is used to [autogenerate Python classes](Schemas_and_Autogenerated_Classes.md) that can be used by plugin code to access and manipulate user input and stored data.
+
+There are five plugin-customizable data formats:
+
+Delphix Object | Schema | Autogenerated Class
+-------------- | ------ | -------------------
+[Repository](Glossary.md#repository) | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-schema) | [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-class)
+[Source Config](Glossary.md#source-config) | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-schema) | [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-class)
+[Linked Source](Glossary.md#linked-source) | [LinkedSourceDefinition](Schemas_and_Autogenerated_Classes.md#linkedsourcedefinition-schema) | [LinkedSourceDefinition](Schemas_and_Autogenerated_Classes.md#linkedsourcedefinition-class)
+[Virtual Source](Glossary.md#virtual-source) | [VirtualSourceDefinition](Schemas_and_Autogenerated_Classes.md#virtualsourcedefinition-schema)| [VirtualSourceDefinition](Schemas_and_Autogenerated_Classes.md#virtualsourcedefinition-class)
+[Snapshot](Glossary.md#snapshot) | [SnapshotDefinition](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-schema) | [SnapshotDefinition](Schemas_and_Autogenerated_Classes.md#snapshotdefinition-class)
+
+
+## JSON Schemas
+
+Plugins use JSON schemas for their custom datatypes. There are three main things to understand about them, which are explained just below:
+
+* What is JSON?
+* What is a JSON schema?
+* How has Delphix augmented JSON schemas?
+
+### JSON
+JSON stands for "JavaScript Object Notation". JSON is a data-interchange format that is intended to be precise and also somewhat human-readable. Here are some simple examples of data in JSON format:
+
+JSON | Description
+---- | ------------
+`"hello"` | A string. Note the double quotes.
+`17` | An integer
+`true` | A boolean
+`{"name": "Julie", "age": 37}` | A JSON object with two fields, `name` (a string), and `age` (an integer). Objects are denoted with curly braces.
+`[true, false, true]` | A JSON array with three booleans. Arrays are denoted with square brackets.
+
+For more details on JSON, please see [json.org](https://www.json.org).
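+
+Since plugin code is written in Python, it can help to see how these JSON values map onto Python types. Below is a minimal sketch using Python's standard `json` module (shown purely for illustration; it is not part of the SDK):
+
+```python
+import json
+
+# Each JSON example from the table above parses into a native Python type.
+print(json.loads('"hello"'))                       # 'hello' (a string)
+print(json.loads('17'))                            # 17 (an integer)
+print(json.loads('true'))                          # True (a boolean)
+print(json.loads('{"name": "Julie", "age": 37}'))  # a dict with two keys
+print(json.loads('[true, false, true]'))           # a list of three booleans
+```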
+
+### JSON Schemas
+
+The "JSON schema" format is built on top of JSON. This adds some special rules and keywords that are intended to facilitate the *description* of the format of data (whereas "raw" JSON is intended for storing data).
+
+Here is an example of a JSON schema that defines a (simplified) US address:
+
+```json
+{
+    "type": "object",
+    "required": ["name", "streetNumber", "street", "city", "state", "zip5"],
+    "additionalProperties": false,
+    "properties": {
+        "name": { "type": "string" },
+        "streetNumber": { "type": "string" },
+        "street": { "type": "string" },
+        "unit": { "type": "string" },
+        "city": { "type": "string", "pattern": "^[A-Z][A-Za-z ]*$" },
+        "state": { "type": "string", "pattern": "^[A-Z]{2}$" },
+        "zip5": { "type": "string", "pattern": "^[0-9]{5}"},
+        "zipPlus4": { "type": "string", "pattern": "^[0-9]{4}"}
+    }
+}
+```
+
+Note that this is perfectly valid JSON data. It's a JSON object with four fields: `type` (a JSON string), `required` (a JSON array), `additionalProperties` (a JSON boolean), and `properties`. `properties`, in turn, is a JSON object with eight fields, each of which is a JSON object, with its own properties, etc.
+
+But this isn't *just* a JSON object. This is a JSON schema. It uses special keywords like `type`, `required`, and `additionalProperties`. These have specially-defined meanings in the context of JSON schemas.
+
+Here is a list of the special keywords used by the above schema. Note that this is only a small subset of JSON schema keywords.
+
+keyword | description
+------- | -----------
+`additionalProperties` | Determines whether the schema allows properties that are not explicitly listed in the `properties` specification. Must be `true` or `false`.
+`pattern` | Used with string types to specify a regular expression that the property must conform to.
+`required` | A list of required properties. Properties not listed in this list are optional.
+`string` | Used with `type` to declare that a property must be a string.
+`type` | Specifies a datatype. Common values are `object`, `array`, `number`, `integer`, `boolean`, and `string`.
+
+Some points to note about the address schema above:
+
+* Because of the `required` list, all valid addresses must have fields called `name`, `streetNumber` and so on.
+* `unit` and `zipPlus4` do not appear in the `required` list, and therefore are optional.
+* Because of `additionalProperties` being `false`, valid addresses cannot make up their own fields like `nickname` or `doorbellLocation`.
+* Because of the `pattern`, any `state` field in a valid address must consist of exactly two capital letters.
+* Similarly, `city` must only contain letters and spaces, and `zip5` and `zipPlus4` must only contain digits.
+* Each property has its own valid subschema that describes its own type definition.
+
+Here is a JSON object that conforms to the above schema:
+
+```json
+{
+    "name": "Delphix",
+    "streetNumber": "220",
+    "street": "Congress St.",
+    "unit": "200",
+    "city": "Boston",
+    "state": "MA",
+    "zip5": "02210"
+}
+```
+
+!!! info
+    A common point of confusion is the distinction between a JSON schema and a JSON object that conforms to a schema. Remember, a schema describes the form of data. In our example, the schema *describes* what an address looks like. The address itself is not a schema.
+
+
+For much more detail on JSON schemas, including which keywords are available, what they mean, and where you can use them, see [json-schema.org](https://json-schema.org).
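+
+While the Delphix Engine performs this sort of validation for you, it can be handy to experiment with a schema locally. Below is a minimal sketch that validates the address example above using the third-party `jsonschema` Python package (an assumption made purely for illustration; it is not part of the SDK):
+
+```python
+import jsonschema  # third-party package, installable with "pip install jsonschema"
+
+address_schema = {
+    "type": "object",
+    "required": ["name", "streetNumber", "street", "city", "state", "zip5"],
+    "additionalProperties": False,
+    "properties": {
+        "name": {"type": "string"},
+        "streetNumber": {"type": "string"},
+        "street": {"type": "string"},
+        "unit": {"type": "string"},
+        "city": {"type": "string", "pattern": "^[A-Z][A-Za-z ]*$"},
+        "state": {"type": "string", "pattern": "^[A-Z]{2}$"},
+        "zip5": {"type": "string", "pattern": "^[0-9]{5}"},
+        "zipPlus4": {"type": "string", "pattern": "^[0-9]{4}"}
+    }
+}
+
+address = {
+    "name": "Delphix",
+    "streetNumber": "220",
+    "street": "Congress St.",
+    "unit": "200",
+    "city": "Boston",
+    "state": "MA",
+    "zip5": "02210"
+}
+
+# A conforming object validates silently.
+jsonschema.validate(address, address_schema)
+
+# A non-conforming object raises jsonschema.ValidationError: "ma" does not
+# match the "^[A-Z]{2}$" pattern required for the "state" property.
+address["state"] = "ma"
+jsonschema.validate(address, address_schema)
+```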
+
+
+### Delphix-specific Extensions to JSON Schema
+
+The JSON schema vocabulary is designed to be extensible for special uses, and Delphix has taken advantage of this to add some new Delphix-specific keywords.
+
+The list below outlines each of these keywords, and provides minimal examples of how they might be used.
+
+#### `description`
+
+| Summary | |
+|-------- | |
+| Required or Optional? | Optional|
+| Where? | In any property subschema, at the same level as `type`.|
+
+The `description` keyword can optionally appear on any property. If it does appear, it is used by the UI as explanatory text for the UI widget associated with the property. If it does not appear, then no explanatory text is shown.
+
+In this example, the UI would show "User-readable name for the provisioned database" in small text under the widget.
+
+```json
+{
+    "properties": {
+        "name": {
+            "type": "string",
+            "description": "User-readable name for the provisioned database"
+        }
+    }
+}
+```
+
+#### `identityFields`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Required (for repository and source config schemas only) |
+| Where? | At the top level of a repository or source config schema, at the same level as `type` and `properties`.|
+
+The `identityFields` keyword is a list of property names that, together, serve as a unique identifier for a repository or source config.
+
+When a plugin's [automatic discovery](Glossary.md#automatic-discovery) code is called, it will return a list of repositories (or source configs). The Delphix Engine needs to be able to compare this new list with whatever repositories it already knows about.
+
+For example, suppose the engine already knows about a single repository with data `{"dbname": "my_databsae", "path": "/var/db/db01"}` (note the misspelling!). Then, suppose that automatic discovery is re-run and it returns repository data `{ "dbname": "my_database", "path": "/var/db/db01"}`.
+
+What should the Delphix Engine do? Should it conclude that "my_databsae" has been deleted, and there is a completely new repository named "my_database"? Or, should it conclude that we still have the same old repository, but with an updated name?
+
+`identityFields` is used to handle this. When the engine compares "new" data with "old" data, it concludes that they belong to the same repository if **all** of the identity fields match. If any of the identity fields do not match, then the "new" repository data is judged to represent a different repository than the old data.
+
+`identityFields` is **required** for [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-schema) and [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-schema) schemas, and may not be used in any other schemas.
+
+In this example, we'll tell the Delphix Engine that `path` is the sole unique identifier.
+
+```json
+{
+    "properties": {
+        "dbname": {"type": "string"},
+        "path": {"type": "string"}
+    },
+    "identityFields": ["path"]
+}
+```
+
+#### `nameField`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Required (for repository and source config schemas only) |
+| Where? | At the top level of a repository or source config schema, at the same level as `type` and `properties`.|
+
+The `nameField` keyword specifies a single property that is to be used to name the object in the Delphix Engine. The property must be a string field. This keyword is used at the same level as `properties`. It is **required** for [RepositoryDefinition](Schemas_and_Autogenerated_Classes.md#repositorydefinition-schema) and [SourceConfigDefinition](Schemas_and_Autogenerated_Classes.md#sourceconfigdefinition-schema) schemas, and may not be used in any other schemas.
+
+In this example, we will use the `path` property as the user-visible name.
+
+```json
+{
+    "properties": {
+        "path": { "type": "string" },
+        "port": { "type": "integer" }
+    },
+    "nameField": "path"
+}
+```
+
+So, if we have a repository object that looks like
+
+```json
+{
+    "path": "/usr/bin",
+    "port": 8800
+}
+```
+then the user will be able to refer to this object as `/usr/bin`.
+
+#### `ordering`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Optional |
+| Where? | At the top level, same level as `type` and `properties`.|
+
+The `ordering` keyword can be used to order the fields when the UI is autogenerated.
+
+```json
+{
+    "properties": {
+        "path": { "type": "string" },
+        "port": { "type": "integer" }
+    },
+    "ordering": ["port", "path"]
+}
+```
+
+In the example above, the `port` will be the first field in the autogenerated UI wizard, followed by `path`.
+
+#### `password`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Optional|
+| Where? | As the value for the `format` keyword in any string property's subschema.|
+
+The `password` keyword can be used to specify the `format` of a `string`. (Note that `format` is a standard keyword and is not Delphix-specific). If a property is tagged as a password, then the UI will never show the value on screen, and the value will be encrypted before being stored as described [here](/Best_Practices/Sensitive_Data.md).
+
+In this example, the `dbPass` field on any object will be treated as a password.
+
+```json
+{
+    "properties": {
+        "dbPass": {
+            "type": "string",
+            "format": "password"
+        }
+    }
+}
+```
+
+#### `prettyName`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Optional|
+| Where? | In any property subschema, at the same level as `type`.|
+
+The `prettyName` keyword can optionally appear on any property. If it does appear, it is used by the UI as a title for the UI widget associated with the property. If it does not appear, then the name of the property is used.
+
+In this example, the user would see "Name of Database" on the UI, instead of just "name".
+
+```json
+{
+    "properties": {
+        "name": {
+            "type": "string",
+            "prettyName": "Name of Database"
+        }
+    }
+}
+```
+
+#### `unixpath`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Optional|
+| Where? | As the value for the `format` keyword in any string property's subschema.|
+
+The `unixpath` keyword is used to specify the `format` of a string. This will allow the Delphix Engine to verify and enforce that a particular field can be parsed as a valid Unix path.
+
+```json
+{
+    "properties": {
+        "datapath": {
+            "type": "string",
+            "format": "unixpath"
+        }
+    }
+}
+```
+
+#### `reference`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Optional|
+| Where? | As the value for the `format` keyword in any string property's subschema.|
+
+The `reference` keyword is used to specify the `format` of a string.
+This will allow the plugin author to ask the user to select [environments](/References/Glossary.md#environment) and [environment users](/References/Glossary.md#environment-user) on the Delphix Engine.
+
+```json
+"properties": {
+    "env": {
+        "type": "string",
+        "format": "reference",
+        "referenceType": "UNIX_HOST_ENVIRONMENT"
+    },
+    "envUser": {
+        "type": "string",
+        "format": "reference",
+        "referenceType": "HOST_USER",
+        "matches": "env"
+    }
+}
+```
+
+#### `referenceType`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Optional|
+| Where? | In any property subschema of type `string` and format `reference`, at the same level as type.|
+
+The `referenceType` keyword is used to specify the [reference](#reference) type. Possible values:
+
+* [Environment](/References/Glossary.md#environment): `UNIX_HOST_ENVIRONMENT`
+* [Environment User](/References/Glossary.md#environment-user): `HOST_USER`
+
+```json
+"properties": {
+    "env": {
+        "type": "string",
+        "format": "reference",
+        "referenceType": "UNIX_HOST_ENVIRONMENT"
+    },
+    "envUser": {
+        "type": "string",
+        "format": "reference",
+        "referenceType": "HOST_USER",
+        "matches": "env"
+    }
+}
+```
+
+#### `matches`
+
+| Summary | |
+| ------- | |
+| Required or Optional? | Optional|
+| Where? | In any property subschema of type `string` and format `reference`, at the same level as type.|
+
+The `matches` keyword is used to map an [environment user](/References/Glossary.md#environment-user) to an [environment](/References/Glossary.md#environment) by specifying the environment's property name.
+
+```json
+"properties": {
+    "env": {
+        "type": "string",
+        "format": "reference",
+        "referenceType": "UNIX_HOST_ENVIRONMENT"
+    },
+    "envUser": {
+        "type": "string",
+        "format": "reference",
+        "referenceType": "HOST_USER",
+        "matches": "env"
+    }
+}
+```
+
+In the example above, environment user `envUser` maps to environment `env`.
+
+## JSON Schema Limitations
+
+To be able to autogenerate Python classes, there are some restrictions on the JSON schemas that are supported.
+
+### Generation Error
+There are some valid JSON schemas that will cause a property to not be generated in the autogenerated Python classes. Unfortunately, the build command fails silently in these cases, so be sure to inspect the generated classes and verify that all of the properties exist.
+
+#### Multiple types
+For the `type` keyword, only a single type may be specified. Arrays of types are not supported.
+```json
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "additionalProperties": false,
+        "properties": {
+            "data": {
+                "type": ["integer", "string"]
+            }
+        },
+        "nameField": "data",
+        "identityFields": ["data"]
+    }
+}
+```
+The `data` property will not even exist:
+```python
+from generated.definitions import RepositoryDefinition
+
+repository = RepositoryDefinition()
+repository.data = 3
+print(repository)
+```
+This would print:
+```
+{}
+```
+
+#### Combining schemas
+If any of the following keywords is specified, the property will not exist in the class:
+* anyOf
+* allOf
+* oneOf
+* not
+```json
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "additionalProperties": false,
+        "properties": {
+            "any": {
+                "anyOf": [
+                    {"type": "integer", "minimum": 2},
+                    {"type": "string", "minLength": 4}
+                ]
+            },
+            "one": {
+                "oneOf": [
+                    {"type": "integer", "minimum": 3},
+                    {"type": "integer", "maximum": 5}
+                ]
+            }
+        },
+        "nameField": "data",
+        "identityFields": ["data"]
+    }
+}
+```
+The `any` and `one` properties would not exist:
+```python
+from generated.definitions import RepositoryDefinition
+
+repository = RepositoryDefinition()
+repository.any = "string"
+repository.one = 6
+print(repository)
+```
+This would print:
+```
+{}
+```
+
+#### Object Additional Properties
+The `additionalProperties` keyword inside an object property can either be a boolean or a JSON schema. If it is a schema, it needs to have the keyword `type`. If `additionalProperties` is set to a JSON schema, then the `properties` keyword will be ignored. If the keyword is set to a boolean, the behavior will be the same regardless of whether it is set to `true` or `false`.
+
+```json
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "additionalProperties": false,
+        "properties": {
+            "dataOne": {
+                "type": "object",
+                "additionalProperties": {"type": "string"}
+            },
+            "dataTwo": {
+                "type": "object",
+                "additionalProperties": {"type": "string"},
+                "properties": {
+                    "data": {"type": "string"}
+                }
+            },
+            "dataThree": {
+                "type": "object",
+                "additionalProperties": false,
+                "properties": {
+                    "data": {"type": "string"}
+                }
+            },
+            "dataFour": {
+                "type": "object",
+                "additionalProperties": true,
+                "properties": {
+                    "data": {"type": "string"}
+                }
+            },
+            "dataFive": {
+                "type": "object",
+                "additionalProperties": false
+            },
+            "dataSix": {
+                "type": "object",
+                "additionalProperties": true
+            }
+        },
+        "nameField": "dataOne",
+        "identityFields": ["dataOne"]
+    }
+}
+```
+From the schema above, the property pairs `dataOne` and `dataTwo`, `dataThree` and `dataFour`, and `dataFive` and `dataSix` will have identical validation. The first two will validate that the object passed in is a dict whose keys and values are both of `string` type. The next two will create a new inner Python class called either `OtherDefinitionDataThree` or `OtherDefinitionDataFour`; the generator optimizes by creating only one, as they are identical. Inside that object will be one property, `data`. The last two properties will validate that the object passed in is a dict whose keys are of `string` type, while the values can be anything.
+
+### Validation Keywords
+In general, all property types are supported; however, some validation keywords will be ignored during the execution of the Python code. This means that if these keywords are used, no error will be raised within Python if the object violates the schema. Listed below are the keywords that are ignored for each type. Some have examples to make this clearer.
+
+#### Number / Integer
+* multipleOf
+```json
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "additionalProperties": false,
+        "properties": {
+            "data": {
+                "type": "integer",
+                "multipleOf": 2
+            }
+        },
+        "nameField": "data",
+        "identityFields": ["data"]
+    }
+}
+```
+This would work even though it would fail the schema check:
+```python
+from generated.definitions import RepositoryDefinition
+
+repository = RepositoryDefinition()
+repository.data = 3
+```
+
+#### Arrays / Tuples
+* additionalItems
+* minItems
+* maxItems
+* uniqueItems
+* contains
+* items
+    * Must be a single type, not an array (tuples are not supported):
+```json
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "additionalProperties": false,
+        "properties": {
+            "data": {
+                "type": "array",
+                "items": [
+                    {"type": "number"},
+                    {"type": "string"},
+                    {"type": "boolean"}
+                ]
+            }
+        },
+        "nameField": "data",
+        "identityFields": ["data"]
+    }
+}
+```
+This would work even though it would fail the schema check:
+```python
+from generated.definitions import RepositoryDefinition
+
+repository = RepositoryDefinition()
+repository.data = ["string", False, 3]
+```
+
+#### Objects
+* minProperties
+* maxProperties
+* patternProperties
+* dependencies
+* propertyNames
+
+#### Enumerated values
+If the `enum` keyword is used within a subobject, `type` has to be `string`.
+```json
+{
+    "repositoryDefinition": {
+        "type": "object",
+        "additionalProperties": false,
+        "properties": {
+            "stringData": {
+                "enum": ["A", "B", "C"]
+            },
+            "arrayData": {
+                "type": "array",
+                "items": {
+                    "enum": ["DO", "RE", "MI"]
+                }
+            },
+            "objectData": {
+                "type": "object",
+                "additionalProperties": {
+                    "enum": ["ONE", "TWO", "THREE"]
+                }
+            },
+            "definedObjectData": {
+                "type": "object",
+                "properties": {
+                    "objectStringData": {
+                        "enum": ["o.A", "o.B", "o.C"]
+                    }
+                },
+                "additionalProperties": false
+            }
+        },
+        "nameField": "stringData",
+        "identityFields": ["stringData"]
+    }
+}
```
+In the above example there are four properties: `stringData`, `arrayData`, `objectData`, and `definedObjectData`. Validation works for `stringData` but is skipped for the other three. In fact, `definedObjectData`, which would usually generate a separate Python class because it defines `properties`, does not generate one at all.
+This means the following code would work even though it would fail the schema check:
+```python
+from generated.definitions import RepositoryDefinition
+
+repository = RepositoryDefinition()
+repository.array_data = [10, 11, 12]
+repository.object_data = {"key": 1}
+repository.defined_object_data = {"key": 2}
+```
+And this code would actually fail during Python execution with a `GeneratedClassesError` saying `Invalid enum value D for 'string_data', must be one of [A, B, C] if defined.`:
+```python
+from generated.definitions import RepositoryDefinition
+
+repository = RepositoryDefinition()
+repository.string_data = "D"
+```
diff --git a/docs/docs/References/Schemas_and_Autogenerated_Classes.md b/docs/docs/References/Schemas_and_Autogenerated_Classes.md
new file mode 100644
index 00000000..990efdb6
--- /dev/null
+++ b/docs/docs/References/Schemas_and_Autogenerated_Classes.md
@@ -0,0 +1,232 @@
+# Schemas and Autogenerated Classes
+
+[Plugin operations](Plugin_Operations.md) will sometimes need to work with data in these custom formats. For example, the `configure` operation will accept snapshot data as an input, and must produce source config data as an output.
+
+To enable this, Python classes are generated from each of the plugin's schemas. The aforementioned inputs and outputs are instances of these autogenerated classes.
+
+!!! info
+    Autogenerated Python code will use `lower_case_with_underscores` as attribute names as per Python variable naming conventions.
+    That is, if we were to use `mountLocation` as the schema property name, it would be called
+    `mount_location` in the generated Python code.
+
+!!! info
+    Note that, wherever they can, these generated Python classes will enforce the constraints made by the schema. For example, if a property is listed as `required` in the schema, then every Python object will be required to always have this property. This implies that all `required` fields must be given values when the object is constructed. For various examples of this, see the examples below.
+
+## RepositoryDefinition
+
+Defines properties used to identify a [Repository](Glossary.md#repository).
+
+### RepositoryDefinition Schema
+
+The plugin must also decide on a [**name**](Schemas.md#namefield) field and a set of [**identityFields**](Schemas.md#identityfields) to display and uniquely identify the [repository](Glossary.md#repository).
+
+```json
+{
+    "type": "object",
+    "additionalProperties": false,
+    "required": ["name", "path"],
+    "properties": {
+        "name": { "type": "string" },
+        "path": { "type": "string" }
+    },
+    "identityFields": ["name", "path"],
+    "nameField": "name"
+}
+```
+
+### RepositoryDefinition Class
+
+Autogenerated based on the [RepositoryDefinition Schema](#repositorydefinition-schema).
+
+```python
+class RepositoryDefinition:
+
+    def __init__(self, name, path):
+        self._inner_dict = {"name": name, "path": path}
+```
+
+> To use the class:
+
+```python
+from generated.definitions import RepositoryDefinition
+
+# Since both properties are required, they must be specified when constructing the object
+repository = RepositoryDefinition(name="name", path="/some/path")
+```
+
+## SourceConfigDefinition
+
+Defines properties used to identify a [Source Config](Glossary.md#source-config).
+
+### SourceConfigDefinition Schema
+
+The plugin must also decide on a [**name**](Schemas.md#namefield) field and a set of [**identityFields**](Schemas.md#identityfields) to display and uniquely identify the [source config](Glossary.md#source-config).
+
+```json
+{
+    "type": "object",
+    "additionalProperties": false,
+    "required": ["name"],
+    "properties": {
+        "name": { "type": "string" },
+        "path": { "type": "string" }
+    },
+    "identityFields": ["name"],
+    "nameField": "name"
+}
+```
+
+### SourceConfigDefinition Class
+
+Autogenerated based on the [SourceConfigDefinition Schema](#sourceconfigdefinition-schema).
+
+```python
+class SourceConfigDefinition:
+
+    def __init__(self, name, path=None):
+        self._inner_dict = {"name": name, "path": path}
+```
+
+> To use the class:
+
+```python
+from generated.definitions import SourceConfigDefinition
+
+# A source config that only defines the required "name" property.
+source_config1 = SourceConfigDefinition(name="sc1")
+
+# A source config that defines both "name" and "path".
+source_config2 = SourceConfigDefinition(name="sc2", path="/some/path")
+
+# Setting the optional "path" property after construction
+source_config3 = SourceConfigDefinition(name="sc3")
+install_path = find_install_path()
+source_config3.path = install_path
+```
+
+## LinkedSourceDefinition
+
+Defines properties used to identify [linked sources](Glossary.md#linked-source).
### LinkedSourceDefinition Schema

```json
{
    "type": "object",
    "required": ["name", "port"],
    "additionalProperties": false,
    "properties": {
        "name": { "type": "string" },
        "port": { "type": "integer" }
    }
}
```

### LinkedSourceDefinition Class

Autogenerated based on the [LinkedSourceDefinition Schema](#linkedsourcedefinition-schema).

```python
class LinkedSourceDefinition:

    def __init__(self, name, port):
        self._inner_dict = {"name": name, "port": port}
```

> To use the class:

```python
from generated.definitions import LinkedSourceDefinition

source = LinkedSourceDefinition(name="name", port=1000)

# Retrieve the properties from the object and log them
name = source.name
port = source.port
logger.debug("Creating source \"{}\" with port {}".format(name, port))
```

## VirtualSourceDefinition

Defines properties used to identify [virtual sources](Glossary.md#virtual-source).

### VirtualSourceDefinition Schema

```json
{
    "type": "object",
    "required": ["name", "port"],
    "additionalProperties": false,
    "properties": {
        "name": { "type": "string" },
        "port": { "type": "integer" }
    }
}
```

### VirtualSourceDefinition Class

Autogenerated based on the [VirtualSourceDefinition Schema](#virtualsourcedefinition-schema).

```python
class VirtualSourceDefinition:

    def __init__(self, name, port):
        self._inner_dict = {"name": name, "port": port}
```

> To use the class:

```python
from generated.definitions import VirtualSourceDefinition

source = VirtualSourceDefinition(name="name", port=1000)
```

## SnapshotDefinition

Defines properties used to describe [snapshots](Glossary.md#snapshot).

### SnapshotDefinition Schema

```json
{
    "type": "object",
    "properties": {
        "version": { "type": "string" },
        "transaction_id": { "type": "integer" }
    }
}
```

### SnapshotDefinition Class

Autogenerated based on the [SnapshotDefinition Schema](#snapshotdefinition-schema).
```python
class SnapshotDefinition:

    def __init__(self, version=None, transaction_id=None):
        self._inner_dict = {
            "version": version,
            "transaction_id": transaction_id
        }
```

> To use the class:

```python
from generated.definitions import SnapshotDefinition

# A snapshot with both properties defined at construction time
snapshot1 = SnapshotDefinition(version="1.2.3", transaction_id=1000)

# A snapshot with properties defined after construction
snapshot2 = SnapshotDefinition()
snapshot2.version = "2.0.0"
snapshot2.transaction_id = 1500

# A snapshot that omits the optional "transaction_id" property
snapshot3 = SnapshotDefinition(version="1.0.0")
```
diff --git a/docs/docs/References/Workflows.md b/docs/docs/References/Workflows.md new file mode 100644 index 00000000..1d8cde14 --- /dev/null +++ b/docs/docs/References/Workflows.md @@ -0,0 +1,65 @@
# Workflows

## Legend

![Screenshot](images/Legend.png)

## Environment Discovery / Refresh

![Screenshot](images/EnvironmentDiscoveryRefresh.png)

## Linked Source Sync

![Screenshot](images/LinkedSourceSync.png)

## Linked Source Enable

![Screenshot](images/LinkedSourceEnable.png)

## Linked Source Disable

![Screenshot](images/LinkedSourceDisable.png)

## Linked Source Delete

![Screenshot](images/LinkedSourceDelete.png)

## Virtual Source Provision

![Screenshot](images/VirtualSourceProvision.png)

## Virtual Source Snapshot

![Screenshot](images/VirtualSourceSnapshot.png)

## Virtual Source Refresh

![Screenshot](images/VirtualSourceRefresh.png)

## Virtual Source Rollback

![Screenshot](images/VirtualSourceRollback.png)

## Virtual Source Delete

![Screenshot](images/VirtualSourceDelete.png)

## Virtual Source Start

![Screenshot](images/VirtualSourceStart.png)

## Virtual Source Stop

![Screenshot](images/VirtualSourceStop.png)

## Virtual Source Enable

![Screenshot](images/VirtualSourceEnable.png)

## Virtual Source Disable

![Screenshot](images/VirtualSourceDisable.png)

## Upgrade

![Screenshot](images/PluginUpgrade.png)
diff --git a/docs/docs/References/html/DirectLinkedSourceSync.html b/docs/docs/References/html/DirectLinkedSourceSync.html new file mode 100644 index 00000000..d5a79344 --- /dev/null +++ b/docs/docs/References/html/DirectLinkedSourceSync.html @@ -0,0 +1,12 @@
Draw.io Diagram
+ + + diff --git a/docs/docs/References/html/EnvironmentDiscoveryRefresh.html b/docs/docs/References/html/EnvironmentDiscoveryRefresh.html new file mode 100644 index 00000000..8ddef263 --- /dev/null +++ b/docs/docs/References/html/EnvironmentDiscoveryRefresh.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/LinkedSourceDelete.html b/docs/docs/References/html/LinkedSourceDelete.html new file mode 100644 index 00000000..359a9cb7 --- /dev/null +++ b/docs/docs/References/html/LinkedSourceDelete.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/LinkedSourceDisable.html b/docs/docs/References/html/LinkedSourceDisable.html new file mode 100644 index 00000000..5c13fc16 --- /dev/null +++ b/docs/docs/References/html/LinkedSourceDisable.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/LinkedSourceEnable.html b/docs/docs/References/html/LinkedSourceEnable.html new file mode 100644 index 00000000..5ea97627 --- /dev/null +++ b/docs/docs/References/html/LinkedSourceEnable.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/LinkedSourceSync.html b/docs/docs/References/html/LinkedSourceSync.html new file mode 100644 index 00000000..3ef99231 --- /dev/null +++ b/docs/docs/References/html/LinkedSourceSync.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/PluginUpgrade.html b/docs/docs/References/html/PluginUpgrade.html new file mode 100644 index 00000000..97a4f75c --- /dev/null +++ b/docs/docs/References/html/PluginUpgrade.html @@ -0,0 +1,12 @@ + + + +diagrams.net + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceDelete.html b/docs/docs/References/html/VirtualSourceDelete.html new file mode 100644 index 00000000..9e42c12e --- /dev/null +++ b/docs/docs/References/html/VirtualSourceDelete.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceDisable.html b/docs/docs/References/html/VirtualSourceDisable.html new file mode 100644 index 00000000..f86ca376 --- /dev/null +++ b/docs/docs/References/html/VirtualSourceDisable.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceEnable.html b/docs/docs/References/html/VirtualSourceEnable.html new file mode 100644 index 00000000..8465f2dd --- /dev/null +++ b/docs/docs/References/html/VirtualSourceEnable.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceProvision.html b/docs/docs/References/html/VirtualSourceProvision.html new file mode 100644 index 00000000..0a039f91 --- /dev/null +++ b/docs/docs/References/html/VirtualSourceProvision.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceRefresh.html b/docs/docs/References/html/VirtualSourceRefresh.html new file mode 100644 index 00000000..12046bf1 --- /dev/null +++ b/docs/docs/References/html/VirtualSourceRefresh.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceRollback.html b/docs/docs/References/html/VirtualSourceRollback.html new file mode 100644 index 00000000..5ad9056a --- /dev/null +++ b/docs/docs/References/html/VirtualSourceRollback.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceSnapshot.html b/docs/docs/References/html/VirtualSourceSnapshot.html new file mode 100644 index 00000000..e10183b9 --- /dev/null +++ b/docs/docs/References/html/VirtualSourceSnapshot.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceStart.html b/docs/docs/References/html/VirtualSourceStart.html new file mode 100644 index 00000000..1f1913d7 --- /dev/null +++ b/docs/docs/References/html/VirtualSourceStart.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/html/VirtualSourceStop.html b/docs/docs/References/html/VirtualSourceStop.html new file mode 100644 index 00000000..ba60ba8e --- /dev/null +++ b/docs/docs/References/html/VirtualSourceStop.html @@ -0,0 +1,12 @@ + + + +Draw.io Diagram + + + + +
+ + + diff --git a/docs/docs/References/images/DirectLinkedSourceSync.png b/docs/docs/References/images/DirectLinkedSourceSync.png new file mode 100644 index 00000000..fbe1f4b9 Binary files /dev/null and b/docs/docs/References/images/DirectLinkedSourceSync.png differ diff --git a/docs/docs/References/images/EnvironmentDiscoveryRefresh.png b/docs/docs/References/images/EnvironmentDiscoveryRefresh.png new file mode 100644 index 00000000..61902f70 Binary files /dev/null and b/docs/docs/References/images/EnvironmentDiscoveryRefresh.png differ diff --git a/docs/docs/References/images/Legend.png b/docs/docs/References/images/Legend.png new file mode 100644 index 00000000..5291f2bb Binary files /dev/null and b/docs/docs/References/images/Legend.png differ diff --git a/docs/docs/References/images/LinkedSourceDelete.png b/docs/docs/References/images/LinkedSourceDelete.png new file mode 100644 index 00000000..e772c42e Binary files /dev/null and b/docs/docs/References/images/LinkedSourceDelete.png differ diff --git a/docs/docs/References/images/LinkedSourceDisable.png b/docs/docs/References/images/LinkedSourceDisable.png new file mode 100644 index 00000000..2460b5a1 Binary files /dev/null and b/docs/docs/References/images/LinkedSourceDisable.png differ diff --git a/docs/docs/References/images/LinkedSourceEnable.png b/docs/docs/References/images/LinkedSourceEnable.png new file mode 100644 index 00000000..e331b85c Binary files /dev/null and b/docs/docs/References/images/LinkedSourceEnable.png differ diff --git a/docs/docs/References/images/LinkedSourceSync.png b/docs/docs/References/images/LinkedSourceSync.png new file mode 100644 index 00000000..778b47f8 Binary files /dev/null and b/docs/docs/References/images/LinkedSourceSync.png differ diff --git a/docs/docs/References/images/PluginUpgrade.png b/docs/docs/References/images/PluginUpgrade.png new file mode 100644 index 00000000..18bc6fa2 Binary files /dev/null and b/docs/docs/References/images/PluginUpgrade.png differ diff --git a/docs/docs/References/images/VirtualSourceDelete.png b/docs/docs/References/images/VirtualSourceDelete.png new file mode 100644 index 00000000..ab8f2cff Binary files /dev/null and b/docs/docs/References/images/VirtualSourceDelete.png differ diff --git a/docs/docs/References/images/VirtualSourceDisable.png b/docs/docs/References/images/VirtualSourceDisable.png new file mode 100644 index 00000000..8a486342 Binary files /dev/null and b/docs/docs/References/images/VirtualSourceDisable.png differ diff --git a/docs/docs/References/images/VirtualSourceEnable.png b/docs/docs/References/images/VirtualSourceEnable.png new file mode 100644 index 00000000..736eb83f Binary files /dev/null and b/docs/docs/References/images/VirtualSourceEnable.png differ diff --git a/docs/docs/References/images/VirtualSourceProvision.png b/docs/docs/References/images/VirtualSourceProvision.png new file mode 100644 index 00000000..533269ec Binary files /dev/null and b/docs/docs/References/images/VirtualSourceProvision.png differ diff --git a/docs/docs/References/images/VirtualSourceRefresh.png b/docs/docs/References/images/VirtualSourceRefresh.png new file mode 100644 index 00000000..5c2816fb Binary files /dev/null and b/docs/docs/References/images/VirtualSourceRefresh.png differ diff --git a/docs/docs/References/images/VirtualSourceRollback.png b/docs/docs/References/images/VirtualSourceRollback.png new file mode 100644 index 00000000..cffa4dfa Binary files /dev/null and b/docs/docs/References/images/VirtualSourceRollback.png differ diff --git 
a/docs/docs/References/images/VirtualSourceSnapshot.png b/docs/docs/References/images/VirtualSourceSnapshot.png new file mode 100644 index 00000000..78d4fe7b Binary files /dev/null and b/docs/docs/References/images/VirtualSourceSnapshot.png differ diff --git a/docs/docs/References/images/VirtualSourceStart.png b/docs/docs/References/images/VirtualSourceStart.png new file mode 100644 index 00000000..d305f005 Binary files /dev/null and b/docs/docs/References/images/VirtualSourceStart.png differ diff --git a/docs/docs/References/images/VirtualSourceStop.png b/docs/docs/References/images/VirtualSourceStop.png new file mode 100644 index 00000000..7c305264 Binary files /dev/null and b/docs/docs/References/images/VirtualSourceStop.png differ diff --git a/docs/docs/Release_Notes/.pages b/docs/docs/Release_Notes/.pages new file mode 100644 index 00000000..8396df7a --- /dev/null +++ b/docs/docs/Release_Notes/.pages @@ -0,0 +1,4 @@ +arrange: + - 2.0.0 + - 1.0.0 + - 0.4.0 diff --git a/docs/docs/Release_Notes/0.4.0/.pages b/docs/docs/Release_Notes/0.4.0/.pages new file mode 100644 index 00000000..72484dec --- /dev/null +++ b/docs/docs/Release_Notes/0.4.0/.pages @@ -0,0 +1,3 @@ +arrange: + - 0.4.0.md + - 0.4.0_Breaking_Changes.md diff --git a/docs/docs/Release_Notes/0.4.0/0.4.0.md b/docs/docs/Release_Notes/0.4.0/0.4.0.md new file mode 100644 index 00000000..65ce0e0f --- /dev/null +++ b/docs/docs/Release_Notes/0.4.0/0.4.0.md @@ -0,0 +1,64 @@ +# Release - Early Preview 2 (v0.4.0) + +To install or upgrade the SDK, refer to instructions [here](/Getting_Started.md#installation). + +## New & Improved + +* Added a new CLI command [download-logs](/References/CLI.md#download-logs) to enable downloading plugin generated logs from the Delphix Engine. +* Added an optional argument named `check` to the following [platform library](/References/Platform_Libraries.md) functions: + * [run_bash](/References/Platform_Libraries.md#run_bash) + * [run_powershell](/References/Platform_Libraries.md#run_powershell) + + With `check=true`, the platform library function checks the `exit_code` and raises an exception if it is non-zero. + +* Modified [init](/References/CLI.md#init) to auto-generate default implementations for all required plugin operations. +* Improved [build](/References/CLI.md#build) validation for: + * Required [plugin operations](/References/Plugin_Operations.md). + * Incorrect [plugin operation](/References/Plugin_Operations.md) argument names. + * [Plugin Config](/References/Plugin_Config.md) `entryPoint`: The `entryPoint` is now imported during the [build](/References/CLI.md#build) as part of the validation. + * [Schemas](/References/Schemas.md): Validated to conform to the [JSON Schema Draft-07 Specification](http://json-schema.org/specification-links.html#draft-7). +* Improved runtime validation and error messages for: + * Objects returned from [plugin operations](/References/Plugin_Operations.md). + * [Platform Classes](/References/Classes.md) during instantiation. + * [Platform Library](/References/Platform_Libraries.md) function arguments. + +* Added support for Docker based plugins by specifying `rootSquashEnabled: false` in the [plugin config](/References/Plugin_Config.md). +* Added Job and thread information to plugin generated log messages to increase diagnosability and observability. 
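As an example of the new `check` argument described above, the following is a minimal sketch; `direct_source` and `logger` are assumed to come from the enclosing plugin operation:

```python
from dlpx.virtualization import libs

# With check=True, run_bash raises an exception on a non-zero exit code,
# so the plugin no longer needs to inspect result.exit_code by hand.
result = libs.run_bash(direct_source.connection, "cat /some/config/file", check=True)
logger.debug("Config contents: {}".format(result.stdout))
```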
+ +## Breaking Changes + +* A new argument `snapshot_parameters` was added to the following **staged** plugin operations: + * [Staged Linked Source Pre-Snapshot](/References/Plugin_Operations.md#staged-linked-source-pre-snapshot) + * [Staged Linked Source Post-Snapshot](/References/Plugin_Operations.md#staged-linked-source-post-snapshot) + + This argument will allow the end user to indicate to the plugin whether or not to initiate a full ingestion for a dSource. More details about the new argument are [here](/Building_Your_First_Plugin/Data_Ingestion.md#syncing). + + [**Detailed steps to detect and make changes.**](/Release_Notes/0.4.0/0.4.0_Breaking_Changes.md#new-argument-snapshot_parameters) + +* Properties of the [StagedSource](/References/Classes.md#stagedsource) class were modified: + * `connection` was renamed to `source_connection`. + * `staged_connection` was added to allow connecting to the staging environment. + + This will enable plugins to connect to both the source and staging environments. More details about these properties are [here](/References/Classes.md#stagedsource). + + [**Detailed steps to detect and make changes.**](/Release_Notes/0.4.0/0.4.0_Breaking_Changes.md#stagedsource-properties-modified) + +## Fixed + +* Allow access to nested package resources via `pkgutil.get_data`. +* Fixed Out of Memory exceptions. +* Fixed missing or incorrectly populated properties for the following classes: + + | Class | Properties | + | ----- | ---------- | + | [VirtualSource](/References/Classes.md#virtualsource) | `mounts` | + | [RemoteUser](/References/Classes.md#remoteuser) | `name` | + | [RemoteEnvironment](/References/Classes.md#remoteenvironment) | `name` | + | [RemoteHost](/References/Classes.md#remotehost) | `name` `binary_path` | + +* Updated Job warnings during discovery to display the underlying Python exceptions if one is raised by the plugin operations. +* Recreate the plugin's log directory if a plugin is deleted and re-uploaded to the Delphix Engine. +* Mark incorrectly provisioned VDBs as unusable and prevent subsequent Delphix Engine operations on such VDBs. +* Better error messages when incorrect environment types are used for Platform Libraries. +* Better error messages when a plugin's [schema](/References/Schemas.md) is updated and the plugin is re-uploaded to the Delphix Engine, with clear instructions on how to proceed. +* Fixed [build](/References/CLI.md#build) failures on Windows. diff --git a/docs/docs/Release_Notes/0.4.0/0.4.0_Breaking_Changes.md b/docs/docs/Release_Notes/0.4.0/0.4.0_Breaking_Changes.md new file mode 100644 index 00000000..b67fd272 --- /dev/null +++ b/docs/docs/Release_Notes/0.4.0/0.4.0_Breaking_Changes.md @@ -0,0 +1,123 @@ +# Breaking Changes - Early Preview 2 (v.0.4.0) + +## New Argument `snapshot_parameters` +A new argument `snapshot_parameters` was added to the following **staged** plugin operations: + +* [Staged Linked Source Pre-Snapshot](/References/Plugin_Operations.md#staged-linked-source-pre-snapshot) +* [Staged Linked Source Post-Snapshot](/References/Plugin_Operations.md#staged-linked-source-post-snapshot) + +This argument will allow the end user to indicate to the plugin whether or not to initiate a full ingestion for a dSource. More details about the new argument are [here](/Building_Your_First_Plugin/Data_Ingestion.md). + +### What is affected +This argument applies only to **staged** plugins. 
The plugin's source code will have to be updated for the following staged plugin operations: + +* [Staged Linked Source Pre-Snapshot](/References/Plugin_Operations/#staged-linked-source-pre-snapshot): This plugin operation is optional and will need to be updated if the plugin implements it. +* [Staged Linked Source Post-Snapshot](/References/Plugin_Operations/#staged-linked-source-post-snapshot): This plugin operation is required and will need to be updated. + +### How does it fail +[build](/References/CLI.md#build) will fail with the following error message if the new argument is not added to the affected staged plugin operations: + +```bash +$ dvp build +Error: Number of arguments do not match in method staged_post_snapshot. Expected: ['staged_source', 'repository', 'source_config', 'snapshot_parameters'], Found: ['repository', 'source_config', 'staged_source']. +Error: Number of arguments do not match in method staged_pre_snapshot. Expected: ['staged_source', 'repository', 'source_config', 'snapshot_parameters'], Found: ['repository', 'source_config', 'staged_source']. + +0 Warning(s). 2 Error(s). + +BUILD FAILED. +``` + +### How to fix it +Update the affected staged plugin operations to include the new argument `snapshot_parameters`. + +* Previous releases + +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.linked.pre_snapshot() +def linked_pre_snapshot_prior(staged_source, repository, source_config): + # This was the function signature prior to 0.4.0 + pass + +@plugin.linked.post_snapshot() +def linked_post_snapshot_prior(staged_source, repository, source_config): + # This was the function signature prior to 0.4.0 + return SnapshotDefinition() +``` + +* 0.4.0 + +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.linked.pre_snapshot() +def linked_pre_snapshot_040(staged_source, repository, source_config, snapshot_parameters): + # Updated function signature in 0.4.0 + pass + +@plugin.linked.post_snapshot() +def linked_post_snapshot_040(staged_source, repository, source_config, snapshot_parameters): + # Updated function signature in 0.4.0 + return SnapshotDefinition() + +``` + +## StagedSource Properties Modified +Properties of the [StagedSource](/References/Classes.md#stagedsource) class were modified: + +* `connection` was renamed to `source_connection`. +* `staged_connection` was added to allow connecting to the staging environment. + +This will enable plugins to connect to both the source and staging environments. More details about these properties are [here](/References/Classes.md#stagedsource). + +### What is affected +This change applies only to **staged** plugins.
#### Required Changes
The plugin's source code will have to be updated for any staged plugin operations that access the `connection` property of a [StagedSource](/References/Classes.md#stagedsource) object.

#### Optional Changes
The plugin can choose to use the new `staged_connection` property to connect to the staging environment of a dSource.

### How does it fail
Any Delphix Engine operation that calls a plugin operation that has not been updated will fail, with the following stack trace as part of the user exception's output:

```python
AttributeError: 'StagedSource' object has no attribute 'connection'
```

### How to fix it
Update any staged plugin operations that access the renamed property.

* Previous releases

```python
from dlpx.virtualization.platform import Plugin
from dlpx.virtualization import libs

plugin = Plugin()

@plugin.linked.pre_snapshot()
def linked_pre_snapshot_prior(staged_source, repository, source_config):
    # 'connection' was the name of this staged_source property prior to 0.4.0
    libs.run_bash(staged_source.connection, 'date')
```

* 0.4.0

```python
from dlpx.virtualization.platform import Plugin
from dlpx.virtualization import libs

plugin = Plugin()

@plugin.linked.pre_snapshot()
def linked_pre_snapshot_040(staged_source, repository, source_config, snapshot_parameters):
    # The property was renamed to 'source_connection' in 0.4.0. (Note that
    # 0.4.0 also added the snapshot_parameters argument to this operation.)
    libs.run_bash(staged_source.source_connection, 'date')
```
diff --git a/docs/docs/Release_Notes/1.0.0/.pages b/docs/docs/Release_Notes/1.0.0/.pages new file mode 100644 index 00000000..f3a9c777 --- /dev/null +++ b/docs/docs/Release_Notes/1.0.0/.pages @@ -0,0 +1,3 @@
arrange:
    - 1.0.0.md
    - 1.0.0_Breaking_Changes.md
diff --git a/docs/docs/Release_Notes/1.0.0/1.0.0.md b/docs/docs/Release_Notes/1.0.0/1.0.0.md new file mode 100644 index 00000000..73769b82 --- /dev/null +++ b/docs/docs/Release_Notes/1.0.0/1.0.0.md @@ -0,0 +1,42 @@
# Release - GA (v1.0.0)

To install or upgrade the SDK, refer to instructions [here](/Getting_Started.md#installation).

## New & Improved

* Added support for a CLI configuration file to specify default options for `dvp` commands. More details [here](/Best_Practices/CLI_Configuration_File.md).
* Improved speed and scalability of plugin operations:
    * Reduced startup time for plugin operations from seconds to milliseconds.
    * Improved memory utilization on the Delphix Engine to enable a large number of plugin operations to execute in parallel.

* Added the ability for plugins to raise user visible messages with a custom message, action and output related to a failure during a plugin operation. Refer to the [User Visible Errors](/Best_Practices/User_Visible_Errors.md) section for more details.
* Improved validation for type and range checks for autogenerated classes.
* Improved security for the plugin's runtime when executed on the Delphix Engine.
* Removed the Delphix Engine feature flag `PYTHON_TOOLKITS` as the Delphix Engine supports plugins built on the SDK by default. The [Getting Started](/Getting_Started.md#installation) section has been updated as well.


## Breaking Changes

* The following fields in the [Plugin Config](/References/Plugin_Config.md) were renamed:

    | Previous | Updated |
    | -------- | ------- |
    | `name` | `id` |
    | `prettyName` | `name` |

    Additionally, the `id` is now required to be a UUID with the format: `[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}`.
[**Detailed steps to detect and make changes.**](/Release_Notes/1.0.0/1.0.0_Breaking_Changes.md#plugin-config-fields-renamed)

## Fixed

* Updated remote host operations to execute as the [RemoteUser](/References/Classes/#remoteuser) specified instead of the primary environment user.
* Fixed an incorrect user exception when the required plugin operation `linked.post_snapshot` was missing.
* Updated [run_expect](/Platform_Libraries/#run_expect) to return `exit_code`, `stdout`, and `stderr` like the other platform library functions.
* Fixed [run_powershell](/Platform_Libraries/#run_powershell) to not automatically redirect `stderr` to `stdout`.
* Ensured that all exceptions raised by the [Staged Linked Source Worker](/References/Plugin_Operations/#staged-linked-source-worker) plugin operation are converted to faults for the user.
* Enabled the [MountSpecification](/References/Classes.md#mountspecification) to be constructed with `mounts` that refer to different environments.
* Sanitized the Python stack traces from exceptions during plugin execution and removed paths that reference where the plugin was built.
* Removed a spurious build warning for `DIRECT` plugins that incorrectly suggested implementing the [Staged Linked Source Mount Specification](/References/Plugin_Operations.md#staged-linked-source-mount-specification) plugin operation.
* Removed a spurious message `global name 'exit' is not defined` which was displayed when a plugin library function failed.
* Updated `manualDiscovery` to be optional in the [Plugin Config](/References/Plugin_Config.md). The default value will be `True`.
\ No newline at end of file diff --git a/docs/docs/Release_Notes/1.0.0/1.0.0_Breaking_Changes.md b/docs/docs/Release_Notes/1.0.0/1.0.0_Breaking_Changes.md new file mode 100644 index 00000000..d4c43e1c --- /dev/null +++ b/docs/docs/Release_Notes/1.0.0/1.0.0_Breaking_Changes.md @@ -0,0 +1,98 @@
# Breaking Changes - GA (v.1.0.0)

## Plugin Config Fields Renamed
The following fields in the [Plugin Config](/References/Plugin_Config/) were renamed:

| Previous | Updated |
| -------- | ------- |
| `name` | `id` |
| `prettyName` | `name` |

Additionally, the `id` is now required to be a UUID with the format: `[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}`. This will allow plugins to be uniquely identified across plugin developers.

### What is affected
All plugins built with v0.3.0 or v0.4.0 will be affected. The [Plugin Config](/References/Plugin_Config) fields will have to be updated.

### How does it fail
[dvp build](/References/CLI.md#build) will fail with the following error message if the [Plugin Config](/References/Plugin_Config) fields are not updated:

```bash
$ dvp build
Error: Additional properties are not allowed ('prettyName' was unexpected) on []
{
  "pluginType": "DIRECT",
  "name": "My Plugin",
  "language": "PYTHON27",
  "manualDiscovery": true,
  "hostTypes": [
    "UNIX"
  ],
  "version": "0.1.0",
  "entryPoint": "plugin_runner:plugin",
  "srcDir": "src",
  "prettyName": "My Plugin",
  "schemaFile": "schema.json"
}

Error: 'id' is a required property on []
{
  "pluginType": "DIRECT",
  "name": "My Plugin",
  "language": "PYTHON27",
  "manualDiscovery": true,
  "hostTypes": [
    "UNIX"
  ],
  "version": "0.1.0",
  "entryPoint": "plugin_runner:plugin",
  "srcDir": "src",
  "prettyName": "My Plugin",
  "schemaFile": "schema.json"
}
Validation failed on plugin_config.yml.
0 Warning(s). 2 Error(s)

BUILD FAILED.
```

### How to fix it
Rename the [Plugin Config](/References/Plugin_Config) fields. Make sure that the `id` is a UUID of the format `[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}`. A UUID can be generated manually using an online generator or via Python:

```bash
$ python
>>> import uuid
>>> uuid.uuid4()
UUID('4174f1b8-45df-43cc-8e4c-21d309c17861')
```

* Previous releases

```
name: my_plugin
prettyName: My Plugin
version: 0.1.0
language: PYTHON27
hostTypes:
- UNIX
pluginType: DIRECT
manualDiscovery: true
entryPoint: plugin_runner:plugin
srcDir: src
schemaFile: schema.json
```

* 1.0.0

```
id: 4174f1b8-45df-43cc-8e4c-21d309c17861
name: My Plugin
version: 0.1.0
language: PYTHON27
hostTypes:
- UNIX
pluginType: DIRECT
manualDiscovery: true
entryPoint: plugin_runner:plugin
srcDir: src
schemaFile: schema.json
```
\ No newline at end of file diff --git a/docs/docs/Release_Notes/2.0.0/.pages b/docs/docs/Release_Notes/2.0.0/.pages new file mode 100644 index 00000000..1034b1d4 --- /dev/null +++ b/docs/docs/Release_Notes/2.0.0/.pages @@ -0,0 +1,3 @@
arrange:
    - 2.0.0.md
    - 2.0.0_Breaking_Changes.md
diff --git a/docs/docs/Release_Notes/2.0.0/2.0.0.md b/docs/docs/Release_Notes/2.0.0/2.0.0.md new file mode 100644 index 00000000..68e80920 --- /dev/null +++ b/docs/docs/Release_Notes/2.0.0/2.0.0.md @@ -0,0 +1,29 @@
# Release - GA (v2.0.0)

To install or upgrade the SDK, refer to instructions [here](/Getting_Started.md#installation).

## New & Improved

* Added the ability for plugins to upgrade across plugin versions with schema changes. Some highlights:
    * Schema updates using data migrations.
    * Flexibility for plugins to pick any release strategy.
    * Plugin upgrades supported across multiple plugin versions.
    * Zero dSource and VDB downtime during plugin upgrade.

    More details about Plugin Upgrade can be found [here](/Versioning_And_Upgrade/Upgrade.md).

* Added a new field `externalVersion` to the [Plugin Config](/References/Plugin_Config.md) that allows plugins to display an end-user friendly version. More details [here](/Versioning_And_Upgrade/Versioning.md#external-version).
* Added a new option to [init](/References/CLI.md#init) to select a host type for the plugin (`Unix` or `Windows`) to make it easier to get started with plugins that support either host platform.
* Added a new option to [upload](/References/CLI.md#upload) to block and wait for the upload job to finish on the Delphix Engine before the command returns.

## Breaking Changes

* The following field in the [Plugin Config](/References/Plugin_Config.md) was renamed:

    | Previous | Updated |
    | -------- | ------- |
    | `version` | `buildNumber` |

    Additionally, `buildNumber` has to conform to the format described [here](/Versioning_And_Upgrade/Versioning.md#build-number-format-rules).
[**Detailed steps to detect and make changes.**](/Release_Notes/2.0.0/2.0.0_Breaking_Changes.md#plugin-config-field-renamed)
\ No newline at end of file diff --git a/docs/docs/Release_Notes/2.0.0/2.0.0_Breaking_Changes.md b/docs/docs/Release_Notes/2.0.0/2.0.0_Breaking_Changes.md new file mode 100644 index 00000000..ec296750 --- /dev/null +++ b/docs/docs/Release_Notes/2.0.0/2.0.0_Breaking_Changes.md @@ -0,0 +1,70 @@
# Breaking Changes - GA (v.2.0.0)

## Plugin Config Field Renamed
The following field in the [Plugin Config](/References/Plugin_Config/) was renamed:

| Previous | Updated |
| -------- | ------- |
| `version` | `buildNumber` |

Additionally, the `buildNumber` must be a string that conforms to the following rules:

* The string must be composed of a sequence of non-negative integers, not all zero, separated by periods.
* Trailing zeros are ignored. So, "1.0.0" is treated the same as "1".
* Build numbers are sortable numerically, with earlier numbers having more significance than later numbers. So, "2.0" comes after "1.99999", and "1.10" comes after "1.2".
* The Delphix Engine will never allow installation of a plugin with a build number that is ordered before the already-installed build number.


More details about the format are [here](/Versioning_And_Upgrade/Versioning.md#build-number-format-rules).

### What is affected
All plugins built with v1.0.0 or below will be affected. The [Plugin Config](/References/Plugin_Config) field `version` will have to be updated to `buildNumber`.

### How does it fail
[dvp build](/References/CLI.md#build) will fail with the following error message if the [Plugin Config](/References/Plugin_Config) `version` field is not updated to `buildNumber`:

```bash
$ dvp build
Error: Additional properties are not allowed ('version' was unexpected) on ['additionalProperties']
Error: 'buildNumber' is a required property on ['required']

Validation failed on /private/var/tmp/fp/plugin_config.yml.
0 Warning(s). 2 Error(s)

BUILD FAILED.
```

### How to fix it
Rename the [Plugin Config](/References/Plugin_Config) `version` field to `buildNumber`. Make sure that the `buildNumber` conforms to the format described [here](/Versioning_And_Upgrade/Versioning.md#build-number-format-rules).
* Previous releases

```
id: 4174f1b8-45df-43cc-8e4c-21d309c17861
name: My Plugin
version: 1.0.0
language: PYTHON27
hostTypes:
- UNIX
pluginType: DIRECT
manualDiscovery: true
entryPoint: plugin_runner:plugin
srcDir: src
schemaFile: schema.json
```

* 2.0.0

```
id: 4174f1b8-45df-43cc-8e4c-21d309c17861
name: My Plugin
buildNumber: 1.0.0
language: PYTHON27
hostTypes:
- UNIX
pluginType: DIRECT
manualDiscovery: true
entryPoint: plugin_runner:plugin
srcDir: src
schemaFile: schema.json
```
\ No newline at end of file diff --git a/docs/docs/Versioning_And_Upgrade/.pages b/docs/docs/Versioning_And_Upgrade/.pages new file mode 100644 index 00000000..c8648003 --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/.pages @@ -0,0 +1,7 @@
arrange:
    - Overview.md
    - Versioning.md
    - Upgrade.md
    - Compatibility.md
    - Backports_And_Hotfixes.md
    - Replication.md
diff --git a/docs/docs/Versioning_And_Upgrade/Backports_And_Hotfixes.md b/docs/docs/Versioning_And_Upgrade/Backports_And_Hotfixes.md new file mode 100644 index 00000000..ff253ea0 --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Backports_And_Hotfixes.md @@ -0,0 +1,49 @@
# Backports and Hotfixes

If your plugin uses an ["enterprise-style"](/Versioning_And_Upgrade/Versioning.md#enterprise-style-release-strategy) release strategy, then you'll probably want to occasionally provide new "minor" or "patch" versions that build atop older versions.

Code changes that are applied atop old releases are usually called "backports". Sometimes, they are also called "hotfixes", if the change is specifically created for a single user.

These releases present a problem: although they are built atop an older code branch, they are still newer than some releases from a newer code branch. Below, we'll walk through how we prevent users from "upgrading" to a new-branch release that would be incompatible with an installed old-branch release.

### Motivating Example
Let's take a look at an example of a possible timeline of releases.

> **February**: The initial version of a plugin is released, with build number "1.0". This is a simple plugin that uses a simple strategy for syncing dSources.

> **April**: A new version is released, with build number "1.1". This adds some bugfixes and adds some small optimizations to improve the performance of syncing.

> **August**: A new version is released, with build number "2.0". This uses a completely new syncing strategy that is far more sophisticated and efficient.

Let's assume that not all users will want to upgrade to the 2.0 release immediately. So, even months later, you expect to have a significant number of users still on version 1.0 or 1.1.

Later, in October, a bug is found which impacts all releases. This bug is important enough that you want to fix it for **all** of your end users (not just the ones using 2.0).

Here are the behaviors we need:

* Our 2.0 end users should be able to get the new bugfix without giving up any of the major new features that were part of 2.0.
* Our 1.0 and 1.1 end users should be able to get the new bugfix without also needing to accept all the major new features that were part of 2.0.
* Once an end user has received the bugfix, it should be impossible to lose the bugfix in an upgrade.

### Strategy

You can include a [data migration](/Versioning_And_Upgrade/Upgrade.md#data-migrations) along with your bugfix. If your bugfix involves a schema change, you will have to do this anyway. If not, you can still include a data migration that simply does nothing (see the sketch after this list). If a user with the bugfix attempts to "upgrade" to 2.0, the Delphix Engine will prevent it, because the 2.0 release does not include this migration.

You would typically follow these steps:

* Fix the bug by applying a code change atop the 2.0 code.
* Include the new data migration in your 2.1 release.
* Separately, apply the same bugfix atop the 1.1 code. Note: depending on how code changed between 1.1 and 2.0, this 1.1-based bugfix might not contain the exact same code as we used with 2.0.
* Make another new release of the plugin, this time with build number "1.2". This release includes the 1.1-based bugfix. It also should include the new data migration.
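For instance, a do-nothing migration for this purpose might look like the following sketch (the migration ID, object type, and function name here are arbitrary; `plugin` is the plugin's `Plugin()` object):

```python
# A no-op data migration shipped with both the 1.2 and 2.1 releases. It
# changes nothing about the stored data; its only purpose is to make the
# Delphix Engine refuse an "upgrade" to any release that lacks it (e.g. 2.0).
@plugin.upgrade.repository("2019.10.15")
def bugfix_marker(old_repository):
    return old_repository
```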
This meets our requirements:

* Our 2.0 end users can install version 2.1. This gives them the bugfix, and keeps all the features from 2.0.
* Our 1.0 and 1.1 end users can install version 1.2. This gives them the bugfix without any of the 2.0 features.
* It is impossible for a 2.1 end user to lose the bugfix, because the Delphix Engine will not allow the build number to go "backwards". So, a 2.1 end user will not be able to install versions 2.0, 1.1, or 1.0.
* It is also impossible for a 1.2 end user to lose the bugfix.
    * They cannot install 1.0 or 1.1 because the build number is not allowed to decrease.
    * They also cannot install 2.0. The missing data migration on 2.0 will prevent this.

Note that a 1.2 end user can still upgrade to 2.1 at any time. This will allow them to keep the bugfix, and also take advantage of the new features that were part of 2.0.
diff --git a/docs/docs/Versioning_And_Upgrade/Compatibility.md b/docs/docs/Versioning_And_Upgrade/Compatibility.md new file mode 100644 index 00000000..6b5ab42e --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Compatibility.md @@ -0,0 +1,17 @@
# Compatibility

Before we allow a newly-uploaded plugin to replace an already-installed plugin, we have to make sure that it will not cause any problems.

For example:

* The newly-uploaded plugin must be able to accept any existing data that has been written using the already-installed plugin.
* The user should not unexpectedly lose any features or bug fixes that are present in the already-installed plugin.

These restrictions are enforced by the Delphix Engine and, sometimes, by the plugin itself.

## Delphix Engine Rules

The Delphix Engine will enforce these rules before a newly-uploaded plugin is allowed to be installed:

* The [build number](/Versioning_And_Upgrade/Versioning.md#build-number) may only move forward, not backwards.
* All [data migration IDs](/References/Glossary.md#data-migration-id) that are present in the already-installed plugin must also be present in the newly-uploaded plugin. The newly-uploaded plugin may add more data migrations, of course.
diff --git a/docs/docs/Versioning_And_Upgrade/Overview.md b/docs/docs/Versioning_And_Upgrade/Overview.md new file mode 100644 index 00000000..2193e02b --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Overview.md @@ -0,0 +1,11 @@
# Overview

Once you start writing and releasing your plugin, you’ll reach a point when bug fixes or new features may require schema changes. The plugin upgrade process enables objects that have been created with a prior schema to be migrated to the newly defined schema. When this happens, a new version of the plugin must be created.
The following few pages will walk through how versions need to change between upgrades and what needs to be written in the plugin to make sure an upgrade is successful.

## Plugin Versioning

Like any other piece of software, plugins change over time. Every so often, there will be a new release. To keep track of the different releases, each plugin release has its own versioning information. Depending on what changes are included in a particular release, there are different rules and recommendations for how the versioning information should be changed. More information on versioning is located [here](Versioning.md).

## Upgrade

Upgrade is the process by which an older version of a plugin is replaced by a newer version. Depending on what has changed between the two versions, this process may also include modifying pre-existing plugin-defined objects so they conform to the new schema expected by the new version of the plugin. Information on the upgrade process can be found [here](Upgrade.md).
diff --git a/docs/docs/Versioning_And_Upgrade/Replication.md b/docs/docs/Versioning_And_Upgrade/Replication.md new file mode 100644 index 00000000..1814680e --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Replication.md @@ -0,0 +1,21 @@
# Replication
A Delphix Engine (source) can be set up to replicate data objects to another Delphix Engine (target). Plugins built using the Virtualization SDK work seamlessly with Delphix Engine replication, with no additional development required from plugin developers.

Only a single version of a plugin can be active on a Delphix Engine at a time. We discuss some basic scenarios below. For more detailed information, refer to the [Delphix Engine Documentation](https://docs.delphix.com/docs/).

## Replica Provisioning
Replicated dSource or VDB snapshots can be used to provision new VDBs onto a target Delphix Engine, without failing over any of the objects. When provisioning a VDB from a replicated snapshot:

* A version of the plugin has to be installed on the target Delphix Engine.
* The versions of the plugins installed on the source and target Delphix Engines have to be [compatible](/Versioning_And_Upgrade/Compatibility.md).

Once provisioned, the VDB on the target Delphix Engine will be associated with the version of the plugin installed on the target Delphix Engine; any required data migrations will be run as part of the provisioning process. For more details, refer to the [Delphix Engine Documentation](https://docs.delphix.com/docs/).

## Replication Failover
On failover, there are three scenarios for each plugin:

| Scenario | Outcome |
| -------- | ------- |
| Source plugin **not installed** on target Delphix Engine | The plugin will be failed over and marked as `active` on the target Delphix Engine. |
| Source plugin version **is equal to** the target plugin version | The plugin from the source will be merged with the plugin on the target Delphix Engine. |
| Source plugin version **is not equal to** the target plugin version | The plugin from the source will be marked `inactive` on the target Delphix Engine. An `inactive` plugin can be subsequently activated, after failover, if it is [compatible](/Versioning_And_Upgrade/Compatibility.md) with the existing `active` plugin. Activating a plugin will do an upgrade and merge the `inactive` plugin, and all its associated objects, with the `active` plugin. For more details, refer to the [Delphix Engine Documentation](https://docs.delphix.com/docs/). |
\ No newline at end of file diff --git a/docs/docs/Versioning_And_Upgrade/Upgrade.md b/docs/docs/Versioning_And_Upgrade/Upgrade.md new file mode 100644 index 00000000..18cdc9c2 --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Upgrade.md @@ -0,0 +1,302 @@
# Upgrade
Upgrade is the process of moving from an older version of a plugin to a newer version.
Upgrading is not as simple as just replacing the installed plugin with a newer one. The main complication comes when the new plugin version makes changes to its [schemas](/References/Glossary.md#schema).

Consider the case of a plugin that works with collections of text files -- the user points it to a directory tree containing text files, and the plugin syncs the files from there.

The first release of such a plugin might have no link-related user options. So the plugin's linked source schema might define no properties at all:

```json
"linkedSourceDefinition": {
    "type": "object",
    "additionalProperties" : false,
    "properties" : {
    }
}
```

And, the syncing code is very simple:
```python
@plugin.linked.pre_snapshot()
def linked_pre_snapshot(direct_source, repository, source_config):
    libs.run_sync(
        remote_connection = direct_source.connection,
        source_directory = source_config.path
    )
```


But, later, some users request a new feature -- they want to avoid syncing any backup or hidden files. So, a new plugin version is released. This time, there is a new boolean property in the linked source schema where users can elect to skip these files, if desired.
```json
"linkedSourceDefinition": {
    "type": "object",
    "additionalProperties" : false,
    "required": ["skipHiddenAndBackup"],
    "properties" : {
        "skipHiddenAndBackup": { "type": "boolean" }
    }
}
```

The plugin code that handles the syncing can now pay attention to this new boolean property:
```python
_HIDDEN_AND_BACKUP_SPECS = [
    "*.bak",
    "*~",  # Backup files from certain editors
    ".*"   # Unix-style hidden files
]

@plugin.linked.pre_snapshot()
def linked_pre_snapshot(direct_source, repository, source_config):
    exclude_spec = _HIDDEN_AND_BACKUP_SPECS if direct_source.parameters.skip_hidden_and_backup else []

    libs.run_sync(
        remote_connection = direct_source.connection,
        source_directory = source_config.path,
        exclude_paths = exclude_spec
    )
```

Suppose a user has an engine with linked sources created by the older version of this plugin. That is, the existing linked sources have no `skipHiddenAndBackup` property.

If the user installs the new version of the plugin, we have a problem! The above `pre_snapshot` code from the new plugin will attempt to access the `skip_hidden_and_backup` property, which we've just seen will not exist!

The solution to this problem is to use [data migrations](/References/Glossary.md#data-migration), explained below.

!!! info "Zero dSource and VDB downtime during plugin upgrade"
    dSources and VDBs do not need to be disabled before a plugin upgrade is initiated. End users can continue to access data from existing VDBs during a plugin upgrade. However, while a particular plugin is in the process of being upgraded, no administrative Delphix Engine operations (e.g. VDB Refresh, VDB Provision, dSource Disable/Enable) will be allowed on the objects associated with that plugin. Objects associated with other plugins will not be affected.

## Data Migrations

### What is a Data Migration?
Whenever a new version of a plugin is installed on a Delphix Engine, the engine needs to migrate pre-existing data from its old format (as specified by the schemas in the old version of the plugin) to its new format (as specified by the schemas in the new version of the plugin).

A [data migration](/References/Glossary.md#data-migration) is a function that is responsible for doing this conversion. It is provided by the plugin.

Thus, when the new plugin version is installed, the engine will call all applicable data migrations provided by the new plugin. This ensures that all data is always in the format expected by the new plugin.

### A Simple Example

Let's go back to the above example of the plugin that adds a new boolean option to allow users to avoid syncing backup and hidden files. Here is a data migration that the new plugin can provide to handle the data format change:

```python
@plugin.upgrade.linked_source("2019.11.20")
def add_skip_option(old_linked_source):
    return {
        "skipHiddenAndBackup": False
    }
```

The exact rules for data migrations are covered in detail [below](Upgrade.md#rules-for-data-migrations). Here, we'll just walk through this code line by line and make some observations.

```python
@plugin.upgrade.linked_source("2019.11.20")
```
The above line is a [decorator](/References/Glossary.md#decorator) that identifies the following function as a data migration. This particular migration will handle linked sources. It is given an ID of `2019.11.20` -- this controls when this migration is run in relation to other data migrations.

```python
def add_skip_option(old_linked_source):
```

Note that the data migration takes an argument representing the old-format data. In this simple example, we know that there are no properties in the old-format data, so we can just ignore it.

```python
    return {
        "skipHiddenAndBackup": False
    }
```

Here, we are returning a Python dictionary representing the new format of the data. In this example, the dictionary has only one field: `skipHiddenAndBackup`. Because the old version of the plugin had no ability to skip files, we default this property to `False` to match the new schema.


### Rules for Data Migrations

As shown above, a data migration receives old-format input and produces new-format output. The rules and recommendations for data migrations follow:

#### Rules

* Input and output are Python dictionaries, with properties named exactly as specified in the schemas. Note that this differs from other plugin operations, where the inputs are instances of autogenerated Python [classes](/References/Schemas_and_Autogenerated_Classes.md) whose properties use Python-style naming.

* Each data migration must be tagged with an ID string. This string must consist of one or more positive integers separated by periods.

* Data migration IDs must be numerically unique. Note that `"1.2"`, `"01.02"`, and `"1.2.0.0.0"` are all considered to be identical.

* Once released, a data migration must never be deleted. An attempted upgrade will fail if the already-installed plugin version has a data migration that does not appear in the to-be-installed version.

* At upgrade time, the engine will find the set of new migrations provided by the new version that are not already part of the already-installed version. Each of these migrations will then be run, in the order specified below.
+ +* After running all applicable migrations, the engine will confirm that the resultant data conforms to the new version's schemas. If not, the upgrade will fail. + +* Note that there is no requirement or guarantee that the input or output of any particular data migration will conform to a schema. We only guarantee that the input to the **first** data migration conforms to the schema of the already-installed plugin version. And, we only require that the output of the **final** data migration conforms to the schema of the new plugin version. + +* Data migrations are run in the order specified by their IDs. The ordering is numerical, not lexicographical. Thus `"1"` would run before `"2"`, which would run before `"10"`. + +* Data migrations have no access to [Platform Libraries](/References/Platform_Libraries.md) or remote hosts. For example: If a data migration attempts to use [run_bash](/References/Platform_Libraries.md#run_bash) the upgrade will fail. + +* Note that the above rules imply that at least one data migration is required any time a schema change is made that would invalidate any data produced using a previous version of the plugin. For example: adding a `"required"` property to the new schema. + + +#### Recommendations +* We recommend using a "Year.Month.Date" format like `"2019.11.04"` for migration IDs. You can use trailing integers as necessary (e.g. use `"2019.11.04.5"` if you need something to be run between `"2019.11.04"` and `"2019.11.05"`). + +* Even though they follow similar naming rules, migration IDs are not the same thing as plugin versions. We do not recommend using your plugin version in your migration IDs. + +* We recommend using small, single-purpose data migrations. That is, if you end up making four schema changes over the course of developing a new plugin version, we recommend writing four different data migrations, one for each change. + +### Data Migration Example + +Here is a very simple data migration. +```python +@plugin.upgrade.repository("2019.12.15") +def add_new_flag_to_repo(old_repository): + new_repository = dict(old_repository) + new_repository["useNewFeature"] = False + return new_repository +``` + +### Debugging Data Migration Problems + +During the process of upgrading to a new version, the Delphix Engine will run all applicable data migrations, and then ensure that the resulting object matches the new schema. But, what if there is a bug, and the resulting object does **not** match the schema? + +#### Security Concerns Prevent Detailed Error Messages +One problem here is that the Delphix Engine is limited in the information that it can provide in the error message. Ideally, the engine would say exactly what was wrong with the object (e.g.: "The field `port` has the value `15`, but the schema says it has to have a value between `256` and `1024`"). + +But, the Delphix Engine cannot do this for security reasons. Ordinarily, the Delphix Engine knows which fields contain sensitive information, and can redact such fields from error messages. But, the only reason the Delphix Engine has that knowledge is because the schema provides that information. If an object does +**not** conform to the schema, then the Delphix Engine can't know what is sensitive and what isn't. + +Therefore, the error message here might lack the detail necessary to debug the problem. + +#### One Solution: Temporary Logging + +During development of a new plugin version, you may find yourself trying to find and fix such a bug. +One technique is to use temporary logging. 
For example, while you are trying to locate and fix the bug, you could put a log statement at the very end of each of your data migrations, like so:
```
    logger.debug("Migration 2010.03.01 returning {}".format(new_object))
    return new_object
```

See the [Logging](/References/Logging.md) section for more information about how logging works.

From the logs, you'll be able to see exactly what each migration is returning. From there, hopefully the problem will become apparent. As a supplemental tool, consider pasting these results (along with your schema) into an online JSON validator for more information.

!!! warning
    It is **very important** that you only use logging as a temporary debugging strategy. **Such logging must be removed before you release the plugin to end users**. If this logging ends up in your end product, it could cause a serious security concern. Please see our [sensitive data best practices](/Best_Practices/Sensitive_Data.md) for more information.

### When Data Migrations Are Insufficient

New versions of plugins often require some modification of data that was written using an older version of the same plugin. Data migrations handle this modification. Unfortunately, data migrations cannot always fully handle all possible upgrade scenarios by themselves.

For example, a new plugin version might want to add a new required field to one of its schemas. But, the correct value for this new field might not be knowable while the upgrade is underway -- perhaps it must be entered by the user, or perhaps it would require automatic discovery to be rerun.

Such a situation will require some user intervention after the upgrade.

In all cases, of course, you will want to **clearly document** to your users that there will be extra work required, so they can make sure they know what they are getting into before they decide to upgrade.

!!! tip
    It should also be said that you should try to avoid cases like this. As much as possible, try to make your post-upgrade plugin function with no user intervention. Treat user intervention as a last resort.

The recommended strategy here is to arrange for the affected objects to be in an "invalid" state, and for your plugin code to detect this state and throw errors when the objects are used.

For such a situation, we recommend the following process:

* Make your schema changes so that the affected property can be set in such a way that plugin code can identify it as being invalid. Typically this is done by allowing for some "sentinel" value. This may require you to have a less-strict schema definition than you might otherwise want.
* In your data migrations, make sure the affected properties are indeed marked invalid.
* In any plugin code that needs to use these properties, first check them for validity. If they are invalid, then raise an error that explains the situation to the user, and tells them what steps they need to take.

Following are two examples of schema changes that need extra user intervention after upgrade. One will require a rediscovery, and the other will require the user to enter information.

#### Autodiscovery Example

Suppose that a new plugin version adds a new required field to its repository schema. This new field specifies a full path to a database installation.
The following listing shows what we'd ideally like the new repository schema to look like (`installationPath` is the new required property):

```
"repositoryDefinition": {
    "type": "object",
    "properties": {
        "name": { "type": "string" },
        "installationPath": { "type": "string", "format": "unixpath" }
    },
    "required": ["name", "installationPath"],
    "nameField": "name",
    "identityFields": ["name"]
}
```

The new plugin's autodiscovery code will know how to find this full path. Therefore, any repositories that are discovered (or rediscovered) after the upgrade will have this path filled in correctly.

But there may be repositories that were discovered before the upgrade. The data migrations will have to ensure that *some* value is provided for this new field. However, a data migration will not be able to determine what the correct final value is.

One way to handle this is to modify the schema to allow a special value that indicates the object needs to be rediscovered. In this example, we'll change the schema from the ideal version above, removing the `unixpath` constraint on this string:
```
"installationPath": { "type": "string" }
```

Now, our data migration can set this property to a special sentinel value that will never be mistaken for an actual installation path.
```python
_REDISCOVERY_TOKEN = "###_REPOSITORY_NEEDS_REDISCOVERY_###"

@plugin.upgrade.repository("2020.02.04.01")
def repo_path(old_repository):
    # We need to add in a repository path, but there is no way for us to know
    # what the correct path is here, so we cannot set this to anything useful.
    # Instead, we'll set a special sentinel value that will indicate that the
    # repository is unusable until the remote host is rediscovered.
    old_repository["installationPath"] = _REDISCOVERY_TOKEN
    return old_repository
```

Now, wherever the plugin needs to use this path, we'll need to check for this sentinel value and error out if we find it. For example, we might need a valid path during the `configure` operation:
```python
@plugin.virtual.configure()
def configure(virtual_source, snapshot, repository):
    if repository.installation_path == _REDISCOVERY_TOKEN:
        # We cannot use this repository as-is -- it must be rediscovered.
        msg = 'Unable to use repository "{}" because it has not been updated ' \
              'since upgrade. Please re-run discovery and try again.'
        raise UserError(msg.format(repository.name))

    # ... actual configure code goes here
```

#### Manual Entry

Above, we looked at an example where the plugin could fill in the new field's values at discovery time, so the user was simply asked to rediscover.

Sometimes, though, users themselves will have to supply the new values.

Suppose that a new plugin version wants to add a required field to the `virtualSource` object. This new property will specify which port the database should be accessible on. Ideally, we might want our new field to look like this:

```
"port": {"type": "integer", "minimum": 1024, "maximum": 65535}
```

Again, however, the data migration will not know which value is correct here. This is something the user must decide. Still, the data migration must provide *some* value. As before, we'll relax the schema a bit from what would be ideal:

```
"port": {"type": "integer", "minimum": 0, "maximum": 65535}
```

Now, our data migration can use the value `0` as code for "this VDB needs user intervention".
```python
@plugin.upgrade.virtual_source("2020.02.04.02")
def add_dummy_port(old_virtual_source):
    # Set the "port" property to 0 to act as a placeholder. Plugin code
    # treats 0 as "the user has not yet chosen a real port".
    old_virtual_source["port"] = 0
    return old_virtual_source
```

As with the previous example, our plugin code will need to look for this special value and raise an error so that the user knows what to do. This example shows the [Virtual Source Reconfigure](/References/Plugin_Operations.md#virtual-source-reconfigure) operation, but of course similar code will be needed anywhere else that the new `port` property is required.

```python
@plugin.virtual.reconfigure()
def virtual_reconfigure(virtual_source, repository, source_config, snapshot):
    if virtual_source.parameters.port == 0:
        raise UserError('VDB "{}" cannot function properly. Please choose a '
                        'port number for this VDB to use.'.format(
                            virtual_source.parameters.name))

    # ... actual reconfigure code goes here
```
diff --git a/docs/docs/Versioning_And_Upgrade/Versioning.md b/docs/docs/Versioning_And_Upgrade/Versioning.md
new file mode 100644
index 00000000..4f8e75db
--- /dev/null
+++ b/docs/docs/Versioning_And_Upgrade/Versioning.md
@@ -0,0 +1,82 @@

# Versioning

Almost all software products are periodically updated to include new features and bug fixes. Plugins are no exception -- a plugin's code will very likely be different two years from now.

To deal with this, plugins use **versioning**. This just means that a plugin communicates (to the user, and to the Delphix Engine) exactly what code is in use.

## Versioning Information

There are three different pieces of version-related information, each used for different purposes.

### External Version

This field is intended only for use by the end user. The Delphix Engine does not use this field, and therefore imposes no restrictions on its content. This is a free-form string which the plugin can use in any way it likes.

Examples might be "5.3.0", "2012B", "MyPlugin Millennium Edition, Service Pack 3", "Playful Platypus" or "Salton City".

The external version is specified using the `externalVersion` property in your [plugin config](/References/Plugin_Config.md) file.

!!! tip
    Use an external version that makes it easy for end users to distinguish newer plugin versions from older ones.

### Build Number

Unlike the external version, this field is intended to convey information to the Delphix Engine. It is a string of integers separated by periods. Examples would be "5.3.0", "7", or "5.3.0.0.0.157".

The Delphix Engine uses the build number to guard against end users trying to "downgrade" their plugin to an older, incompatible version. So, if a user has build number "3.4.1" installed, then they may not install a version with a build number like "2.x.y", "3.3.y" or "3.4.0".

The build number is specified using the `buildNumber` property in your [plugin config](/References/Plugin_Config.md) file.

This field is required to be a string. You might need to enclose your build number in quotes in order to prevent YAML from interpreting the field as a number. Examples:

`buildNumber` | Allowed | Details
-------- | ---- | -----------
1 | No | YAML will interpret this as an integer.
1.2 | No | YAML will interpret this as a floating-point number.
"1" | Yes | The quotes mean this is a string.
"1.2" | Yes | The quotes mean this is a string.
1.2.3 | Yes | YAML treats this as a string, since it cannot be a number.
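If you are ever unsure how YAML will type a value, one quick check is to run it through a YAML parser. Below is a minimal sketch using the PyYAML library; using PyYAML here is our assumption for illustration (the Delphix Engine may use a different parser), but the typing behavior shown is standard YAML:

```python
import yaml  # PyYAML; assumed available only for this illustration

# Unquoted, YAML parses 1.2 as a float, so it is not a valid buildNumber...
assert isinstance(yaml.safe_load("buildNumber: 1.2")["buildNumber"], float)

# ...while a quoted value, or a three-part value like 1.2.3, is a string.
assert isinstance(yaml.safe_load('buildNumber: "1.2"')["buildNumber"], str)
assert isinstance(yaml.safe_load("buildNumber: 1.2.3")["buildNumber"], str)
```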
#### Build Number Format Rules

Your build number must be a string, conforming to these rules:

* The string must be composed of a sequence of non-negative integers, not all zero, separated by periods.
* Trailing zeros are ignored. So, "1.0.0" is treated the same as "1".
* Build numbers sort numerically, with earlier parts having more significance than later parts. So, "2.0" comes after "1.99999", and "1.10" comes after "1.2" (see the sketch at the end of this page).
* The Delphix Engine will never allow installation of a plugin with a build number that is ordered before the already-installed build number.

!!! tip
    You can upload a plugin with the same `buildNumber` as the installed plugin. However, this should only be done while a plugin is being developed. Plugin releases for end users should never reuse a `buildNumber`.

Please also see the [Release Strategies](#release-strategies) section below. We generally recommend using a single-integer build number for app-style development. Build numbers need to have multiple parts if you are doing enterprise-style development.

## Release Strategies

There are two main strategies for releasing software:

### "App-style" Release Strategy
Here, all users are expected to use the latest available version of the software. Most consumer software works this way today -- websites, phone apps, etc. An app-style strategy is much simpler, but also more limiting:

* At any time, there is only one branch under active development.
* Customers that want bugfixes must upgrade to the latest version.
* The plugin's build number can be a simple integer that is incremented with each new release.

### "Enterprise-style" Release Strategy
Here, you might distinguish "major" releases of your software from "minor" releases. You might expect some customers to continue to use older major releases for a long time, even after a new major release comes out. This strategy is often used for software like operating systems and DBMSs, where upgrading can cause significant disruption. An enterprise-style strategy is more flexible, but also more cumbersome:

* There may be multiple branches under active development at any time, typically one branch for every "major release" that is still being supported. This requires careful coordination to make sure that each new code change ends up on the correct branch (or branches).
* It is possible to supply bugfix-only minor releases (often called "patch releases") which build atop older major releases. Customers do not need to move to the new major version in order to get these bugfixes.
* The plugin's build number needs to be composed of multiple integers.

If you are using this strategy, read more about how to deal with backports and hotfixes in [Backports and Hotfixes](Backports_And_Hotfixes.md).

You may use whichever of these strategies works best for you. The SDK and the Delphix Engine support either strategy. You can even change your mind later and switch to the other strategy.

## Recommendations

* Build your plugin with the newest Virtualization SDK version available.
* Only publish one artifact built for a given official version of the plugin.
* The official release of a plugin should not use the same build number as a development build.
* Use an [external version](#external-version) that makes newer plugins easy to identify.
* Publish a plugin version compatibility matrix that lists the plugin version, the Virtualization SDK version it was built with, and the Delphix Engine version(s) it supports.
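Finally, since the build-number ordering rules above can be subtle, here is a minimal sketch of how they behave. This is an illustration only, not the Delphix Engine's actual implementation:

```python
def build_number_key(build_number):
    """Turn a build number string like "1.10.0" into a comparable tuple."""
    parts = [int(part) for part in build_number.split(".")]
    while parts and parts[-1] == 0:
        parts.pop()  # trailing zeros are ignored: "1.0.0" sorts like "1"
    return tuple(parts)

# "2.0" comes after "1.99999", and "1.10" comes after "1.2".
assert build_number_key("2.0") > build_number_key("1.99999")
assert build_number_key("1.10") > build_number_key("1.2")

# Trailing zeros are ignored, so these are the same build number.
assert build_number_key("1.0.0") == build_number_key("1")
```

Because Python compares tuples element by element, earlier parts automatically carry more significance than later ones, matching the rules above.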
diff --git a/docs/docs/images/.DS_Store b/docs/docs/images/.DS_Store
new file mode 100644
index 00000000..5008ddfc
Binary files /dev/null and b/docs/docs/images/.DS_Store differ
diff --git a/docs/docs/images/delphix-logo-white.png b/docs/docs/images/delphix-logo-white.png
new file mode 100644
index 00000000..80102867
Binary files /dev/null and b/docs/docs/images/delphix-logo-white.png differ
diff --git a/docs/docs/images/logo.png b/docs/docs/images/logo.png
new file mode 100755
index 00000000..b65b41f1
Binary files /dev/null and b/docs/docs/images/logo.png differ
diff --git a/docs/docs/index.md b/docs/docs/index.md
new file mode 100644
index 00000000..88f71516
--- /dev/null
+++ b/docs/docs/index.md
@@ -0,0 +1,43 @@

# Welcome!

We hope this Delphix Virtualization SDK documentation gives you everything you need to develop your own plugins!

## Overview

If you already know about plugins and are looking for something specific, use the links to the left, or the search bar, to find what you are looking for.

If this is your first time here, and you are wondering what developing a Delphix plugin will do for you, read on!


## What Does a Delphix Plugin Do?

The Delphix Engine is an appliance that lets you quickly and cheaply make **virtual copies** of large datasets. The engine has built-in support for interfacing with certain types of datasets, such as Oracle, SQL Server and ASE.

When you develop a plugin, you enable end users to use your dataset type as if they were using a built-in dataset type, whether it's MongoDB, Cassandra, or something else. Your plugin will extend the Delphix Engine's capabilities by teaching it how to run essential virtual data operations on your datasets:

 - How to stop and start them
 - Where to store their data
 - How to make virtual copies

These plugin operations are the building blocks of the Delphix Engine. From these building blocks, the engine can provide all of the normal Delphix functionality to the datasets you connect to, such as:

 - Provisioning
 - Refreshing
 - Rewinding
 - Replication
 - Syncing


## Where to Start

Read through the first few sections of this documentation, and we will walk you through how to get set up for development, then how to develop, build, and deploy your first plugin.

[Getting Started](Getting_Started.md) will show you how to set up the SDK. When you finish this section, you will have a full plugin development environment, and you will be ready to start building plugins.

[Building Your First Plugin](/Building_Your_First_Plugin/Overview.md) will walk you step-by-step through the process of developing a very simple plugin. With it, you will learn the concepts and techniques that you will need to develop fully-fledged plugins. That does not mean this first plugin is useless, though: you will be able to virtualize simple datasets with it.

Once you complete these sections, use the rest of the documentation as a reference whenever you need it.

## Questions?

If you have questions, bug reports, or feature requests, reach out to us via the [Virtualization SDK GitHub repository](https://github.com/delphix/virtualization-sdk/).
\ No newline at end of file diff --git a/docs/docs/media/image1.png b/docs/docs/media/image1.png new file mode 100644 index 00000000..c98ad1a7 Binary files /dev/null and b/docs/docs/media/image1.png differ diff --git a/docs/docs/media/image2.png b/docs/docs/media/image2.png new file mode 100644 index 00000000..997ec3be Binary files /dev/null and b/docs/docs/media/image2.png differ diff --git a/docs/docs/media/image3.png b/docs/docs/media/image3.png new file mode 100644 index 00000000..8588eeb8 Binary files /dev/null and b/docs/docs/media/image3.png differ diff --git a/docs/docs/media/image4.png b/docs/docs/media/image4.png new file mode 100644 index 00000000..49846ba3 Binary files /dev/null and b/docs/docs/media/image4.png differ diff --git a/docs/docs/media/image5.png b/docs/docs/media/image5.png new file mode 100644 index 00000000..f23a599b Binary files /dev/null and b/docs/docs/media/image5.png differ diff --git a/docs/docs/stylesheets/extra.css b/docs/docs/stylesheets/extra.css new file mode 100644 index 00000000..f10ad2d5 --- /dev/null +++ b/docs/docs/stylesheets/extra.css @@ -0,0 +1,9 @@ +select { + appearance: none; + -webkit-appearance: none; + -moz-appearance: none; + border: none; + /* needed for Firefox: */ + overflow:hidden; + width: 120%; +} diff --git a/docs/material/.DS_Store b/docs/material/.DS_Store new file mode 100644 index 00000000..fc4645d0 Binary files /dev/null and b/docs/material/.DS_Store differ diff --git a/docs/material/404.html b/docs/material/404.html new file mode 100644 index 00000000..52beb3b8 --- /dev/null +++ b/docs/material/404.html @@ -0,0 +1,4 @@ +{% extends "base.html" %} +{% block content %} +

+<h1>404 - Not found</h1>

+{% endblock %} diff --git a/docs/material/__init__.py b/docs/material/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/docs/material/assets/fonts/font-awesome.css b/docs/material/assets/fonts/font-awesome.css new file mode 100644 index 00000000..b476b53e --- /dev/null +++ b/docs/material/assets/fonts/font-awesome.css @@ -0,0 +1,4 @@ +/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url("specimen/FontAwesome.woff2") format("woff2"),url("specimen/FontAwesome.woff") format("woff"),url("specimen/FontAwesome.ttf") format("truetype")}.fa{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571429em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14285714em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14285714em;width:2.14285714em;top:.14285714em;text-align:center}.fa-li.fa-lg{left:-1.85714286em}.fa-border{padding:.2em .25em .15em;border:solid .08em #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left{margin-right:.3em}.fa.fa-pull-right{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left{margin-right:.3em}.fa.pull-right{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s infinite linear;animation:fa-spin 2s infinite linear}.fa-pulse{-webkit-animation:fa-spin 1s infinite steps(8);animation:fa-spin 1s infinite steps(8)}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scale(-1,1);-ms-transform:scale(-1,1);transform:scale(-1,1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scale(1,-1);-ms-transform:scale(1,-1);transform:scale(1,-1)}:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270,:root .fa-flip-horizontal,:root 
.fa-flip-vertical{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:"\f000"}.fa-music:before{content:"\f001"}.fa-search:before{content:"\f002"}.fa-envelope-o:before{content:"\f003"}.fa-heart:before{content:"\f004"}.fa-star:before{content:"\f005"}.fa-star-o:before{content:"\f006"}.fa-user:before{content:"\f007"}.fa-film:before{content:"\f008"}.fa-th-large:before{content:"\f009"}.fa-th:before{content:"\f00a"}.fa-th-list:before{content:"\f00b"}.fa-check:before{content:"\f00c"}.fa-remove:before,.fa-close:before,.fa-times:before{content:"\f00d"}.fa-search-plus:before{content:"\f00e"}.fa-search-minus:before{content:"\f010"}.fa-power-off:before{content:"\f011"}.fa-signal:before{content:"\f012"}.fa-gear:before,.fa-cog:before{content:"\f013"}.fa-trash-o:before{content:"\f014"}.fa-home:before{content:"\f015"}.fa-file-o:before{content:"\f016"}.fa-clock-o:before{content:"\f017"}.fa-road:before{content:"\f018"}.fa-download:before{content:"\f019"}.fa-arrow-circle-o-down:before{content:"\f01a"}.fa-arrow-circle-o-up:before{content:"\f01b"}.fa-inbox:before{content:"\f01c"}.fa-play-circle-o:before{content:"\f01d"}.fa-rotate-right:before,.fa-repeat:before{content:"\f01e"}.fa-refresh:before{content:"\f021"}.fa-list-alt:before{content:"\f022"}.fa-lock:before{content:"\f023"}.fa-flag:before{content:"\f024"}.fa-headphones:before{content:"\f025"}.fa-volume-off:before{content:"\f026"}.fa-volume-down:before{content:"\f027"}.fa-volume-up:before{content:"\f028"}.fa-qrcode:before{content:"\f029"}.fa-barcode:before{content:"\f02a"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-book:before{content:"\f02d"}.fa-bookmark:before{content:"\f02e"}.fa-print:before{content:"\f02f"}.fa-camera:before{content:"\f030"}.fa-font:before{content:"\f031"}.fa-bold:before{content:"\f032"}.fa-italic:before{content:"\f033"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-align-left:before{content:"\f036"}.fa-align-center:before{content:"\f037"}.fa-align-right:before{content:"\f038"}.fa-align-justify:before{content:"\f039"}.fa-list:before{content:"\f03a"}.fa-dedent:before,.fa-outdent:before{content:"\f03b"}.fa-indent:before{content:"\f03c"}.fa-video-camera:before{content:"\f03d"}.fa-photo:before,.fa-image:before,.fa-picture-o:before{content:"\f03e"}.fa-pencil:before{content:"\f040"}.fa-map-marker:before{content:"\f041"}.fa-adjust:before{content:"\f042"}.fa-tint:before{content:"\f043"}.fa-edit:before,.fa-pencil-square-o:before{content:"\f044"}.fa-share-square-o:before{content:"\f045"}.fa-check-square-o:before{content:"\f046"}.fa-arrows:before{content:"\f047"}.fa-step-backward:before{content:"\f048"}.fa-fast-backward:before{content:"\f049"}.fa-backward:before{content:"\f04a"}.fa-play:before{content:"\f04b"}.fa-pause:before{content:"\f04c"}.fa-stop:before{content:"\f04d"}.fa-forward:before{content:"\f04e"}.fa-fast-forward:before{content:"\f050"}.fa-step-forward:before{content:"\f051"}.fa-eject:before{content:"\f052"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-plus-circle:before{content:"\f055"}.fa-minus-circle:before{content:"\f056"}.fa-times-circle:before{content:"\f057"}.fa-check-circle:before{content:"\f058"}.fa-question-circle:before{content:"\f059"}.fa-info-circle:before{content:"\f05a"}.fa-cross
hairs:before{content:"\f05b"}.fa-times-circle-o:before{content:"\f05c"}.fa-check-circle-o:before{content:"\f05d"}.fa-ban:before{content:"\f05e"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrow-down:before{content:"\f063"}.fa-mail-forward:before,.fa-share:before{content:"\f064"}.fa-expand:before{content:"\f065"}.fa-compress:before{content:"\f066"}.fa-plus:before{content:"\f067"}.fa-minus:before{content:"\f068"}.fa-asterisk:before{content:"\f069"}.fa-exclamation-circle:before{content:"\f06a"}.fa-gift:before{content:"\f06b"}.fa-leaf:before{content:"\f06c"}.fa-fire:before{content:"\f06d"}.fa-eye:before{content:"\f06e"}.fa-eye-slash:before{content:"\f070"}.fa-warning:before,.fa-exclamation-triangle:before{content:"\f071"}.fa-plane:before{content:"\f072"}.fa-calendar:before{content:"\f073"}.fa-random:before{content:"\f074"}.fa-comment:before{content:"\f075"}.fa-magnet:before{content:"\f076"}.fa-chevron-up:before{content:"\f077"}.fa-chevron-down:before{content:"\f078"}.fa-retweet:before{content:"\f079"}.fa-shopping-cart:before{content:"\f07a"}.fa-folder:before{content:"\f07b"}.fa-folder-open:before{content:"\f07c"}.fa-arrows-v:before{content:"\f07d"}.fa-arrows-h:before{content:"\f07e"}.fa-bar-chart-o:before,.fa-bar-chart:before{content:"\f080"}.fa-twitter-square:before{content:"\f081"}.fa-facebook-square:before{content:"\f082"}.fa-camera-retro:before{content:"\f083"}.fa-key:before{content:"\f084"}.fa-gears:before,.fa-cogs:before{content:"\f085"}.fa-comments:before{content:"\f086"}.fa-thumbs-o-up:before{content:"\f087"}.fa-thumbs-o-down:before{content:"\f088"}.fa-star-half:before{content:"\f089"}.fa-heart-o:before{content:"\f08a"}.fa-sign-out:before{content:"\f08b"}.fa-linkedin-square:before{content:"\f08c"}.fa-thumb-tack:before{content:"\f08d"}.fa-external-link:before{content:"\f08e"}.fa-sign-in:before{content:"\f090"}.fa-trophy:before{content:"\f091"}.fa-github-square:before{content:"\f092"}.fa-upload:before{content:"\f093"}.fa-lemon-o:before{content:"\f094"}.fa-phone:before{content:"\f095"}.fa-square-o:before{content:"\f096"}.fa-bookmark-o:before{content:"\f097"}.fa-phone-square:before{content:"\f098"}.fa-twitter:before{content:"\f099"}.fa-facebook-f:before,.fa-facebook:before{content:"\f09a"}.fa-github:before{content:"\f09b"}.fa-unlock:before{content:"\f09c"}.fa-credit-card:before{content:"\f09d"}.fa-feed:before,.fa-rss:before{content:"\f09e"}.fa-hdd-o:before{content:"\f0a0"}.fa-bullhorn:before{content:"\f0a1"}.fa-bell:before{content:"\f0f3"}.fa-certificate:before{content:"\f0a3"}.fa-hand-o-right:before{content:"\f0a4"}.fa-hand-o-left:before{content:"\f0a5"}.fa-hand-o-up:before{content:"\f0a6"}.fa-hand-o-down:before{content:"\f0a7"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-globe:before{content:"\f0ac"}.fa-wrench:before{content:"\f0ad"}.fa-tasks:before{content:"\f0ae"}.fa-filter:before{content:"\f0b0"}.fa-briefcase:before{content:"\f0b1"}.fa-arrows-alt:before{content:"\f0b2"}.fa-group:before,.fa-users:before{content:"\f0c0"}.fa-chain:before,.fa-link:before{content:"\f0c1"}.fa-cloud:before{content:"\f0c2"}.fa-flask:before{content:"\f0c3"}.fa-cut:before,.fa-scissors:before{content:"\f0c4"}.fa-copy:before,.fa-files-o:before{content:"\f0c5"}.fa-paperclip:before{content:"\f0c6"}.fa-save:before,.fa-floppy-o:before{content:"\f0c7"}.fa-square:before{content:"\f0c8"}.fa-navicon:before,.fa-reor
der:before,.fa-bars:before{content:"\f0c9"}.fa-list-ul:before{content:"\f0ca"}.fa-list-ol:before{content:"\f0cb"}.fa-strikethrough:before{content:"\f0cc"}.fa-underline:before{content:"\f0cd"}.fa-table:before{content:"\f0ce"}.fa-magic:before{content:"\f0d0"}.fa-truck:before{content:"\f0d1"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-square:before{content:"\f0d3"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-plus:before{content:"\f0d5"}.fa-money:before{content:"\f0d6"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-up:before{content:"\f0d8"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.fa-columns:before{content:"\f0db"}.fa-unsorted:before,.fa-sort:before{content:"\f0dc"}.fa-sort-down:before,.fa-sort-desc:before{content:"\f0dd"}.fa-sort-up:before,.fa-sort-asc:before{content:"\f0de"}.fa-envelope:before{content:"\f0e0"}.fa-linkedin:before{content:"\f0e1"}.fa-rotate-left:before,.fa-undo:before{content:"\f0e2"}.fa-legal:before,.fa-gavel:before{content:"\f0e3"}.fa-dashboard:before,.fa-tachometer:before{content:"\f0e4"}.fa-comment-o:before{content:"\f0e5"}.fa-comments-o:before{content:"\f0e6"}.fa-flash:before,.fa-bolt:before{content:"\f0e7"}.fa-sitemap:before{content:"\f0e8"}.fa-umbrella:before{content:"\f0e9"}.fa-paste:before,.fa-clipboard:before{content:"\f0ea"}.fa-lightbulb-o:before{content:"\f0eb"}.fa-exchange:before{content:"\f0ec"}.fa-cloud-download:before{content:"\f0ed"}.fa-cloud-upload:before{content:"\f0ee"}.fa-user-md:before{content:"\f0f0"}.fa-stethoscope:before{content:"\f0f1"}.fa-suitcase:before{content:"\f0f2"}.fa-bell-o:before{content:"\f0a2"}.fa-coffee:before{content:"\f0f4"}.fa-cutlery:before{content:"\f0f5"}.fa-file-text-o:before{content:"\f0f6"}.fa-building-o:before{content:"\f0f7"}.fa-hospital-o:before{content:"\f0f8"}.fa-ambulance:before{content:"\f0f9"}.fa-medkit:before{content:"\f0fa"}.fa-fighter-jet:before{content:"\f0fb"}.fa-beer:before{content:"\f0fc"}.fa-h-square:before{content:"\f0fd"}.fa-plus-square:before{content:"\f0fe"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angle-down:before{content:"\f107"}.fa-desktop:before{content:"\f108"}.fa-laptop:before{content:"\f109"}.fa-tablet:before{content:"\f10a"}.fa-mobile-phone:before,.fa-mobile:before{content:"\f10b"}.fa-circle-o:before{content:"\f10c"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{content:"\f10e"}.fa-spinner:before{content:"\f110"}.fa-circle:before{content:"\f111"}.fa-mail-reply:before,.fa-reply:before{content:"\f112"}.fa-github-alt:before{content:"\f113"}.fa-folder-o:before{content:"\f114"}.fa-folder-open-o:before{content:"\f115"}.fa-smile-o:before{content:"\f118"}.fa-frown-o:before{content:"\f119"}.fa-meh-o:before{content:"\f11a"}.fa-gamepad:before{content:"\f11b"}.fa-keyboard-o:before{content:"\f11c"}.fa-flag-o:before{content:"\f11d"}.fa-flag-checkered:before{content:"\f11e"}.fa-terminal:before{content:"\f120"}.fa-code:before{content:"\f121"}.fa-mail-reply-all:before,.fa-reply-all:before{content:"\f122"}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:"\f123"}.fa-location-arrow:before{content:"\f124"}.fa-crop:before{content:"\f125"}.fa-code-fork:before{content:"\f126"}.fa-unlink:before,.fa-chain-broken:before{content:"\f127"}.fa-question:before{content:"\f128"}.fa-i
nfo:before{content:"\f129"}.fa-exclamation:before{content:"\f12a"}.fa-superscript:before{content:"\f12b"}.fa-subscript:before{content:"\f12c"}.fa-eraser:before{content:"\f12d"}.fa-puzzle-piece:before{content:"\f12e"}.fa-microphone:before{content:"\f130"}.fa-microphone-slash:before{content:"\f131"}.fa-shield:before{content:"\f132"}.fa-calendar-o:before{content:"\f133"}.fa-fire-extinguisher:before{content:"\f134"}.fa-rocket:before{content:"\f135"}.fa-maxcdn:before{content:"\f136"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-html5:before{content:"\f13b"}.fa-css3:before{content:"\f13c"}.fa-anchor:before{content:"\f13d"}.fa-unlock-alt:before{content:"\f13e"}.fa-bullseye:before{content:"\f140"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:before{content:"\f142"}.fa-rss-square:before{content:"\f143"}.fa-play-circle:before{content:"\f144"}.fa-ticket:before{content:"\f145"}.fa-minus-square:before{content:"\f146"}.fa-minus-square-o:before{content:"\f147"}.fa-level-up:before{content:"\f148"}.fa-level-down:before{content:"\f149"}.fa-check-square:before{content:"\f14a"}.fa-pencil-square:before{content:"\f14b"}.fa-external-link-square:before{content:"\f14c"}.fa-share-square:before{content:"\f14d"}.fa-compass:before{content:"\f14e"}.fa-toggle-down:before,.fa-caret-square-o-down:before{content:"\f150"}.fa-toggle-up:before,.fa-caret-square-o-up:before{content:"\f151"}.fa-toggle-right:before,.fa-caret-square-o-right:before{content:"\f152"}.fa-euro:before,.fa-eur:before{content:"\f153"}.fa-gbp:before{content:"\f154"}.fa-dollar:before,.fa-usd:before{content:"\f155"}.fa-rupee:before,.fa-inr:before{content:"\f156"}.fa-cny:before,.fa-rmb:before,.fa-yen:before,.fa-jpy:before{content:"\f157"}.fa-ruble:before,.fa-rouble:before,.fa-rub:before{content:"\f158"}.fa-won:before,.fa-krw:before{content:"\f159"}.fa-bitcoin:before,.fa-btc:before{content:"\f15a"}.fa-file:before{content:"\f15b"}.fa-file-text:before{content:"\f15c"}.fa-sort-alpha-asc:before{content:"\f15d"}.fa-sort-alpha-desc:before{content:"\f15e"}.fa-sort-amount-asc:before{content:"\f160"}.fa-sort-amount-desc:before{content:"\f161"}.fa-sort-numeric-asc:before{content:"\f162"}.fa-sort-numeric-desc:before{content:"\f163"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbs-down:before{content:"\f165"}.fa-youtube-square:before{content:"\f166"}.fa-youtube:before{content:"\f167"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-youtube-play:before{content:"\f16a"}.fa-dropbox:before{content:"\f16b"}.fa-stack-overflow:before{content:"\f16c"}.fa-instagram:before{content:"\f16d"}.fa-flickr:before{content:"\f16e"}.fa-adn:before{content:"\f170"}.fa-bitbucket:before{content:"\f171"}.fa-bitbucket-square:before{content:"\f172"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-long-arrow-down:before{content:"\f175"}.fa-long-arrow-up:before{content:"\f176"}.fa-long-arrow-left:before{content:"\f177"}.fa-long-arrow-right:before{content:"\f178"}.fa-apple:before{content:"\f179"}.fa-windows:before{content:"\f17a"}.fa-android:before{content:"\f17b"}.fa-linux:before{content:"\f17c"}.fa-dribbble:before{content:"\f17d"}.fa-skype:before{content:"\f17e"}.fa-foursquare:before{content:"\f180"}.fa-trello:before{content:"\f181"}.fa-female:before{content:"\f182"}.fa-male:before{content:"\f183"}.fa-gittip:before,.fa-gratipay:before{content:"\f184"}.fa-sun-o:before{content:"\f185"}.fa-moon-o:bef
ore{content:"\f186"}.fa-archive:before{content:"\f187"}.fa-bug:before{content:"\f188"}.fa-vk:before{content:"\f189"}.fa-weibo:before{content:"\f18a"}.fa-renren:before{content:"\f18b"}.fa-pagelines:before{content:"\f18c"}.fa-stack-exchange:before{content:"\f18d"}.fa-arrow-circle-o-right:before{content:"\f18e"}.fa-arrow-circle-o-left:before{content:"\f190"}.fa-toggle-left:before,.fa-caret-square-o-left:before{content:"\f191"}.fa-dot-circle-o:before{content:"\f192"}.fa-wheelchair:before{content:"\f193"}.fa-vimeo-square:before{content:"\f194"}.fa-turkish-lira:before,.fa-try:before{content:"\f195"}.fa-plus-square-o:before{content:"\f196"}.fa-space-shuttle:before{content:"\f197"}.fa-slack:before{content:"\f198"}.fa-envelope-square:before{content:"\f199"}.fa-wordpress:before{content:"\f19a"}.fa-openid:before{content:"\f19b"}.fa-institution:before,.fa-bank:before,.fa-university:before{content:"\f19c"}.fa-mortar-board:before,.fa-graduation-cap:before{content:"\f19d"}.fa-yahoo:before{content:"\f19e"}.fa-google:before{content:"\f1a0"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-square:before{content:"\f1a2"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-stumbleupon:before{content:"\f1a4"}.fa-delicious:before{content:"\f1a5"}.fa-digg:before{content:"\f1a6"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-drupal:before{content:"\f1a9"}.fa-joomla:before{content:"\f1aa"}.fa-language:before{content:"\f1ab"}.fa-fax:before{content:"\f1ac"}.fa-building:before{content:"\f1ad"}.fa-child:before{content:"\f1ae"}.fa-paw:before{content:"\f1b0"}.fa-spoon:before{content:"\f1b1"}.fa-cube:before{content:"\f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-steam:before{content:"\f1b6"}.fa-steam-square:before{content:"\f1b7"}.fa-recycle:before{content:"\f1b8"}.fa-automobile:before,.fa-car:before{content:"\f1b9"}.fa-cab:before,.fa-taxi:before{content:"\f1ba"}.fa-tree:before{content:"\f1bb"}.fa-spotify:before{content:"\f1bc"}.fa-deviantart:before{content:"\f1bd"}.fa-soundcloud:before{content:"\f1be"}.fa-database:before{content:"\f1c0"}.fa-file-pdf-o:before{content:"\f1c1"}.fa-file-word-o:before{content:"\f1c2"}.fa-file-excel-o:before{content:"\f1c3"}.fa-file-powerpoint-o:before{content:"\f1c4"}.fa-file-photo-o:before,.fa-file-picture-o:before,.fa-file-image-o:before{content:"\f1c5"}.fa-file-zip-o:before,.fa-file-archive-o:before{content:"\f1c6"}.fa-file-sound-o:before,.fa-file-audio-o:before{content:"\f1c7"}.fa-file-movie-o:before,.fa-file-video-o:before{content:"\f1c8"}.fa-file-code-o:before{content:"\f1c9"}.fa-vine:before{content:"\f1ca"}.fa-codepen:before{content:"\f1cb"}.fa-jsfiddle:before{content:"\f1cc"}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-saver:before,.fa-support:before,.fa-life-ring:before{content:"\f1cd"}.fa-circle-o-notch:before{content:"\f1ce"}.fa-ra:before,.fa-resistance:before,.fa-rebel:before{content:"\f1d0"}.fa-ge:before,.fa-empire:before{content:"\f1d1"}.fa-git-square:before{content:"\f1d2"}.fa-git:before{content:"\f1d3"}.fa-y-combinator-square:before,.fa-yc-square:before,.fa-hacker-news:before{content:"\f1d4"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-qq:before{content:"\f1d6"}.fa-wechat:before,.fa-weixin:before{content:"\f1d7"}.fa-send:before,.fa-paper-plane:before{content:"\f1d8"}.fa-send-o:before,.fa-paper-plane-o:before{content:"\f1d9"}.fa-history:before{content:"\f1da"}.fa-circle-thin:before{content:"\f1db"}.fa-header:before{content:"\f1dc"}.fa-paragraph:before{content:"\f1dd"}
.fa-sliders:before{content:"\f1de"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-bomb:before{content:"\f1e2"}.fa-soccer-ball-o:before,.fa-futbol-o:before{content:"\f1e3"}.fa-tty:before{content:"\f1e4"}.fa-binoculars:before{content:"\f1e5"}.fa-plug:before{content:"\f1e6"}.fa-slideshare:before{content:"\f1e7"}.fa-twitch:before{content:"\f1e8"}.fa-yelp:before{content:"\f1e9"}.fa-newspaper-o:before{content:"\f1ea"}.fa-wifi:before{content:"\f1eb"}.fa-calculator:before{content:"\f1ec"}.fa-paypal:before{content:"\f1ed"}.fa-google-wallet:before{content:"\f1ee"}.fa-cc-visa:before{content:"\f1f0"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-cc-discover:before{content:"\f1f2"}.fa-cc-amex:before{content:"\f1f3"}.fa-cc-paypal:before{content:"\f1f4"}.fa-cc-stripe:before{content:"\f1f5"}.fa-bell-slash:before{content:"\f1f6"}.fa-bell-slash-o:before{content:"\f1f7"}.fa-trash:before{content:"\f1f8"}.fa-copyright:before{content:"\f1f9"}.fa-at:before{content:"\f1fa"}.fa-eyedropper:before{content:"\f1fb"}.fa-paint-brush:before{content:"\f1fc"}.fa-birthday-cake:before{content:"\f1fd"}.fa-area-chart:before{content:"\f1fe"}.fa-pie-chart:before{content:"\f200"}.fa-line-chart:before{content:"\f201"}.fa-lastfm:before{content:"\f202"}.fa-lastfm-square:before{content:"\f203"}.fa-toggle-off:before{content:"\f204"}.fa-toggle-on:before{content:"\f205"}.fa-bicycle:before{content:"\f206"}.fa-bus:before{content:"\f207"}.fa-ioxhost:before{content:"\f208"}.fa-angellist:before{content:"\f209"}.fa-cc:before{content:"\f20a"}.fa-shekel:before,.fa-sheqel:before,.fa-ils:before{content:"\f20b"}.fa-meanpath:before{content:"\f20c"}.fa-buysellads:before{content:"\f20d"}.fa-connectdevelop:before{content:"\f20e"}.fa-dashcube:before{content:"\f210"}.fa-forumbee:before{content:"\f211"}.fa-leanpub:before{content:"\f212"}.fa-sellsy:before{content:"\f213"}.fa-shirtsinbulk:before{content:"\f214"}.fa-simplybuilt:before{content:"\f215"}.fa-skyatlas:before{content:"\f216"}.fa-cart-plus:before{content:"\f217"}.fa-cart-arrow-down:before{content:"\f218"}.fa-diamond:before{content:"\f219"}.fa-ship:before{content:"\f21a"}.fa-user-secret:before{content:"\f21b"}.fa-motorcycle:before{content:"\f21c"}.fa-street-view:before{content:"\f21d"}.fa-heartbeat:before{content:"\f21e"}.fa-venus:before{content:"\f221"}.fa-mars:before{content:"\f222"}.fa-mercury:before{content:"\f223"}.fa-intersex:before,.fa-transgender:before{content:"\f224"}.fa-transgender-alt:before{content:"\f225"}.fa-venus-double:before{content:"\f226"}.fa-mars-double:before{content:"\f227"}.fa-venus-mars:before{content:"\f228"}.fa-mars-stroke:before{content:"\f229"}.fa-mars-stroke-v:before{content:"\f22a"}.fa-mars-stroke-h:before{content:"\f22b"}.fa-neuter:before{content:"\f22c"}.fa-genderless:before{content:"\f22d"}.fa-facebook-official:before{content:"\f230"}.fa-pinterest-p:before{content:"\f231"}.fa-whatsapp:before{content:"\f232"}.fa-server:before{content:"\f233"}.fa-user-plus:before{content:"\f234"}.fa-user-times:before{content:"\f235"}.fa-hotel:before,.fa-bed:before{content:"\f236"}.fa-viacoin:before{content:"\f237"}.fa-train:before{content:"\f238"}.fa-subway:before{content:"\f239"}.fa-medium:before{content:"\f23a"}.fa-yc:before,.fa-y-combinator:before{content:"\f23b"}.fa-optin-monster:before{content:"\f23c"}.fa-opencart:before{content:"\f23d"}.fa-expeditedssl:before{content:"\f23e"}.fa-battery-4:before,.fa-battery:before,.fa-battery-full:before{content:"\f240"}.fa-battery-3:before,.fa-battery-three-quarters:before{content:"\f241"}.fa-battery-2:before
,.fa-battery-half:before{content:"\f242"}.fa-battery-1:before,.fa-battery-quarter:before{content:"\f243"}.fa-battery-0:before,.fa-battery-empty:before{content:"\f244"}.fa-mouse-pointer:before{content:"\f245"}.fa-i-cursor:before{content:"\f246"}.fa-object-group:before{content:"\f247"}.fa-object-ungroup:before{content:"\f248"}.fa-sticky-note:before{content:"\f249"}.fa-sticky-note-o:before{content:"\f24a"}.fa-cc-jcb:before{content:"\f24b"}.fa-cc-diners-club:before{content:"\f24c"}.fa-clone:before{content:"\f24d"}.fa-balance-scale:before{content:"\f24e"}.fa-hourglass-o:before{content:"\f250"}.fa-hourglass-1:before,.fa-hourglass-start:before{content:"\f251"}.fa-hourglass-2:before,.fa-hourglass-half:before{content:"\f252"}.fa-hourglass-3:before,.fa-hourglass-end:before{content:"\f253"}.fa-hourglass:before{content:"\f254"}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:"\f255"}.fa-hand-stop-o:before,.fa-hand-paper-o:before{content:"\f256"}.fa-hand-scissors-o:before{content:"\f257"}.fa-hand-lizard-o:before{content:"\f258"}.fa-hand-spock-o:before{content:"\f259"}.fa-hand-pointer-o:before{content:"\f25a"}.fa-hand-peace-o:before{content:"\f25b"}.fa-trademark:before{content:"\f25c"}.fa-registered:before{content:"\f25d"}.fa-creative-commons:before{content:"\f25e"}.fa-gg:before{content:"\f260"}.fa-gg-circle:before{content:"\f261"}.fa-tripadvisor:before{content:"\f262"}.fa-odnoklassniki:before{content:"\f263"}.fa-odnoklassniki-square:before{content:"\f264"}.fa-get-pocket:before{content:"\f265"}.fa-wikipedia-w:before{content:"\f266"}.fa-safari:before{content:"\f267"}.fa-chrome:before{content:"\f268"}.fa-firefox:before{content:"\f269"}.fa-opera:before{content:"\f26a"}.fa-internet-explorer:before{content:"\f26b"}.fa-tv:before,.fa-television:before{content:"\f26c"}.fa-contao:before{content:"\f26d"}.fa-500px:before{content:"\f26e"}.fa-amazon:before{content:"\f270"}.fa-calendar-plus-o:before{content:"\f271"}.fa-calendar-minus-o:before{content:"\f272"}.fa-calendar-times-o:before{content:"\f273"}.fa-calendar-check-o:before{content:"\f274"}.fa-industry:before{content:"\f275"}.fa-map-pin:before{content:"\f276"}.fa-map-signs:before{content:"\f277"}.fa-map-o:before{content:"\f278"}.fa-map:before{content:"\f279"}.fa-commenting:before{content:"\f27a"}.fa-commenting-o:before{content:"\f27b"}.fa-houzz:before{content:"\f27c"}.fa-vimeo:before{content:"\f27d"}.fa-black-tie:before{content:"\f27e"}.fa-fonticons:before{content:"\f280"}.fa-reddit-alien:before{content:"\f281"}.fa-edge:before{content:"\f282"}.fa-credit-card-alt:before{content:"\f283"}.fa-codiepie:before{content:"\f284"}.fa-modx:before{content:"\f285"}.fa-fort-awesome:before{content:"\f286"}.fa-usb:before{content:"\f287"}.fa-product-hunt:before{content:"\f288"}.fa-mixcloud:before{content:"\f289"}.fa-scribd:before{content:"\f28a"}.fa-pause-circle:before{content:"\f28b"}.fa-pause-circle-o:before{content:"\f28c"}.fa-stop-circle:before{content:"\f28d"}.fa-stop-circle-o:before{content:"\f28e"}.fa-shopping-bag:before{content:"\f290"}.fa-shopping-basket:before{content:"\f291"}.fa-hashtag:before{content:"\f292"}.fa-bluetooth:before{content:"\f293"}.fa-bluetooth-b:before{content:"\f294"}.fa-percent:before{content:"\f295"}.fa-gitlab:before{content:"\f296"}.fa-wpbeginner:before{content:"\f297"}.fa-wpforms:before{content:"\f298"}.fa-envira:before{content:"\f299"}.fa-universal-access:before{content:"\f29a"}.fa-wheelchair-alt:before{content:"\f29b"}.fa-question-circle-o:before{content:"\f29c"}.fa-blind:before{content:"\f29d"}.fa-audio-description:before{content:"\f29e"}.f
a-volume-control-phone:before{content:"\f2a0"}.fa-braille:before{content:"\f2a1"}.fa-assistive-listening-systems:before{content:"\f2a2"}.fa-asl-interpreting:before,.fa-american-sign-language-interpreting:before{content:"\f2a3"}.fa-deafness:before,.fa-hard-of-hearing:before,.fa-deaf:before{content:"\f2a4"}.fa-glide:before{content:"\f2a5"}.fa-glide-g:before{content:"\f2a6"}.fa-signing:before,.fa-sign-language:before{content:"\f2a7"}.fa-low-vision:before{content:"\f2a8"}.fa-viadeo:before{content:"\f2a9"}.fa-viadeo-square:before{content:"\f2aa"}.fa-snapchat:before{content:"\f2ab"}.fa-snapchat-ghost:before{content:"\f2ac"}.fa-snapchat-square:before{content:"\f2ad"}.fa-pied-piper:before{content:"\f2ae"}.fa-first-order:before{content:"\f2b0"}.fa-yoast:before{content:"\f2b1"}.fa-themeisle:before{content:"\f2b2"}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:"\f2b3"}.fa-fa:before,.fa-font-awesome:before{content:"\f2b4"}.fa-handshake-o:before{content:"\f2b5"}.fa-envelope-open:before{content:"\f2b6"}.fa-envelope-open-o:before{content:"\f2b7"}.fa-linode:before{content:"\f2b8"}.fa-address-book:before{content:"\f2b9"}.fa-address-book-o:before{content:"\f2ba"}.fa-vcard:before,.fa-address-card:before{content:"\f2bb"}.fa-vcard-o:before,.fa-address-card-o:before{content:"\f2bc"}.fa-user-circle:before{content:"\f2bd"}.fa-user-circle-o:before{content:"\f2be"}.fa-user-o:before{content:"\f2c0"}.fa-id-badge:before{content:"\f2c1"}.fa-drivers-license:before,.fa-id-card:before{content:"\f2c2"}.fa-drivers-license-o:before,.fa-id-card-o:before{content:"\f2c3"}.fa-quora:before{content:"\f2c4"}.fa-free-code-camp:before{content:"\f2c5"}.fa-telegram:before{content:"\f2c6"}.fa-thermometer-4:before,.fa-thermometer:before,.fa-thermometer-full:before{content:"\f2c7"}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-thermometer-2:before,.fa-thermometer-half:before{content:"\f2c9"}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:"\f2ca"}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:"\f2cb"}.fa-shower:before{content:"\f2cc"}.fa-bathtub:before,.fa-s15:before,.fa-bath:before{content:"\f2cd"}.fa-podcast:before{content:"\f2ce"}.fa-window-maximize:before{content:"\f2d0"}.fa-window-minimize:before{content:"\f2d1"}.fa-window-restore:before{content:"\f2d2"}.fa-times-rectangle:before,.fa-window-close:before{content:"\f2d3"}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:"\f2d4"}.fa-bandcamp:before{content:"\f2d5"}.fa-grav:before{content:"\f2d6"}.fa-etsy:before{content:"\f2d7"}.fa-imdb:before{content:"\f2d8"}.fa-ravelry:before{content:"\f2d9"}.fa-eercast:before{content:"\f2da"}.fa-microchip:before{content:"\f2db"}.fa-snowflake-o:before{content:"\f2dc"}.fa-superpowers:before{content:"\f2dd"}.fa-wpexplorer:before{content:"\f2de"}.fa-meetup:before{content:"\f2e0"}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto} \ No newline at end of file diff --git a/docs/material/assets/fonts/material-icons.css b/docs/material/assets/fonts/material-icons.css new file mode 100644 index 00000000..d23d365e --- /dev/null +++ b/docs/material/assets/fonts/material-icons.css @@ -0,0 +1,13 @@ +/*! + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy + * of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING, SOFTWARE + * DISTRIBUTED UNDER THE LICENSE IS DISTRIBUTED ON AN "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. + * SEE THE LICENSE FOR THE SPECIFIC LANGUAGE GOVERNING PERMISSIONS AND + * LIMITATIONS UNDER THE LICENSE. + */@font-face{font-family:"Material Icons";font-style:normal;font-weight:400;src:local("Material Icons"),local("MaterialIcons-Regular"),url("specimen/MaterialIcons-Regular.woff2") format("woff2"),url("specimen/MaterialIcons-Regular.woff") format("woff"),url("specimen/MaterialIcons-Regular.ttf") format("truetype")} \ No newline at end of file diff --git a/docs/material/assets/fonts/specimen/FontAwesome.ttf b/docs/material/assets/fonts/specimen/FontAwesome.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/docs/material/assets/fonts/specimen/FontAwesome.ttf differ diff --git a/docs/material/assets/fonts/specimen/FontAwesome.woff b/docs/material/assets/fonts/specimen/FontAwesome.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/docs/material/assets/fonts/specimen/FontAwesome.woff differ diff --git a/docs/material/assets/fonts/specimen/FontAwesome.woff2 b/docs/material/assets/fonts/specimen/FontAwesome.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/docs/material/assets/fonts/specimen/FontAwesome.woff2 differ diff --git a/docs/material/assets/fonts/specimen/MaterialIcons-Regular.ttf b/docs/material/assets/fonts/specimen/MaterialIcons-Regular.ttf new file mode 100644 index 00000000..7015564a Binary files /dev/null and b/docs/material/assets/fonts/specimen/MaterialIcons-Regular.ttf differ diff --git a/docs/material/assets/fonts/specimen/MaterialIcons-Regular.woff b/docs/material/assets/fonts/specimen/MaterialIcons-Regular.woff new file mode 100644 index 00000000..b648a3ee Binary files /dev/null and b/docs/material/assets/fonts/specimen/MaterialIcons-Regular.woff differ diff --git a/docs/material/assets/fonts/specimen/MaterialIcons-Regular.woff2 b/docs/material/assets/fonts/specimen/MaterialIcons-Regular.woff2 new file mode 100644 index 00000000..9fa21125 Binary files /dev/null and b/docs/material/assets/fonts/specimen/MaterialIcons-Regular.woff2 differ diff --git a/docs/material/assets/images/favicon.png b/docs/material/assets/images/favicon.png new file mode 100644 index 00000000..76d17f57 Binary files /dev/null and b/docs/material/assets/images/favicon.png differ diff --git a/docs/material/assets/images/icons/bitbucket.1b09e088.svg b/docs/material/assets/images/icons/bitbucket.1b09e088.svg new file mode 100644 index 00000000..a25435af --- /dev/null +++ b/docs/material/assets/images/icons/bitbucket.1b09e088.svg @@ -0,0 +1,20 @@ + + + diff --git a/docs/material/assets/images/icons/github.f0b8504a.svg b/docs/material/assets/images/icons/github.f0b8504a.svg new file mode 100644 index 00000000..c009420a --- /dev/null +++ b/docs/material/assets/images/icons/github.f0b8504a.svg @@ -0,0 +1,18 @@ + + + diff --git a/docs/material/assets/images/icons/gitlab.6dd19c00.svg b/docs/material/assets/images/icons/gitlab.6dd19c00.svg new file mode 100644 index 00000000..9e3d6f05 --- /dev/null +++ b/docs/material/assets/images/icons/gitlab.6dd19c00.svg @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/material/assets/javascripts/application.878fdd8d.js 
b/docs/material/assets/javascripts/application.878fdd8d.js
new file mode 100644
index 00000000..17baeca5
--- /dev/null
+++ b/docs/material/assets/javascripts/application.878fdd8d.js
@@ -0,0 +1 @@
+[single-line minified bundle omitted: vendored MkDocs Material theme application JavaScript covering clipboard copy buttons, the search UI wired to a lunr index, header/sidebar/tab/nav scroll behavior, keyboard shortcuts, GitHub repository stats, a cookie helper, FastClick, and Promise/fetch/CustomEvent polyfills; this is build output, not hand-written source, and is not reviewable line by line]
\ No newline at end of file
diff --git a/docs/material/assets/javascripts/lunr/lunr.da.js b/docs/material/assets/javascripts/lunr/lunr.da.js
new file mode 100644
index 00000000..3b07b2c1
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.da.js
@@ -0,0 +1 @@
+[single-line minified bundle omitted: lunr Danish language support, consisting of a trimmer, a Snowball stemmer, and a stopword filter registered with the lunr pipeline as "trimmer-da", "stemmer-da", and "stopWordFilter-da"]
\ No newline at end of file
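Each `lunr.<lang>` bundle added in this change has the same shape: a UMD wrapper that extends the global `lunr` object, a language function that swaps the default indexing and search pipelines for localized ones, and per-language pipeline functions registered under stable labels. Below is a de-minified sketch of that pattern, reconstructed from the Danish bundle summarized above; identifier names are expanded for readability, and the Snowball stemmer body, full word-character range, and full stopword list are elided, so treat this as an illustration rather than the shipped code:

```javascript
// De-minified sketch of the shared lunr.<lang> pattern (reconstructed from
// lunr.da.js above; names expanded for readability, stemmer and word lists
// elided). Not the shipped code.
(function (root, factory) {
  if (typeof define === "function" && define.amd) {
    define(factory);              // AMD
  } else if (typeof exports === "object") {
    module.exports = factory();   // CommonJS
  } else {
    factory()(root.lunr);         // browser global: extend window.lunr in place
  }
})(this, function () {
  return function (lunr) {
    if (lunr === undefined) {
      throw new Error("Lunr is not present. Please include / require Lunr before this script.");
    }
    if (lunr.stemmerSupport === undefined) {
      throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");
    }

    // Language plugin: the theme applies it inside the index builder via
    // `this.use(lunr.da)`, replacing the default English pipelines.
    lunr.da = function () {
      this.pipeline.reset();
      this.pipeline.add(lunr.da.trimmer, lunr.da.stopWordFilter, lunr.da.stemmer);
      if (this.searchPipeline) {
        this.searchPipeline.reset();
        this.searchPipeline.add(lunr.da.stemmer);
      }
    };

    // The shipped file uses a much wider Unicode letter range here.
    lunr.da.wordCharacters = "A-Za-z";
    lunr.da.trimmer = lunr.trimmerSupport.generateTrimmer(lunr.da.wordCharacters);

    // Snowball stemming program built on lunr.stemmerSupport; body elided,
    // identity stemmer stands in for it.
    lunr.da.stemmer = function (token) { return token; };

    // Stopword list truncated; the shipped file carries the full Danish list.
    lunr.da.stopWordFilter = lunr.generateStopWordFilter("ad af alle alt anden at".split(" "));

    // Registered under stable labels so a serialized index can rebuild its pipeline.
    lunr.Pipeline.registerFunction(lunr.da.trimmer, "trimmer-da");
    lunr.Pipeline.registerFunction(lunr.da.stemmer, "stemmer-da");
    lunr.Pipeline.registerFunction(lunr.da.stopWordFilter, "stopWordFilter-da");
  };
});
```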
diff --git a/docs/material/assets/javascripts/lunr/lunr.de.js b/docs/material/assets/javascripts/lunr/lunr.de.js
new file mode 100644
index 00000000..ebd78f28
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.de.js
@@ -0,0 +1 @@
+[single-line minified bundle omitted: lunr German language support, consisting of a trimmer, a Snowball stemmer, and a stopword filter registered as "trimmer-de", "stemmer-de", and "stopWordFilter-de"]
\ No newline at end of file
diff --git a/docs/material/assets/javascripts/lunr/lunr.du.js b/docs/material/assets/javascripts/lunr/lunr.du.js
new file mode 100644
index 00000000..375c0e76
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.du.js
@@ -0,0 +1 @@
+[single-line minified bundle omitted: lunr Dutch language support, consisting of a trimmer, a Snowball stemmer, and a stopword filter registered as "trimmer-du", "stemmer-du", and "stopWordFilter-du"]
\ No newline at end of file
diff --git a/docs/material/assets/javascripts/lunr/lunr.es.js b/docs/material/assets/javascripts/lunr/lunr.es.js
new file mode 100644
index 00000000..4cb634f0
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.es.js
@@ -0,0 +1 @@
+!function(e,s){"function"==typeof define&&define.amd?define(s):"object"==typeof exports?module.exports=s():s()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var s,r,n;e.es=function(){this.pipeline.reset(),this.pipeline.add(e.es.trimmer,e.es.stopWordFilter,e.es.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.es.stemmer))},e.es.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.es.trimmer=e.trimmerSupport.generateTrimmer(e.es.wordCharacters),e.Pipeline.registerFunction(e.es.trimmer,"trimmer-es"),e.es.stemmer=(s=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,n=new function(){var e,n,i,a=[new s("",-1,6),new s("á",0,1),new s("é",0,2),new s("í",0,3),new s("ó",0,4),new s("ú",0,5)],t=[new s("la",-1,-1),new s("sela",0,-1),new s("le",-1,-1),new s("me",-1,-1),new s("se",-1,-1),new s("lo",-1,-1),new s("selo",5,-1),new s("las",-1,-1),new s("selas",7,-1),new s("les",-1,-1),new s("los",-1,-1),new s("selos",10,-1),new s("nos",-1,-1)],o=[new s("ando",-1,6),new s("iendo",-1,6),new s("yendo",-1,7),new s("ándo",-1,2),new s("iéndo",-1,1),new s("ar",-1,6),new s("er",-1,6),new s("ir",-1,6),new s("ár",-1,3),new s("ér",-1,4),new s("ír",-1,5)],u=[new s("ic",-1,-1),new s("ad",-1,-1),new s("os",-1,-1),new s("iv",-1,1)],w=[new s("able",-1,1),new s("ible",-1,1),new s("ante",-1,1)],c=[new s("ic",-1,1),new s("abil",-1,1),new s("iv",-1,1)],m=[new s("ica",-1,1),new s("ancia",-1,2),new s("encia",-1,5),new s("adora",-1,2),new s("osa",-1,1),new s("ista",-1,1),new s("iva",-1,9),new s("anza",-1,1),new s("logía",-1,3),new s("idad",-1,8),new s("able",-1,1),new s("ible",-1,1),new s("ante",-1,2),new s("mente",-1,7),new s("amente",13,6),new s("ación",-1,2),new s("ución",-1,4),new s("ico",-1,1),new s("ismo",-1,1),new s("oso",-1,1),new s("amiento",-1,1),new s("imiento",-1,1),new s("ivo",-1,9),new s("ador",-1,2),new s("icas",-1,1),new s("ancias",-1,2),new s("encias",-1,5),new s("adoras",-1,2),new s("osas",-1,1),new s("istas",-1,1),new s("ivas",-1,9),new s("anzas",-1,1),new s("logías",-1,3),new s("idades",-1,8),new s("ables",-1,1),new s("ibles",-1,1),new s("aciones",-1,2),new s("uciones",-1,4),new s("adores",-1,2),new s("antes",-1,2),new s("icos",-1,1),new s("ismos",-1,1),new s("osos",-1,1),new s("amientos",-1,1),new s("imientos",-1,1),new s("ivos",-1,9)],l=[new s("ya",-1,1),new s("ye",-1,1),new s("yan",-1,1),new s("yen",-1,1),new s("yeron",-1,1),new s("yendo",-1,1),new s("yo",-1,1),new s("yas",-1,1),new s("yes",-1,1),new s("yais",-1,1),new s("yamos",-1,1),new s("yó",-1,1)],d=[new s("aba",-1,2),new s("ada",-1,2),new s("ida",-1,2),new s("ara",-1,2),new s("iera",-1,2),new s("ía",-1,2),new s("aría",5,2),new s("ería",5,2),new s("iría",5,2),new s("ad",-1,2),new s("ed",-1,2),new s("id",-1,2),new s("ase",-1,2),new s("iese",-1,2),new s("aste",-1,2),new s("iste",-1,2),new s("an",-1,2),new s("aban",16,2),new s("aran",16,2),new s("ieran",16,2),new s("ían",16,2),new s("arían",20,2),new s("erían",20,2),new s("irían",20,2),new s("en",-1,1),new s("asen",24,2),new s("iesen",24,2),new s("aron",-1,2),new s("ieron",-1,2),new s("arán",-1,2),new s("erán",-1,2),new s("irán",-1,2),new s("ado",-1,2),new s("ido",-1,2),new s("ando",-1,2),new s("iendo",-1,2),new s("ar",-1,2),new s("er",-1,2),new s("ir",-1,2),new s("as",-1,2),new s("abas",39,2),new s("adas",39,2),new s("idas",39,2),new s("aras",39,2),new s("ieras",39,2),new s("ías",39,2),new s("arías",45,2),new s("erías",45,2),new s("irías",45,2),new s("es",-1,1),new s("ases",49,2),new s("ieses",49,2),new s("abais",-1,2),new s("arais",-1,2),new s("ierais",-1,2),new s("íais",-1,2),new s("aríais",55,2),new 
s("eríais",55,2),new s("iríais",55,2),new s("aseis",-1,2),new s("ieseis",-1,2),new s("asteis",-1,2),new s("isteis",-1,2),new s("áis",-1,2),new s("éis",-1,1),new s("aréis",64,2),new s("eréis",64,2),new s("iréis",64,2),new s("ados",-1,2),new s("idos",-1,2),new s("amos",-1,2),new s("ábamos",70,2),new s("áramos",70,2),new s("iéramos",70,2),new s("íamos",70,2),new s("aríamos",74,2),new s("eríamos",74,2),new s("iríamos",74,2),new s("emos",-1,1),new s("aremos",78,2),new s("eremos",78,2),new s("iremos",78,2),new s("ásemos",78,2),new s("iésemos",78,2),new s("imos",-1,2),new s("arás",-1,2),new s("erás",-1,2),new s("irás",-1,2),new s("ís",-1,2),new s("ará",-1,2),new s("erá",-1,2),new s("irá",-1,2),new s("aré",-1,2),new s("eré",-1,2),new s("iré",-1,2),new s("ió",-1,2)],b=[new s("a",-1,1),new s("e",-1,2),new s("o",-1,1),new s("os",-1,1),new s("á",-1,1),new s("é",-1,2),new s("í",-1,1),new s("ó",-1,1)],f=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,10],_=new r;function h(){if(_.out_grouping(f,97,252)){for(;!_.in_grouping(f,97,252);){if(_.cursor>=_.limit)return!0;_.cursor++}return!1}return!0}function v(){var e,s=_.cursor;if(function(){if(_.in_grouping(f,97,252)){var e=_.cursor;if(h()){if(_.cursor=e,!_.in_grouping(f,97,252))return!0;for(;!_.out_grouping(f,97,252);){if(_.cursor>=_.limit)return!0;_.cursor++}}return!1}return!0}()){if(_.cursor=s,!_.out_grouping(f,97,252))return;if(e=_.cursor,h()){if(_.cursor=e,!_.in_grouping(f,97,252)||_.cursor>=_.limit)return;_.cursor++}}i=_.cursor}function p(){for(;!_.in_grouping(f,97,252);){if(_.cursor>=_.limit)return!1;_.cursor++}for(;!_.out_grouping(f,97,252);){if(_.cursor>=_.limit)return!1;_.cursor++}return!0}function g(){return i<=_.cursor}function k(){return e<=_.cursor}function y(e,s){if(!k())return!0;_.slice_del(),_.ket=_.cursor;var r=_.find_among_b(e,s);return r&&(_.bra=_.cursor,1==r&&k()&&_.slice_del()),!1}function q(e){return!k()||(_.slice_del(),_.ket=_.cursor,_.eq_s_b(2,e)&&(_.bra=_.cursor,k()&&_.slice_del()),!1)}function C(){var e;if(_.ket=_.cursor,e=_.find_among_b(m,46)){switch(_.bra=_.cursor,e){case 1:if(!k())return!1;_.slice_del();break;case 2:if(q("ic"))return!1;break;case 3:if(!k())return!1;_.slice_from("log");break;case 4:if(!k())return!1;_.slice_from("u");break;case 5:if(!k())return!1;_.slice_from("ente");break;case 6:if(!(n<=_.cursor))return!1;_.slice_del(),_.ket=_.cursor,(e=_.find_among_b(u,4))&&(_.bra=_.cursor,k()&&(_.slice_del(),1==e&&(_.ket=_.cursor,_.eq_s_b(2,"at")&&(_.bra=_.cursor,k()&&_.slice_del()))));break;case 7:if(y(w,3))return!1;break;case 8:if(y(c,3))return!1;break;case 9:if(q("at"))return!1}return!0}return!1}this.setCurrent=function(e){_.setCurrent(e)},this.getCurrent=function(){return _.getCurrent()},this.stem=function(){var s,r=_.cursor;return s=_.cursor,i=_.limit,n=i,e=i,v(),_.cursor=s,p()&&(n=_.cursor,p()&&(e=_.cursor)),_.limit_backward=r,_.cursor=_.limit,function(){var e;if(_.ket=_.cursor,_.find_among_b(t,13)&&(_.bra=_.cursor,(e=_.find_among_b(o,11))&&g()))switch(e){case 1:_.bra=_.cursor,_.slice_from("iendo");break;case 2:_.bra=_.cursor,_.slice_from("ando");break;case 3:_.bra=_.cursor,_.slice_from("ar");break;case 4:_.bra=_.cursor,_.slice_from("er");break;case 5:_.bra=_.cursor,_.slice_from("ir");break;case 6:_.slice_del();break;case 7:_.eq_s_b(1,"u")&&_.slice_del()}}(),_.cursor=_.limit,C()||(_.cursor=_.limit,function(){var 
e,s;if(_.cursor>=i&&(s=_.limit_backward,_.limit_backward=i,_.ket=_.cursor,e=_.find_among_b(l,12),_.limit_backward=s,e)){if(_.bra=_.cursor,1==e){if(!_.eq_s_b(1,"u"))return!1;_.slice_del()}return!0}return!1}()||(_.cursor=_.limit,function(){var e,s,r,n;if(_.cursor>=i&&(s=_.limit_backward,_.limit_backward=i,_.ket=_.cursor,e=_.find_among_b(d,96),_.limit_backward=s,e))switch(_.bra=_.cursor,e){case 1:r=_.limit-_.cursor,_.eq_s_b(1,"u")?(n=_.limit-_.cursor,_.eq_s_b(1,"g")?_.cursor=_.limit-n:_.cursor=_.limit-r):_.cursor=_.limit-r,_.bra=_.cursor;case 2:_.slice_del()}}())),_.cursor=_.limit,function(){var e,s;if(_.ket=_.cursor,e=_.find_among_b(b,8))switch(_.bra=_.cursor,e){case 1:g()&&_.slice_del();break;case 2:g()&&(_.slice_del(),_.ket=_.cursor,_.eq_s_b(1,"u")&&(_.bra=_.cursor,s=_.limit-_.cursor,_.eq_s_b(1,"g")&&(_.cursor=_.limit-s,g()&&_.slice_del())))}}(),_.cursor=_.limit_backward,function(){for(var e;;){if(_.bra=_.cursor,e=_.find_among(a,6))switch(_.ket=_.cursor,e){case 1:_.slice_from("a");continue;case 2:_.slice_from("e");continue;case 3:_.slice_from("i");continue;case 4:_.slice_from("o");continue;case 5:_.slice_from("u");continue;case 6:if(_.cursor>=_.limit)break;_.cursor++;continue}break}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}),e.Pipeline.registerFunction(e.es.stemmer,"stemmer-es"),e.es.stopWordFilter=e.generateStopWordFilter("a al algo algunas algunos ante antes como con contra cual cuando de del desde donde durante e el ella ellas ellos en entre era erais eran eras eres es esa esas ese eso esos esta estaba estabais estaban estabas estad estada estadas estado estados estamos estando estar estaremos estará estarán estarás estaré estaréis estaría estaríais estaríamos estarían estarías estas este estemos esto estos estoy estuve estuviera estuvierais estuvieran estuvieras estuvieron estuviese estuvieseis estuviesen estuvieses estuvimos estuviste estuvisteis estuviéramos estuviésemos estuvo está estábamos estáis están estás esté estéis estén estés fue fuera fuerais fueran fueras fueron fuese fueseis fuesen fueses fui fuimos fuiste fuisteis fuéramos fuésemos ha habida habidas habido habidos habiendo habremos habrá habrán habrás habré habréis habría habríais habríamos habrían habrías habéis había habíais habíamos habían habías han has hasta hay haya hayamos hayan hayas hayáis he hemos hube hubiera hubierais hubieran hubieras hubieron hubiese hubieseis hubiesen hubieses hubimos hubiste hubisteis hubiéramos hubiésemos hubo la las le les lo los me mi mis mucho muchos muy más mí mía mías mío míos nada ni no nos nosotras nosotros nuestra nuestras nuestro nuestros o os otra otras otro otros para pero poco por porque que quien quienes qué se sea seamos sean seas seremos será serán serás seré seréis sería seríais seríamos serían serías seáis sido siendo sin sobre sois somos son soy su sus suya suyas suyo suyos sí también tanto te tendremos tendrá tendrán tendrás tendré tendréis tendría tendríais tendríamos tendrían tendrías tened tenemos tenga tengamos tengan tengas tengo tengáis tenida tenidas tenido tenidos teniendo tenéis tenía teníais teníamos tenían tenías ti tiene tienen tienes todo todos tu tus tuve tuviera tuvierais tuvieran tuvieras tuvieron tuviese tuvieseis tuviesen tuvieses tuvimos tuviste tuvisteis tuviéramos tuviésemos tuvo tuya tuyas tuyo tuyos tú un una uno unos vosotras vosotros vuestra vuestras vuestro vuestros y ya yo él éramos".split(" 
")),e.Pipeline.registerFunction(e.es.stopWordFilter,"stopWordFilter-es")}}); \ No newline at end of file diff --git a/docs/material/assets/javascripts/lunr/lunr.fi.js b/docs/material/assets/javascripts/lunr/lunr.fi.js new file mode 100644 index 00000000..0200b1fc --- /dev/null +++ b/docs/material/assets/javascripts/lunr/lunr.fi.js @@ -0,0 +1 @@ +!function(i,e){"function"==typeof define&&define.amd?define(e):"object"==typeof exports?module.exports=e():e()(i.lunr)}(this,function(){return function(i){if(void 0===i)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===i.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var e,r,n;i.fi=function(){this.pipeline.reset(),this.pipeline.add(i.fi.trimmer,i.fi.stopWordFilter,i.fi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(i.fi.stemmer))},i.fi.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",i.fi.trimmer=i.trimmerSupport.generateTrimmer(i.fi.wordCharacters),i.Pipeline.registerFunction(i.fi.trimmer,"trimmer-fi"),i.fi.stemmer=(e=i.stemmerSupport.Among,r=i.stemmerSupport.SnowballProgram,n=new function(){var i,n,t,s,l=[new e("pa",-1,1),new e("sti",-1,2),new e("kaan",-1,1),new e("han",-1,1),new e("kin",-1,1),new e("hän",-1,1),new e("kään",-1,1),new e("ko",-1,1),new e("pä",-1,1),new e("kö",-1,1)],o=[new e("lla",-1,-1),new e("na",-1,-1),new e("ssa",-1,-1),new e("ta",-1,-1),new e("lta",3,-1),new e("sta",3,-1)],a=[new e("llä",-1,-1),new e("nä",-1,-1),new e("ssä",-1,-1),new e("tä",-1,-1),new e("ltä",3,-1),new e("stä",3,-1)],u=[new e("lle",-1,-1),new e("ine",-1,-1)],c=[new e("nsa",-1,3),new e("mme",-1,3),new e("nne",-1,3),new e("ni",-1,2),new e("si",-1,1),new e("an",-1,4),new e("en",-1,6),new e("än",-1,5),new e("nsä",-1,3)],m=[new e("aa",-1,-1),new e("ee",-1,-1),new e("ii",-1,-1),new e("oo",-1,-1),new e("uu",-1,-1),new e("ää",-1,-1),new e("öö",-1,-1)],w=[new e("a",-1,8),new e("lla",0,-1),new e("na",0,-1),new e("ssa",0,-1),new e("ta",0,-1),new e("lta",4,-1),new e("sta",4,-1),new e("tta",4,9),new e("lle",-1,-1),new e("ine",-1,-1),new e("ksi",-1,-1),new e("n",-1,7),new e("han",11,1),new e("den",11,-1,C),new e("seen",11,-1,v),new e("hen",11,2),new e("tten",11,-1,C),new e("hin",11,3),new e("siin",11,-1,C),new e("hon",11,4),new e("hän",11,5),new e("hön",11,6),new e("ä",-1,8),new e("llä",22,-1),new e("nä",22,-1),new e("ssä",22,-1),new e("tä",22,-1),new e("ltä",26,-1),new e("stä",26,-1),new e("ttä",26,9)],_=[new e("eja",-1,-1),new e("mma",-1,1),new e("imma",1,-1),new e("mpa",-1,1),new e("impa",3,-1),new e("mmi",-1,1),new e("immi",5,-1),new e("mpi",-1,1),new e("impi",7,-1),new e("ejä",-1,-1),new e("mmä",-1,1),new e("immä",10,-1),new e("mpä",-1,1),new e("impä",12,-1)],k=[new e("i",-1,-1),new e("j",-1,-1)],b=[new e("mma",-1,1),new e("imma",0,-1)],d=[17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8],f=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],h=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],p=[17,97,24,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32],g=new r;function j(){for(var i;i=g.cursor,!g.in_grouping(f,97,246);){if(g.cursor=i,i>=g.limit)return!0;g.cursor++}for(g.cursor=i;!g.out_grouping(f,97,246);){if(g.cursor>=g.limit)return!0;g.cursor++}return!1}function q(){var i,e;if(g.cursor>=s)if(e=g.limit_backward,g.limit_backward=s,g.ket=g.cursor,i=g.find_among_b(l,10)){switch(g.bra=g.cursor,g.limit_backward=e,i){case 1:if(!g.in_grouping_b(p,97,246))return;break;case 
2:if(!(t<=g.cursor))return}g.slice_del()}else g.limit_backward=e}function v(){return g.find_among_b(m,7)}function C(){return g.eq_s_b(1,"i")&&g.in_grouping_b(h,97,246)}this.setCurrent=function(i){g.setCurrent(i)},this.getCurrent=function(){return g.getCurrent()},this.stem=function(){var e,r=g.cursor;return s=g.limit,t=s,j()||(s=g.cursor,j()||(t=g.cursor)),i=!1,g.limit_backward=r,g.cursor=g.limit,q(),g.cursor=g.limit,function(){var i,e,r;if(g.cursor>=s)if(e=g.limit_backward,g.limit_backward=s,g.ket=g.cursor,i=g.find_among_b(c,9))switch(g.bra=g.cursor,g.limit_backward=e,i){case 1:r=g.limit-g.cursor,g.eq_s_b(1,"k")||(g.cursor=g.limit-r,g.slice_del());break;case 2:g.slice_del(),g.ket=g.cursor,g.eq_s_b(3,"kse")&&(g.bra=g.cursor,g.slice_from("ksi"));break;case 3:g.slice_del();break;case 4:g.find_among_b(o,6)&&g.slice_del();break;case 5:g.find_among_b(a,6)&&g.slice_del();break;case 6:g.find_among_b(u,2)&&g.slice_del()}else g.limit_backward=e}(),g.cursor=g.limit,function(){var e,r,n;if(g.cursor>=s)if(r=g.limit_backward,g.limit_backward=s,g.ket=g.cursor,e=g.find_among_b(w,30)){switch(g.bra=g.cursor,g.limit_backward=r,e){case 1:if(!g.eq_s_b(1,"a"))return;break;case 2:case 9:if(!g.eq_s_b(1,"e"))return;break;case 3:if(!g.eq_s_b(1,"i"))return;break;case 4:if(!g.eq_s_b(1,"o"))return;break;case 5:if(!g.eq_s_b(1,"ä"))return;break;case 6:if(!g.eq_s_b(1,"ö"))return;break;case 7:if(n=g.limit-g.cursor,!v()&&(g.cursor=g.limit-n,!g.eq_s_b(2,"ie"))){g.cursor=g.limit-n;break}if(g.cursor=g.limit-n,g.cursor<=g.limit_backward){g.cursor=g.limit-n;break}g.cursor--,g.bra=g.cursor;break;case 8:if(!g.in_grouping_b(f,97,246)||!g.out_grouping_b(f,97,246))return}g.slice_del(),i=!0}else g.limit_backward=r}(),g.cursor=g.limit,function(){var i,e,r;if(g.cursor>=t)if(e=g.limit_backward,g.limit_backward=t,g.ket=g.cursor,i=g.find_among_b(_,14)){if(g.bra=g.cursor,g.limit_backward=e,1==i){if(r=g.limit-g.cursor,g.eq_s_b(2,"po"))return;g.cursor=g.limit-r}g.slice_del()}else g.limit_backward=e}(),g.cursor=g.limit,i?(g.cursor>=s&&(e=g.limit_backward,g.limit_backward=s,g.ket=g.cursor,g.find_among_b(k,2)?(g.bra=g.cursor,g.limit_backward=e,g.slice_del()):g.limit_backward=e),g.cursor=g.limit):(g.cursor=g.limit,function(){var i,e,r,n,l,o;if(g.cursor>=s){if(e=g.limit_backward,g.limit_backward=s,g.ket=g.cursor,g.eq_s_b(1,"t")&&(g.bra=g.cursor,r=g.limit-g.cursor,g.in_grouping_b(f,97,246)&&(g.cursor=g.limit-r,g.slice_del(),g.limit_backward=e,n=g.limit-g.cursor,g.cursor>=t&&(g.cursor=t,l=g.limit_backward,g.limit_backward=g.cursor,g.cursor=g.limit-n,g.ket=g.cursor,i=g.find_among_b(b,2))))){if(g.bra=g.cursor,g.limit_backward=l,1==i){if(o=g.limit-g.cursor,g.eq_s_b(2,"po"))return;g.cursor=g.limit-o}return void g.slice_del()}g.limit_backward=e}}(),g.cursor=g.limit),function(){var 
i,e,r,t;if(g.cursor>=s){for(i=g.limit_backward,g.limit_backward=s,e=g.limit-g.cursor,v()&&(g.cursor=g.limit-e,g.ket=g.cursor,g.cursor>g.limit_backward&&(g.cursor--,g.bra=g.cursor,g.slice_del())),g.cursor=g.limit-e,g.ket=g.cursor,g.in_grouping_b(d,97,228)&&(g.bra=g.cursor,g.out_grouping_b(f,97,246)&&g.slice_del()),g.cursor=g.limit-e,g.ket=g.cursor,g.eq_s_b(1,"j")&&(g.bra=g.cursor,r=g.limit-g.cursor,g.eq_s_b(1,"o")?g.slice_del():(g.cursor=g.limit-r,g.eq_s_b(1,"u")&&g.slice_del())),g.cursor=g.limit-e,g.ket=g.cursor,g.eq_s_b(1,"o")&&(g.bra=g.cursor,g.eq_s_b(1,"j")&&g.slice_del()),g.cursor=g.limit-e,g.limit_backward=i;;){if(t=g.limit-g.cursor,g.out_grouping_b(f,97,246)){g.cursor=g.limit-t;break}if(g.cursor=g.limit-t,g.cursor<=g.limit_backward)return;g.cursor--}g.ket=g.cursor,g.cursor>g.limit_backward&&(g.cursor--,g.bra=g.cursor,n=g.slice_to(),g.eq_v_b(n)&&g.slice_del())}}(),!0}},function(i){return"function"==typeof i.update?i.update(function(i){return n.setCurrent(i),n.stem(),n.getCurrent()}):(n.setCurrent(i),n.stem(),n.getCurrent())}),i.Pipeline.registerFunction(i.fi.stemmer,"stemmer-fi"),i.fi.stopWordFilter=i.generateStopWordFilter("ei eivät emme en et ette että he heidän heidät heihin heille heillä heiltä heissä heistä heitä hän häneen hänelle hänellä häneltä hänen hänessä hänestä hänet häntä itse ja johon joiden joihin joiksi joilla joille joilta joina joissa joista joita joka joksi jolla jolle jolta jona jonka jos jossa josta jota jotka kanssa keiden keihin keiksi keille keillä keiltä keinä keissä keistä keitä keneen keneksi kenelle kenellä keneltä kenen kenenä kenessä kenestä kenet ketkä ketkä ketä koska kuin kuka kun me meidän meidät meihin meille meillä meiltä meissä meistä meitä mihin miksi mikä mille millä miltä minkä minkä minua minulla minulle minulta minun minussa minusta minut minuun minä minä missä mistä mitkä mitä mukaan mutta ne niiden niihin niiksi niille niillä niiltä niin niin niinä niissä niistä niitä noiden noihin noiksi noilla noille noilta noin noina noissa noista noita nuo nyt näiden näihin näiksi näille näillä näiltä näinä näissä näistä näitä nämä ole olemme olen olet olette oli olimme olin olisi olisimme olisin olisit olisitte olisivat olit olitte olivat olla olleet ollut on ovat poikki se sekä sen siihen siinä siitä siksi sille sillä sillä siltä sinua sinulla sinulle sinulta sinun sinussa sinusta sinut sinuun sinä sinä sitä tai te teidän teidät teihin teille teillä teiltä teissä teistä teitä tuo tuohon tuoksi tuolla tuolle tuolta tuon tuona tuossa tuosta tuota tähän täksi tälle tällä tältä tämä tämän tänä tässä tästä tätä vaan vai vaikka yli".split(" ")),i.Pipeline.registerFunction(i.fi.stopWordFilter,"stopWordFilter-fi")}}); \ No newline at end of file diff --git a/docs/material/assets/javascripts/lunr/lunr.fr.js b/docs/material/assets/javascripts/lunr/lunr.fr.js new file mode 100644 index 00000000..3a9b9b17 --- /dev/null +++ b/docs/material/assets/javascripts/lunr/lunr.fr.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var r,s,i;e.fr=function(){this.pipeline.reset(),this.pipeline.add(e.fr.trimmer,e.fr.stopWordFilter,e.fr.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.fr.stemmer))},e.fr.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.fr.trimmer=e.trimmerSupport.generateTrimmer(e.fr.wordCharacters),e.Pipeline.registerFunction(e.fr.trimmer,"trimmer-fr"),e.fr.stemmer=(r=e.stemmerSupport.Among,s=e.stemmerSupport.SnowballProgram,i=new function(){var e,i,n,t=[new r("col",-1,-1),new r("par",-1,-1),new r("tap",-1,-1)],u=[new r("",-1,4),new r("I",0,1),new r("U",0,2),new r("Y",0,3)],o=[new r("iqU",-1,3),new r("abl",-1,3),new r("Ièr",-1,4),new r("ièr",-1,4),new r("eus",-1,2),new r("iv",-1,1)],c=[new r("ic",-1,2),new r("abil",-1,1),new r("iv",-1,3)],a=[new r("iqUe",-1,1),new r("atrice",-1,2),new r("ance",-1,1),new r("ence",-1,5),new r("logie",-1,3),new r("able",-1,1),new r("isme",-1,1),new r("euse",-1,11),new r("iste",-1,1),new r("ive",-1,8),new r("if",-1,8),new r("usion",-1,4),new r("ation",-1,2),new r("ution",-1,4),new r("ateur",-1,2),new r("iqUes",-1,1),new r("atrices",-1,2),new r("ances",-1,1),new r("ences",-1,5),new r("logies",-1,3),new r("ables",-1,1),new r("ismes",-1,1),new r("euses",-1,11),new r("istes",-1,1),new r("ives",-1,8),new r("ifs",-1,8),new r("usions",-1,4),new r("ations",-1,2),new r("utions",-1,4),new r("ateurs",-1,2),new r("ments",-1,15),new r("ements",30,6),new r("issements",31,12),new r("ités",-1,7),new r("ment",-1,15),new r("ement",34,6),new r("issement",35,12),new r("amment",34,13),new r("emment",34,14),new r("aux",-1,10),new r("eaux",39,9),new r("eux",-1,1),new r("ité",-1,7)],l=[new r("ira",-1,1),new r("ie",-1,1),new r("isse",-1,1),new r("issante",-1,1),new r("i",-1,1),new r("irai",4,1),new r("ir",-1,1),new r("iras",-1,1),new r("ies",-1,1),new r("îmes",-1,1),new r("isses",-1,1),new r("issantes",-1,1),new r("îtes",-1,1),new r("is",-1,1),new r("irais",13,1),new r("issais",13,1),new r("irions",-1,1),new r("issions",-1,1),new r("irons",-1,1),new r("issons",-1,1),new r("issants",-1,1),new r("it",-1,1),new r("irait",21,1),new r("issait",21,1),new r("issant",-1,1),new r("iraIent",-1,1),new r("issaIent",-1,1),new r("irent",-1,1),new r("issent",-1,1),new r("iront",-1,1),new r("ît",-1,1),new r("iriez",-1,1),new r("issiez",-1,1),new r("irez",-1,1),new r("issez",-1,1)],w=[new r("a",-1,3),new r("era",0,2),new r("asse",-1,3),new r("ante",-1,3),new r("ée",-1,2),new r("ai",-1,3),new r("erai",5,2),new r("er",-1,2),new r("as",-1,3),new r("eras",8,2),new r("âmes",-1,3),new r("asses",-1,3),new r("antes",-1,3),new r("âtes",-1,3),new r("ées",-1,2),new r("ais",-1,3),new r("erais",15,2),new r("ions",-1,1),new r("erions",17,2),new r("assions",17,3),new r("erons",-1,2),new r("ants",-1,3),new r("és",-1,2),new r("ait",-1,3),new r("erait",23,2),new r("ant",-1,3),new r("aIent",-1,3),new r("eraIent",26,2),new r("èrent",-1,2),new r("assent",-1,3),new r("eront",-1,2),new r("ât",-1,3),new r("ez",-1,2),new r("iez",32,2),new r("eriez",33,2),new r("assiez",33,3),new r("erez",32,2),new r("é",-1,2)],f=[new r("e",-1,3),new r("Ière",0,2),new r("ière",0,2),new r("ion",-1,1),new r("Ier",-1,2),new r("ier",-1,2),new r("ë",-1,4)],m=[new r("ell",-1,-1),new r("eill",-1,-1),new r("enn",-1,-1),new r("onn",-1,-1),new r("ett",-1,-1)],_=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,130,103,8,5],b=[1,65,20,0,0,0,0,0,0,0,0,0,0,0,0,0,128],d=new s;function 
k(e,r,s){return!(!d.eq_s(1,e)||(d.ket=d.cursor,!d.in_grouping(_,97,251)))&&(d.slice_from(r),d.cursor=s,!0)}function p(e,r,s){return!!d.eq_s(1,e)&&(d.ket=d.cursor,d.slice_from(r),d.cursor=s,!0)}function g(){for(;!d.in_grouping(_,97,251);){if(d.cursor>=d.limit)return!0;d.cursor++}for(;!d.out_grouping(_,97,251);){if(d.cursor>=d.limit)return!0;d.cursor++}return!1}function q(){return n<=d.cursor}function v(){return i<=d.cursor}function h(){return e<=d.cursor}function z(){if(!function(){var e,r;if(d.ket=d.cursor,e=d.find_among_b(a,43)){switch(d.bra=d.cursor,e){case 1:if(!h())return!1;d.slice_del();break;case 2:if(!h())return!1;d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"ic")&&(d.bra=d.cursor,h()?d.slice_del():d.slice_from("iqU"));break;case 3:if(!h())return!1;d.slice_from("log");break;case 4:if(!h())return!1;d.slice_from("u");break;case 5:if(!h())return!1;d.slice_from("ent");break;case 6:if(!q())return!1;if(d.slice_del(),d.ket=d.cursor,e=d.find_among_b(o,6))switch(d.bra=d.cursor,e){case 1:h()&&(d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"at")&&(d.bra=d.cursor,h()&&d.slice_del()));break;case 2:h()?d.slice_del():v()&&d.slice_from("eux");break;case 3:h()&&d.slice_del();break;case 4:q()&&d.slice_from("i")}break;case 7:if(!h())return!1;if(d.slice_del(),d.ket=d.cursor,e=d.find_among_b(c,3))switch(d.bra=d.cursor,e){case 1:h()?d.slice_del():d.slice_from("abl");break;case 2:h()?d.slice_del():d.slice_from("iqU");break;case 3:h()&&d.slice_del()}break;case 8:if(!h())return!1;if(d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"at")&&(d.bra=d.cursor,h()&&(d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"ic")))){d.bra=d.cursor,h()?d.slice_del():d.slice_from("iqU");break}break;case 9:d.slice_from("eau");break;case 10:if(!v())return!1;d.slice_from("al");break;case 11:if(h())d.slice_del();else{if(!v())return!1;d.slice_from("eux")}break;case 12:if(!v()||!d.out_grouping_b(_,97,251))return!1;d.slice_del();break;case 13:return q()&&d.slice_from("ant"),!1;case 14:return q()&&d.slice_from("ent"),!1;case 15:return r=d.limit-d.cursor,d.in_grouping_b(_,97,251)&&q()&&(d.cursor=d.limit-r,d.slice_del()),!1}return!0}return!1}()&&(d.cursor=d.limit,!function(){var e,r;if(d.cursor=n){if(s=d.limit_backward,d.limit_backward=n,d.ket=d.cursor,e=d.find_among_b(f,7))switch(d.bra=d.cursor,e){case 1:if(h()){if(i=d.limit-d.cursor,!d.eq_s_b(1,"s")&&(d.cursor=d.limit-i,!d.eq_s_b(1,"t")))break;d.slice_del()}break;case 2:d.slice_from("i");break;case 3:d.slice_del();break;case 4:d.eq_s_b(2,"gu")&&d.slice_del()}d.limit_backward=s}}();d.cursor=d.limit,d.ket=d.cursor,d.eq_s_b(1,"Y")?(d.bra=d.cursor,d.slice_from("i")):(d.cursor=d.limit,d.eq_s_b(1,"ç")&&(d.bra=d.cursor,d.slice_from("c")))}this.setCurrent=function(e){d.setCurrent(e)},this.getCurrent=function(){return d.getCurrent()},this.stem=function(){var r,s=d.cursor;return function(){for(var e,r;;){if(e=d.cursor,d.in_grouping(_,97,251)){if(d.bra=d.cursor,r=d.cursor,k("u","U",e))continue;if(d.cursor=r,k("i","I",e))continue;if(d.cursor=r,p("y","Y",e))continue}if(d.cursor=e,d.bra=e,!k("y","Y",e)){if(d.cursor=e,d.eq_s(1,"q")&&(d.bra=d.cursor,p("u","U",e)))continue;if(d.cursor=e,e>=d.limit)return;d.cursor++}}}(),d.cursor=s,function(){var 
r=d.cursor;if(n=d.limit,i=n,e=n,d.in_grouping(_,97,251)&&d.in_grouping(_,97,251)&&d.cursor=d.limit){d.cursor=n;break}d.cursor++}while(!d.in_grouping(_,97,251))}n=d.cursor,d.cursor=r,g()||(i=d.cursor,g()||(e=d.cursor))}(),d.limit_backward=s,d.cursor=d.limit,z(),d.cursor=d.limit,r=d.limit-d.cursor,d.find_among_b(m,5)&&(d.cursor=d.limit-r,d.ket=d.cursor,d.cursor>d.limit_backward&&(d.cursor--,d.bra=d.cursor,d.slice_del())),d.cursor=d.limit,function(){for(var e,r=1;d.out_grouping_b(_,97,251);)r--;if(r<=0){if(d.ket=d.cursor,e=d.limit-d.cursor,!d.eq_s_b(1,"é")&&(d.cursor=d.limit-e,!d.eq_s_b(1,"è")))return;d.bra=d.cursor,d.slice_from("e")}}(),d.cursor=d.limit_backward,function(){for(var e,r;r=d.cursor,d.bra=r,e=d.find_among(u,4);)switch(d.ket=d.cursor,e){case 1:d.slice_from("i");break;case 2:d.slice_from("u");break;case 3:d.slice_from("y");break;case 4:if(d.cursor>=d.limit)return;d.cursor++}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}),e.Pipeline.registerFunction(e.fr.stemmer,"stemmer-fr"),e.fr.stopWordFilter=e.generateStopWordFilter("ai aie aient aies ait as au aura aurai auraient aurais aurait auras aurez auriez aurions aurons auront aux avaient avais avait avec avez aviez avions avons ayant ayez ayons c ce ceci celà ces cet cette d dans de des du elle en es est et eu eue eues eurent eus eusse eussent eusses eussiez eussions eut eux eûmes eût eûtes furent fus fusse fussent fusses fussiez fussions fut fûmes fût fûtes ici il ils j je l la le les leur leurs lui m ma mais me mes moi mon même n ne nos notre nous on ont ou par pas pour qu que quel quelle quelles quels qui s sa sans se sera serai seraient serais serait seras serez seriez serions serons seront ses soi soient sois soit sommes son sont soyez soyons suis sur t ta te tes toi ton tu un une vos votre vous y à étaient étais était étant étiez étions été étée étées étés êtes".split(" ")),e.Pipeline.registerFunction(e.fr.stopWordFilter,"stopWordFilter-fr")}}); \ No newline at end of file diff --git a/docs/material/assets/javascripts/lunr/lunr.hu.js b/docs/material/assets/javascripts/lunr/lunr.hu.js new file mode 100644 index 00000000..fa704a69 --- /dev/null +++ b/docs/material/assets/javascripts/lunr/lunr.hu.js @@ -0,0 +1 @@ +!function(e,n){"function"==typeof define&&define.amd?define(n):"object"==typeof exports?module.exports=n():n()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var n,r,i;e.hu=function(){this.pipeline.reset(),this.pipeline.add(e.hu.trimmer,e.hu.stopWordFilter,e.hu.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hu.stemmer))},e.hu.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.hu.trimmer=e.trimmerSupport.generateTrimmer(e.hu.wordCharacters),e.Pipeline.registerFunction(e.hu.trimmer,"trimmer-hu"),e.hu.stemmer=(n=e.stemmerSupport.Among,r=e.stemmerSupport.SnowballProgram,i=new function(){var e,i=[new n("cs",-1,-1),new n("dzs",-1,-1),new n("gy",-1,-1),new n("ly",-1,-1),new n("ny",-1,-1),new n("sz",-1,-1),new n("ty",-1,-1),new n("zs",-1,-1)],a=[new n("á",-1,1),new n("é",-1,2)],t=[new n("bb",-1,-1),new n("cc",-1,-1),new n("dd",-1,-1),new n("ff",-1,-1),new n("gg",-1,-1),new n("jj",-1,-1),new n("kk",-1,-1),new n("ll",-1,-1),new n("mm",-1,-1),new n("nn",-1,-1),new n("pp",-1,-1),new n("rr",-1,-1),new n("ccs",-1,-1),new n("ss",-1,-1),new n("zzs",-1,-1),new n("tt",-1,-1),new n("vv",-1,-1),new n("ggy",-1,-1),new n("lly",-1,-1),new n("nny",-1,-1),new n("tty",-1,-1),new n("ssz",-1,-1),new n("zz",-1,-1)],s=[new n("al",-1,1),new n("el",-1,2)],c=[new n("ba",-1,-1),new n("ra",-1,-1),new n("be",-1,-1),new n("re",-1,-1),new n("ig",-1,-1),new n("nak",-1,-1),new n("nek",-1,-1),new n("val",-1,-1),new n("vel",-1,-1),new n("ul",-1,-1),new n("nál",-1,-1),new n("nél",-1,-1),new n("ból",-1,-1),new n("ról",-1,-1),new n("tól",-1,-1),new n("bõl",-1,-1),new n("rõl",-1,-1),new n("tõl",-1,-1),new n("ül",-1,-1),new n("n",-1,-1),new n("an",19,-1),new n("ban",20,-1),new n("en",19,-1),new n("ben",22,-1),new n("képpen",22,-1),new n("on",19,-1),new n("ön",19,-1),new n("képp",-1,-1),new n("kor",-1,-1),new n("t",-1,-1),new n("at",29,-1),new n("et",29,-1),new n("ként",29,-1),new n("anként",32,-1),new n("enként",32,-1),new n("onként",32,-1),new n("ot",29,-1),new n("ért",29,-1),new n("öt",29,-1),new n("hez",-1,-1),new n("hoz",-1,-1),new n("höz",-1,-1),new n("vá",-1,-1),new n("vé",-1,-1)],w=[new n("án",-1,2),new n("én",-1,1),new n("ánként",-1,3)],o=[new n("stul",-1,2),new n("astul",0,1),new n("ástul",0,3),new n("stül",-1,2),new n("estül",3,1),new n("éstül",3,4)],l=[new n("á",-1,1),new n("é",-1,2)],u=[new n("k",-1,7),new n("ak",0,4),new n("ek",0,6),new n("ok",0,5),new n("ák",0,1),new n("ék",0,2),new n("ök",0,3)],m=[new n("éi",-1,7),new n("áéi",0,6),new n("ééi",0,5),new n("é",-1,9),new n("ké",3,4),new n("aké",4,1),new n("eké",4,1),new n("oké",4,1),new n("áké",4,3),new n("éké",4,2),new n("öké",4,1),new n("éé",3,8)],k=[new n("a",-1,18),new n("ja",0,17),new n("d",-1,16),new n("ad",2,13),new n("ed",2,13),new n("od",2,13),new n("ád",2,14),new n("éd",2,15),new n("öd",2,13),new n("e",-1,18),new n("je",9,17),new n("nk",-1,4),new n("unk",11,1),new n("ánk",11,2),new n("énk",11,3),new n("ünk",11,1),new n("uk",-1,8),new n("juk",16,7),new n("ájuk",17,5),new n("ük",-1,8),new n("jük",19,7),new n("éjük",20,6),new n("m",-1,12),new n("am",22,9),new n("em",22,9),new n("om",22,9),new n("ám",22,10),new n("ém",22,11),new n("o",-1,18),new n("á",-1,19),new n("é",-1,20)],f=[new n("id",-1,10),new n("aid",0,9),new n("jaid",1,6),new n("eid",0,9),new n("jeid",3,6),new n("áid",0,7),new n("éid",0,8),new n("i",-1,15),new n("ai",7,14),new n("jai",8,11),new n("ei",7,14),new n("jei",10,11),new n("ái",7,12),new n("éi",7,13),new n("itek",-1,24),new n("eitek",14,21),new n("jeitek",15,20),new n("éitek",14,23),new n("ik",-1,29),new n("aik",18,26),new n("jaik",19,25),new 
n("eik",18,26),new n("jeik",21,25),new n("áik",18,27),new n("éik",18,28),new n("ink",-1,20),new n("aink",25,17),new n("jaink",26,16),new n("eink",25,17),new n("jeink",28,16),new n("áink",25,18),new n("éink",25,19),new n("aitok",-1,21),new n("jaitok",32,20),new n("áitok",-1,22),new n("im",-1,5),new n("aim",35,4),new n("jaim",36,1),new n("eim",35,4),new n("jeim",38,1),new n("áim",35,2),new n("éim",35,3)],b=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,52,14],d=new r;function g(){return e<=d.cursor}function h(){var e=d.limit-d.cursor;return!!d.find_among_b(t,23)&&(d.cursor=d.limit-e,!0)}function p(){if(d.cursor>d.limit_backward){d.cursor--,d.ket=d.cursor;var e=d.cursor-1;d.limit_backward<=e&&e<=d.limit&&(d.cursor=e,d.bra=e,d.slice_del())}}function _(){d.ket=d.cursor,d.find_among_b(c,44)&&(d.bra=d.cursor,g()&&(d.slice_del(),function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(a,2))&&(d.bra=d.cursor,g()))switch(e){case 1:d.slice_from("a");break;case 2:d.slice_from("e")}}()))}this.setCurrent=function(e){d.setCurrent(e)},this.getCurrent=function(){return d.getCurrent()},this.stem=function(){var n=d.cursor;return function(){var n,r=d.cursor;if(e=d.limit,d.in_grouping(b,97,252))for(;;){if(n=d.cursor,d.out_grouping(b,97,252))return d.cursor=n,d.find_among(i,8)||(d.cursor=n,n=d.limit)return void(e=n);d.cursor++}if(d.cursor=r,d.out_grouping(b,97,252)){for(;!d.in_grouping(b,97,252);){if(d.cursor>=d.limit)return;d.cursor++}e=d.cursor}}(),d.limit_backward=n,d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(s,2))&&(d.bra=d.cursor,g())){if((1==e||2==e)&&!h())return;d.slice_del(),p()}}(),d.cursor=d.limit,_(),d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(w,3))&&(d.bra=d.cursor,g()))switch(e){case 1:d.slice_from("e");break;case 2:case 3:d.slice_from("a")}}(),d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(o,6))&&(d.bra=d.cursor,g()))switch(e){case 1:case 2:d.slice_del();break;case 3:d.slice_from("a");break;case 4:d.slice_from("e")}}(),d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(l,2))&&(d.bra=d.cursor,g())){if((1==e||2==e)&&!h())return;d.slice_del(),p()}}(),d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(m,12))&&(d.bra=d.cursor,g()))switch(e){case 1:case 4:case 7:case 9:d.slice_del();break;case 2:case 5:case 8:d.slice_from("e");break;case 3:case 6:d.slice_from("a")}}(),d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(k,31))&&(d.bra=d.cursor,g()))switch(e){case 1:case 4:case 7:case 8:case 9:case 12:case 13:case 16:case 17:case 18:d.slice_del();break;case 2:case 5:case 10:case 14:case 19:d.slice_from("a");break;case 3:case 6:case 11:case 15:case 20:d.slice_from("e")}}(),d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(f,42))&&(d.bra=d.cursor,g()))switch(e){case 1:case 4:case 5:case 6:case 9:case 10:case 11:case 14:case 15:case 16:case 17:case 20:case 21:case 24:case 25:case 26:case 29:d.slice_del();break;case 2:case 7:case 12:case 18:case 22:case 27:d.slice_from("a");break;case 3:case 8:case 13:case 19:case 23:case 28:d.slice_from("e")}}(),d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,(e=d.find_among_b(u,7))&&(d.bra=d.cursor,g()))switch(e){case 1:d.slice_from("a");break;case 2:d.slice_from("e");break;case 3:case 4:case 5:case 6:case 7:d.slice_del()}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return 
i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}),e.Pipeline.registerFunction(e.hu.stemmer,"stemmer-hu"),e.hu.stopWordFilter=e.generateStopWordFilter("a abban ahhoz ahogy ahol aki akik akkor alatt amely amelyek amelyekben amelyeket amelyet amelynek ami amikor amit amolyan amíg annak arra arról az azok azon azonban azt aztán azután azzal azért be belül benne bár cikk cikkek cikkeket csak de e ebben eddig egy egyes egyetlen egyik egyre egyéb egész ehhez ekkor el ellen elsõ elég elõ elõször elõtt emilyen ennek erre ez ezek ezen ezt ezzel ezért fel felé hanem hiszen hogy hogyan igen ill ill. illetve ilyen ilyenkor ismét ison itt jobban jó jól kell kellett keressünk keresztül ki kívül között közül legalább legyen lehet lehetett lenne lenni lesz lett maga magát majd majd meg mellett mely melyek mert mi mikor milyen minden mindenki mindent mindig mint mintha mit mivel miért most már más másik még míg nagy nagyobb nagyon ne nekem neki nem nincs néha néhány nélkül olyan ott pedig persze rá s saját sem semmi sok sokat sokkal szemben szerint szinte számára talán tehát teljes tovább továbbá több ugyanis utolsó után utána vagy vagyis vagyok valaki valami valamint való van vannak vele vissza viszont volna volt voltak voltam voltunk által általában át én éppen és így õ õk õket össze úgy új újabb újra".split(" ")),e.Pipeline.registerFunction(e.hu.stopWordFilter,"stopWordFilter-hu")}}); \ No newline at end of file diff --git a/docs/material/assets/javascripts/lunr/lunr.it.js b/docs/material/assets/javascripts/lunr/lunr.it.js new file mode 100644 index 00000000..29307338 --- /dev/null +++ b/docs/material/assets/javascripts/lunr/lunr.it.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. 
Please include / require Lunr stemmer support before this script.");var r,i,n;e.it=function(){this.pipeline.reset(),this.pipeline.add(e.it.trimmer,e.it.stopWordFilter,e.it.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.it.stemmer))},e.it.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA-Za-z",e.it.trimmer=e.trimmerSupport.generateTrimmer(e.it.wordCharacters),e.Pipeline.registerFunction(e.it.trimmer,"trimmer-it"),e.it.stemmer=(r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){var e,n,o,t=[new r("",-1,7),new r("qu",0,6),new r("á",0,1),new r("é",0,2),new r("í",0,3),new r("ó",0,4),new r("ú",0,5)],s=[new r("",-1,3),new r("I",0,1),new r("U",0,2)],a=[new r("la",-1,-1),new r("cela",0,-1),new r("gliela",0,-1),new r("mela",0,-1),new r("tela",0,-1),new r("vela",0,-1),new r("le",-1,-1),new r("cele",6,-1),new r("gliele",6,-1),new r("mele",6,-1),new r("tele",6,-1),new r("vele",6,-1),new r("ne",-1,-1),new r("cene",12,-1),new r("gliene",12,-1),new r("mene",12,-1),new r("sene",12,-1),new r("tene",12,-1),new r("vene",12,-1),new r("ci",-1,-1),new r("li",-1,-1),new r("celi",20,-1),new r("glieli",20,-1),new r("meli",20,-1),new r("teli",20,-1),new r("veli",20,-1),new r("gli",20,-1),new r("mi",-1,-1),new r("si",-1,-1),new r("ti",-1,-1),new r("vi",-1,-1),new r("lo",-1,-1),new r("celo",31,-1),new r("glielo",31,-1),new r("melo",31,-1),new r("telo",31,-1),new r("velo",31,-1)],u=[new r("ando",-1,1),new r("endo",-1,1),new r("ar",-1,2),new r("er",-1,2),new r("ir",-1,2)],c=[new r("ic",-1,-1),new r("abil",-1,-1),new r("os",-1,-1),new r("iv",-1,1)],w=[new r("ic",-1,1),new r("abil",-1,1),new r("iv",-1,1)],l=[new r("ica",-1,1),new r("logia",-1,3),new r("osa",-1,1),new r("ista",-1,1),new r("iva",-1,9),new r("anza",-1,1),new r("enza",-1,5),new r("ice",-1,1),new r("atrice",7,1),new r("iche",-1,1),new r("logie",-1,3),new r("abile",-1,1),new r("ibile",-1,1),new r("usione",-1,4),new r("azione",-1,2),new r("uzione",-1,4),new r("atore",-1,2),new r("ose",-1,1),new r("ante",-1,1),new r("mente",-1,1),new r("amente",19,7),new r("iste",-1,1),new r("ive",-1,9),new r("anze",-1,1),new r("enze",-1,5),new r("ici",-1,1),new r("atrici",25,1),new r("ichi",-1,1),new r("abili",-1,1),new r("ibili",-1,1),new r("ismi",-1,1),new r("usioni",-1,4),new r("azioni",-1,2),new r("uzioni",-1,4),new r("atori",-1,2),new r("osi",-1,1),new r("anti",-1,1),new r("amenti",-1,6),new r("imenti",-1,6),new r("isti",-1,1),new r("ivi",-1,9),new r("ico",-1,1),new r("ismo",-1,1),new r("oso",-1,1),new r("amento",-1,6),new r("imento",-1,6),new r("ivo",-1,9),new r("ità",-1,8),new r("istà",-1,1),new r("istè",-1,1),new r("istì",-1,1)],m=[new r("isca",-1,1),new r("enda",-1,1),new r("ata",-1,1),new r("ita",-1,1),new r("uta",-1,1),new r("ava",-1,1),new r("eva",-1,1),new r("iva",-1,1),new r("erebbe",-1,1),new r("irebbe",-1,1),new r("isce",-1,1),new r("ende",-1,1),new r("are",-1,1),new r("ere",-1,1),new r("ire",-1,1),new r("asse",-1,1),new r("ate",-1,1),new r("avate",16,1),new r("evate",16,1),new r("ivate",16,1),new r("ete",-1,1),new r("erete",20,1),new r("irete",20,1),new r("ite",-1,1),new r("ereste",-1,1),new r("ireste",-1,1),new r("ute",-1,1),new r("erai",-1,1),new r("irai",-1,1),new r("isci",-1,1),new r("endi",-1,1),new r("erei",-1,1),new r("irei",-1,1),new r("assi",-1,1),new r("ati",-1,1),new r("iti",-1,1),new r("eresti",-1,1),new r("iresti",-1,1),new r("uti",-1,1),new r("avi",-1,1),new r("evi",-1,1),new r("ivi",-1,1),new r("isco",-1,1),new r("ando",-1,1),new 
r("endo",-1,1),new r("Yamo",-1,1),new r("iamo",-1,1),new r("avamo",-1,1),new r("evamo",-1,1),new r("ivamo",-1,1),new r("eremo",-1,1),new r("iremo",-1,1),new r("assimo",-1,1),new r("ammo",-1,1),new r("emmo",-1,1),new r("eremmo",54,1),new r("iremmo",54,1),new r("immo",-1,1),new r("ano",-1,1),new r("iscano",58,1),new r("avano",58,1),new r("evano",58,1),new r("ivano",58,1),new r("eranno",-1,1),new r("iranno",-1,1),new r("ono",-1,1),new r("iscono",65,1),new r("arono",65,1),new r("erono",65,1),new r("irono",65,1),new r("erebbero",-1,1),new r("irebbero",-1,1),new r("assero",-1,1),new r("essero",-1,1),new r("issero",-1,1),new r("ato",-1,1),new r("ito",-1,1),new r("uto",-1,1),new r("avo",-1,1),new r("evo",-1,1),new r("ivo",-1,1),new r("ar",-1,1),new r("ir",-1,1),new r("erà",-1,1),new r("irà",-1,1),new r("erò",-1,1),new r("irò",-1,1)],f=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2,1],v=[17,65,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2],b=[17],d=new i;function _(e,r,i){return!(!d.eq_s(1,e)||(d.ket=d.cursor,!d.in_grouping(f,97,249)))&&(d.slice_from(r),d.cursor=i,!0)}function g(e){if(d.cursor=e,!d.in_grouping(f,97,249))return!1;for(;!d.out_grouping(f,97,249);){if(d.cursor>=d.limit)return!1;d.cursor++}return!0}function p(){var e,r=d.cursor;if(!function(){if(d.in_grouping(f,97,249)){var e=d.cursor;if(d.out_grouping(f,97,249)){for(;!d.in_grouping(f,97,249);){if(d.cursor>=d.limit)return g(e);d.cursor++}return!0}return g(e)}return!1}()){if(d.cursor=r,!d.out_grouping(f,97,249))return;if(e=d.cursor,d.out_grouping(f,97,249)){for(;!d.in_grouping(f,97,249);){if(d.cursor>=d.limit)return d.cursor=e,void(d.in_grouping(f,97,249)&&d.cursor=d.limit)return;d.cursor++}o=d.cursor}function k(){for(;!d.in_grouping(f,97,249);){if(d.cursor>=d.limit)return!1;d.cursor++}for(;!d.out_grouping(f,97,249);){if(d.cursor>=d.limit)return!1;d.cursor++}return!0}function h(){return o<=d.cursor}function q(){return e<=d.cursor}function C(){var e;if(d.ket=d.cursor,!(e=d.find_among_b(l,51)))return!1;switch(d.bra=d.cursor,e){case 1:if(!q())return!1;d.slice_del();break;case 2:if(!q())return!1;d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"ic")&&(d.bra=d.cursor,q()&&d.slice_del());break;case 3:if(!q())return!1;d.slice_from("log");break;case 4:if(!q())return!1;d.slice_from("u");break;case 5:if(!q())return!1;d.slice_from("ente");break;case 6:if(!h())return!1;d.slice_del();break;case 7:if(!(n<=d.cursor))return!1;d.slice_del(),d.ket=d.cursor,(e=d.find_among_b(c,4))&&(d.bra=d.cursor,q()&&(d.slice_del(),1==e&&(d.ket=d.cursor,d.eq_s_b(2,"at")&&(d.bra=d.cursor,q()&&d.slice_del()))));break;case 8:if(!q())return!1;d.slice_del(),d.ket=d.cursor,(e=d.find_among_b(w,3))&&(d.bra=d.cursor,1==e&&q()&&d.slice_del());break;case 9:if(!q())return!1;d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"at")&&(d.bra=d.cursor,q()&&(d.slice_del(),d.ket=d.cursor,d.eq_s_b(2,"ic")&&(d.bra=d.cursor,q()&&d.slice_del())))}return!0}function z(){var e;e=d.limit-d.cursor,d.ket=d.cursor,d.in_grouping_b(v,97,242)&&(d.bra=d.cursor,h()&&(d.slice_del(),d.ket=d.cursor,d.eq_s_b(1,"i")&&(d.bra=d.cursor,h())))?d.slice_del():d.cursor=d.limit-e,d.ket=d.cursor,d.eq_s_b(1,"h")&&(d.bra=d.cursor,d.in_grouping_b(b,99,103)&&h()&&d.slice_del())}this.setCurrent=function(e){d.setCurrent(e)},this.getCurrent=function(){return d.getCurrent()},this.stem=function(){var r,i,c,w=d.cursor;return function(){for(var e,r,i,n,o=d.cursor;;){if(d.bra=d.cursor,e=d.find_among(t,7))switch(d.ket=d.cursor,e){case 1:d.slice_from("à");continue;case 2:d.slice_from("è");continue;case 3:d.slice_from("ì");continue;case 
4:d.slice_from("ò");continue;case 5:d.slice_from("ù");continue;case 6:d.slice_from("qU");continue;case 7:if(d.cursor>=d.limit)break;d.cursor++;continue}break}for(d.cursor=o;;)for(r=d.cursor;;){if(i=d.cursor,d.in_grouping(f,97,249)){if(d.bra=d.cursor,n=d.cursor,_("u","U",i))break;if(d.cursor=n,_("i","I",i))break}if(d.cursor=i,d.cursor>=d.limit)return void(d.cursor=r);d.cursor++}}(),d.cursor=w,r=d.cursor,o=d.limit,n=o,e=o,p(),d.cursor=r,k()&&(n=d.cursor,k()&&(e=d.cursor)),d.limit_backward=w,d.cursor=d.limit,function(){var e;if(d.ket=d.cursor,d.find_among_b(a,37)&&(d.bra=d.cursor,(e=d.find_among_b(u,5))&&h()))switch(e){case 1:d.slice_del();break;case 2:d.slice_from("e")}}(),d.cursor=d.limit,C()||(d.cursor=d.limit,d.cursor>=o&&(c=d.limit_backward,d.limit_backward=o,d.ket=d.cursor,(i=d.find_among_b(m,87))&&(d.bra=d.cursor,1==i&&d.slice_del()),d.limit_backward=c)),d.cursor=d.limit,z(),d.cursor=d.limit_backward,function(){for(var e;d.bra=d.cursor,e=d.find_among(s,3);)switch(d.ket=d.cursor,e){case 1:d.slice_from("i");break;case 2:d.slice_from("u");break;case 3:if(d.cursor>=d.limit)return;d.cursor++}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}),e.Pipeline.registerFunction(e.it.stemmer,"stemmer-it"),e.it.stopWordFilter=e.generateStopWordFilter("a abbia abbiamo abbiano abbiate ad agl agli ai al all alla alle allo anche avemmo avendo avesse avessero avessi avessimo aveste avesti avete aveva avevamo avevano avevate avevi avevo avrai avranno avrebbe avrebbero avrei avremmo avremo avreste avresti avrete avrà avrò avuta avute avuti avuto c che chi ci coi col come con contro cui da dagl dagli dai dal dall dalla dalle dallo degl degli dei del dell della delle dello di dov dove e ebbe ebbero ebbi ed era erano eravamo eravate eri ero essendo faccia facciamo facciano facciate faccio facemmo facendo facesse facessero facessi facessimo faceste facesti faceva facevamo facevano facevate facevi facevo fai fanno farai faranno farebbe farebbero farei faremmo faremo fareste faresti farete farà farò fece fecero feci fosse fossero fossi fossimo foste fosti fu fui fummo furono gli ha hai hanno ho i il in io l la le lei li lo loro lui ma mi mia mie miei mio ne negl negli nei nel nell nella nelle nello noi non nostra nostre nostri nostro o per perché più quale quanta quante quanti quanto quella quelle quelli quello questa queste questi questo sarai saranno sarebbe sarebbero sarei saremmo saremo sareste saresti sarete sarà sarò se sei si sia siamo siano siate siete sono sta stai stando stanno starai staranno starebbe starebbero starei staremmo staremo stareste staresti starete starà starò stava stavamo stavano stavate stavi stavo stemmo stesse stessero stessi stessimo steste stesti stette stettero stetti stia stiamo stiano stiate sto su sua sue sugl sugli sui sul sull sulla sulle sullo suo suoi ti tra tu tua tue tuo tuoi tutti tutto un una uno vi voi vostra vostre vostri vostro è".split(" ")),e.Pipeline.registerFunction(e.it.stopWordFilter,"stopWordFilter-it")}}); \ No newline at end of file diff --git a/docs/material/assets/javascripts/lunr/lunr.jp.js b/docs/material/assets/javascripts/lunr/lunr.jp.js new file mode 100644 index 00000000..a33c3c71 --- /dev/null +++ b/docs/material/assets/javascripts/lunr/lunr.jp.js @@ -0,0 +1 @@ +!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw 
[... remainder of the single-line minified lunr.jp.js: a TinySegmenter-based Japanese tokenizer, a pass-through stemmer, and a Japanese stop-word filter, each registered with the Lunr pipeline; minified content not reproduced, and parts of it were lost in extraction ...]
diff --git a/docs/material/assets/javascripts/lunr/lunr.multi.js b/docs/material/assets/javascripts/lunr/lunr.multi.js
new file mode 100644
index 00000000..d3dbc860
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.multi.js
@@ -0,0 +1 @@
[single-line minified vendored asset: lunr.multiLanguage, which chains the trimmers, stop-word filters, and stemmers of several language packs into one search pipeline]
[the diff header of the next file was lost in extraction; its content (registers "stemmer-no" and "stopWordFilter-no") identifies it as docs/material/assets/javascripts/lunr/lunr.no.js]
[single-line minified vendored asset: Lunr Norwegian (no) language pack, with a Snowball stemmer and stop-word filter; its leading content was also lost in extraction]
diff --git a/docs/material/assets/javascripts/lunr/lunr.pt.js b/docs/material/assets/javascripts/lunr/lunr.pt.js
new file mode 100644
index 00000000..51035c96
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.pt.js
@@ -0,0 +1 @@
[single-line minified vendored asset: Lunr Portuguese (pt) language pack, with a trimmer, Snowball stemmer, and stop-word filter]
diff --git a/docs/material/assets/javascripts/lunr/lunr.ro.js b/docs/material/assets/javascripts/lunr/lunr.ro.js
new file mode 100644
index 00000000..155cb562
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.ro.js
@@ -0,0 +1 @@
[single-line minified vendored asset: Lunr Romanian (ro) language pack, with a trimmer, Snowball stemmer, and stop-word filter]
diff --git a/docs/material/assets/javascripts/lunr/lunr.ru.js b/docs/material/assets/javascripts/lunr/lunr.ru.js
new file mode 100644
index 00000000..078609ad
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.ru.js
@@ -0,0 +1 @@
[single-line minified vendored asset: Lunr Russian (ru) language pack, with a trimmer, Snowball stemmer, and stop-word filter]
[the diff header of the next file was lost in extraction; its content (Among, SnowballProgram, trimmerSupport.generateTrimmer) identifies it as the shared stemmer-support shim, presumably lunr.stemmer.support.js]
[single-line minified vendored asset: the Snowball runtime and trimmer generator required by all of the language packs above]
diff --git a/docs/material/assets/javascripts/lunr/lunr.sv.js b/docs/material/assets/javascripts/lunr/lunr.sv.js
new file mode 100644
index 00000000..4bb0f9f9
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.sv.js
@@ -0,0 +1 @@
[single-line minified vendored asset: Lunr Swedish (sv) language pack, with a trimmer, Snowball stemmer, and stop-word filter]
diff --git a/docs/material/assets/javascripts/lunr/lunr.tr.js b/docs/material/assets/javascripts/lunr/lunr.tr.js
new file mode 100644
index 00000000..c42b349e
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/lunr.tr.js
@@ -0,0 +1 @@
[single-line minified vendored asset: Lunr Turkish (tr) language pack, with a trimmer, suffix-stripping stemmer, and stop-word filter]
diff --git a/docs/material/assets/javascripts/lunr/tinyseg.js b/docs/material/assets/javascripts/lunr/tinyseg.js
new file mode 100644
index 00000000..f7ec6032
--- /dev/null
+++ b/docs/material/assets/javascripts/lunr/tinyseg.js
@@ -0,0 +1 @@
[single-line minified vendored asset: TinySegmenter, a compact dictionary-free Japanese word segmenter built from character-class feature weights, exported as lunr.TinySegmenter and consumed by lunr.jp.js]
diff --git a/docs/material/assets/javascripts/modernizr.1aa3b519.js b/docs/material/assets/javascripts/modernizr.1aa3b519.js
new file mode 100644
index 00000000..14e111fc
--- /dev/null
+++ b/docs/material/assets/javascripts/modernizr.1aa3b519.js
@@ -0,0 +1 @@
[single-line minified vendored asset: webpack-bundled Modernizr 3.5.0 feature-detection build used by the Material theme]
w=[],S={_version:"3.5.0",_config:{classPrefix:"",enableClasses:!0,enableJSClass:!0,usePrefixes:!0},_q:[],on:function(e,t){var n=this;setTimeout(function(){t(n[e])},0)},addTest:function(e,t,n){w.push({name:e,fn:t,options:n})},addAsyncTest:function(e){w.push({name:null,fn:e})}},C=function(){};C.prototype=S,C=new C;var b,x=[],_=t.documentElement,T="svg"===_.nodeName.toLowerCase();!function(){var e={}.hasOwnProperty;b=r(e,"undefined")||r(e.call,"undefined")?function(e,t){return t in e&&r(e.constructor.prototype[t],"undefined")}:function(t,n){return e.call(t,n)}}(),S._l={},S.on=function(e,t){this._l[e]||(this._l[e]=[]),this._l[e].push(t),C.hasOwnProperty(e)&&setTimeout(function(){C._trigger(e,C[e])},0)},S._trigger=function(e,t){if(this._l[e]){var n=this._l[e];setTimeout(function(){var e;for(e=0;e .md-nav__link { + color: inherit; } + +button[data-md-color-primary="pink"] { + background-color: #e91e63; } + +[data-md-color-primary="pink"] .md-typeset a { + color: #e91e63; } + +[data-md-color-primary="pink"] .md-header { + background-color: #e91e63; } + +[data-md-color-primary="pink"] .md-hero { + background-color: #e91e63; } + +[data-md-color-primary="pink"] .md-nav__link:active, +[data-md-color-primary="pink"] .md-nav__link--active { + color: #e91e63; } + +[data-md-color-primary="pink"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="purple"] { + background-color: #ab47bc; } + +[data-md-color-primary="purple"] .md-typeset a { + color: #ab47bc; } + +[data-md-color-primary="purple"] .md-header { + background-color: #ab47bc; } + +[data-md-color-primary="purple"] .md-hero { + background-color: #ab47bc; } + +[data-md-color-primary="purple"] .md-nav__link:active, +[data-md-color-primary="purple"] .md-nav__link--active { + color: #ab47bc; } + +[data-md-color-primary="purple"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="deep-purple"] { + background-color: #7e57c2; } + +[data-md-color-primary="deep-purple"] .md-typeset a { + color: #7e57c2; } + +[data-md-color-primary="deep-purple"] .md-header { + background-color: #7e57c2; } + +[data-md-color-primary="deep-purple"] .md-hero { + background-color: #7e57c2; } + +[data-md-color-primary="deep-purple"] .md-nav__link:active, +[data-md-color-primary="deep-purple"] .md-nav__link--active { + color: #7e57c2; } + +[data-md-color-primary="deep-purple"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="indigo"] { + background-color: #3f51b5; } + +[data-md-color-primary="indigo"] .md-typeset a { + color: #3f51b5; } + +[data-md-color-primary="indigo"] .md-header { + background-color: #3f51b5; } + +[data-md-color-primary="indigo"] .md-hero { + background-color: #3f51b5; } + +[data-md-color-primary="indigo"] .md-nav__link:active, +[data-md-color-primary="indigo"] .md-nav__link--active { + color: #3f51b5; } + +[data-md-color-primary="indigo"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="blue"] { + background-color: #2196f3; } + +[data-md-color-primary="blue"] .md-typeset a { + color: #2196f3; } + +[data-md-color-primary="blue"] .md-header { + background-color: #2196f3; } + +[data-md-color-primary="blue"] .md-hero { + background-color: #2196f3; } + +[data-md-color-primary="blue"] .md-nav__link:active, +[data-md-color-primary="blue"] .md-nav__link--active { + color: #2196f3; } + +[data-md-color-primary="blue"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + 
+button[data-md-color-primary="light-blue"] { + background-color: #03a9f4; } + +[data-md-color-primary="light-blue"] .md-typeset a { + color: #03a9f4; } + +[data-md-color-primary="light-blue"] .md-header { + background-color: #03a9f4; } + +[data-md-color-primary="light-blue"] .md-hero { + background-color: #03a9f4; } + +[data-md-color-primary="light-blue"] .md-nav__link:active, +[data-md-color-primary="light-blue"] .md-nav__link--active { + color: #03a9f4; } + +[data-md-color-primary="light-blue"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="cyan"] { + background-color: #00bcd4; } + +[data-md-color-primary="cyan"] .md-typeset a { + color: #00bcd4; } + +[data-md-color-primary="cyan"] .md-header { + background-color: #00bcd4; } + +[data-md-color-primary="cyan"] .md-hero { + background-color: #00bcd4; } + +[data-md-color-primary="cyan"] .md-nav__link:active, +[data-md-color-primary="cyan"] .md-nav__link--active { + color: #00bcd4; } + +[data-md-color-primary="cyan"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="teal"] { + background-color: #009688; } + +[data-md-color-primary="teal"] .md-typeset a { + color: #009688; } + +[data-md-color-primary="teal"] .md-header { + background-color: #009688; } + +[data-md-color-primary="teal"] .md-hero { + background-color: #009688; } + +[data-md-color-primary="teal"] .md-nav__link:active, +[data-md-color-primary="teal"] .md-nav__link--active { + color: #009688; } + +[data-md-color-primary="teal"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="green"] { + background-color: #4caf50; } + +[data-md-color-primary="green"] .md-typeset a { + color: #4caf50; } + +[data-md-color-primary="green"] .md-header { + background-color: #4caf50; } + +[data-md-color-primary="green"] .md-hero { + background-color: #4caf50; } + +[data-md-color-primary="green"] .md-nav__link:active, +[data-md-color-primary="green"] .md-nav__link--active { + color: #4caf50; } + +[data-md-color-primary="green"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="light-green"] { + background-color: #7cb342; } + +[data-md-color-primary="light-green"] .md-typeset a { + color: #7cb342; } + +[data-md-color-primary="light-green"] .md-header { + background-color: #7cb342; } + +[data-md-color-primary="light-green"] .md-hero { + background-color: #7cb342; } + +[data-md-color-primary="light-green"] .md-nav__link:active, +[data-md-color-primary="light-green"] .md-nav__link--active { + color: #7cb342; } + +[data-md-color-primary="light-green"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="lime"] { + background-color: #c0ca33; } + +[data-md-color-primary="lime"] .md-typeset a { + color: #c0ca33; } + +[data-md-color-primary="lime"] .md-header { + background-color: #c0ca33; } + +[data-md-color-primary="lime"] .md-hero { + background-color: #c0ca33; } + +[data-md-color-primary="lime"] .md-nav__link:active, +[data-md-color-primary="lime"] .md-nav__link--active { + color: #c0ca33; } + +[data-md-color-primary="lime"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="yellow"] { + background-color: #f9a825; } + +[data-md-color-primary="yellow"] .md-typeset a { + color: #f9a825; } + +[data-md-color-primary="yellow"] .md-header { + background-color: #f9a825; } + +[data-md-color-primary="yellow"] .md-hero { + background-color: #f9a825; } + 
+[data-md-color-primary="yellow"] .md-nav__link:active, +[data-md-color-primary="yellow"] .md-nav__link--active { + color: #f9a825; } + +[data-md-color-primary="yellow"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="amber"] { + background-color: #ffa000; } + +[data-md-color-primary="amber"] .md-typeset a { + color: #ffa000; } + +[data-md-color-primary="amber"] .md-header { + background-color: #ffa000; } + +[data-md-color-primary="amber"] .md-hero { + background-color: #ffa000; } + +[data-md-color-primary="amber"] .md-nav__link:active, +[data-md-color-primary="amber"] .md-nav__link--active { + color: #ffa000; } + +[data-md-color-primary="amber"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="orange"] { + background-color: #fb8c00; } + +[data-md-color-primary="orange"] .md-typeset a { + color: #fb8c00; } + +[data-md-color-primary="orange"] .md-header { + background-color: #fb8c00; } + +[data-md-color-primary="orange"] .md-hero { + background-color: #fb8c00; } + +[data-md-color-primary="orange"] .md-nav__link:active, +[data-md-color-primary="orange"] .md-nav__link--active { + color: #fb8c00; } + +[data-md-color-primary="orange"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="deep-orange"] { + background-color: #ff7043; } + +[data-md-color-primary="deep-orange"] .md-typeset a { + color: #ff7043; } + +[data-md-color-primary="deep-orange"] .md-header { + background-color: #ff7043; } + +[data-md-color-primary="deep-orange"] .md-hero { + background-color: #ff7043; } + +[data-md-color-primary="deep-orange"] .md-nav__link:active, +[data-md-color-primary="deep-orange"] .md-nav__link--active { + color: #ff7043; } + +[data-md-color-primary="deep-orange"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="brown"] { + background-color: #795548; } + +[data-md-color-primary="brown"] .md-typeset a { + color: #795548; } + +[data-md-color-primary="brown"] .md-header { + background-color: #795548; } + +[data-md-color-primary="brown"] .md-hero { + background-color: #795548; } + +[data-md-color-primary="brown"] .md-nav__link:active, +[data-md-color-primary="brown"] .md-nav__link--active { + color: #795548; } + +[data-md-color-primary="brown"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="grey"] { + background-color: #757575; } + +[data-md-color-primary="grey"] .md-typeset a { + color: #757575; } + +[data-md-color-primary="grey"] .md-header { + background-color: #757575; } + +[data-md-color-primary="grey"] .md-hero { + background-color: #757575; } + +[data-md-color-primary="grey"] .md-nav__link:active, +[data-md-color-primary="grey"] .md-nav__link--active { + color: #757575; } + +[data-md-color-primary="grey"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="blue-grey"] { + background-color: #546e7a; } + +[data-md-color-primary="blue-grey"] .md-typeset a { + color: #546e7a; } + +[data-md-color-primary="blue-grey"] .md-header { + background-color: #546e7a; } + +[data-md-color-primary="blue-grey"] .md-hero { + background-color: #546e7a; } + +[data-md-color-primary="blue-grey"] .md-nav__link:active, +[data-md-color-primary="blue-grey"] .md-nav__link--active { + color: #546e7a; } + +[data-md-color-primary="blue-grey"] .md-nav__item--nested > .md-nav__link { + color: inherit; } + +button[data-md-color-primary="white"] { + background-color: white; + 
color: rgba(0, 0, 0, 0.87); + box-shadow: 0 0 0.1rem rgba(0, 0, 0, 0.54) inset; } + +[data-md-color-primary="white"] .md-header { + background-color: white; + color: rgba(0, 0, 0, 0.87); } + +[data-md-color-primary="white"] .md-hero { + background-color: white; + color: rgba(0, 0, 0, 0.87); } + [data-md-color-primary="white"] .md-hero--expand { + border-bottom: 0.1rem solid rgba(0, 0, 0, 0.07); } + +button[data-md-color-accent="red"] { + background-color: #ff1744; } + +[data-md-color-accent="red"] .md-typeset a:hover, +[data-md-color-accent="red"] .md-typeset a:active { + color: #ff1744; } + +[data-md-color-accent="red"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="red"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #ff1744; } + +[data-md-color-accent="red"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="red"] .md-typeset .md-clipboard:active::before { + color: #ff1744; } + +[data-md-color-accent="red"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="red"] .md-typeset .footnote li:target .footnote-backref { + color: #ff1744; } + +[data-md-color-accent="red"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="red"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="red"] .md-typeset [id] .headerlink:focus { + color: #ff1744; } + +[data-md-color-accent="red"] .md-nav__link:focus, +[data-md-color-accent="red"] .md-nav__link:hover { + color: #ff1744; } + +[data-md-color-accent="red"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ff1744; } + +[data-md-color-accent="red"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="red"] .md-search-result__link:hover { + background-color: rgba(255, 23, 68, 0.1); } + +[data-md-color-accent="red"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ff1744; } + +[data-md-color-accent="red"] .md-source-file:hover::before { + background-color: #ff1744; } + +button[data-md-color-accent="pink"] { + background-color: #f50057; } + +[data-md-color-accent="pink"] .md-typeset a:hover, +[data-md-color-accent="pink"] .md-typeset a:active { + color: #f50057; } + +[data-md-color-accent="pink"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="pink"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #f50057; } + +[data-md-color-accent="pink"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="pink"] .md-typeset .md-clipboard:active::before { + color: #f50057; } + +[data-md-color-accent="pink"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="pink"] .md-typeset .footnote li:target .footnote-backref { + color: #f50057; } + +[data-md-color-accent="pink"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="pink"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="pink"] .md-typeset [id] .headerlink:focus { + color: #f50057; } + +[data-md-color-accent="pink"] .md-nav__link:focus, +[data-md-color-accent="pink"] .md-nav__link:hover { + color: #f50057; } + +[data-md-color-accent="pink"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #f50057; } + +[data-md-color-accent="pink"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="pink"] .md-search-result__link:hover { + background-color: rgba(245, 0, 87, 0.1); } + +[data-md-color-accent="pink"] 
.md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #f50057; } + +[data-md-color-accent="pink"] .md-source-file:hover::before { + background-color: #f50057; } + +button[data-md-color-accent="purple"] { + background-color: #e040fb; } + +[data-md-color-accent="purple"] .md-typeset a:hover, +[data-md-color-accent="purple"] .md-typeset a:active { + color: #e040fb; } + +[data-md-color-accent="purple"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="purple"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #e040fb; } + +[data-md-color-accent="purple"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="purple"] .md-typeset .md-clipboard:active::before { + color: #e040fb; } + +[data-md-color-accent="purple"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="purple"] .md-typeset .footnote li:target .footnote-backref { + color: #e040fb; } + +[data-md-color-accent="purple"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="purple"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="purple"] .md-typeset [id] .headerlink:focus { + color: #e040fb; } + +[data-md-color-accent="purple"] .md-nav__link:focus, +[data-md-color-accent="purple"] .md-nav__link:hover { + color: #e040fb; } + +[data-md-color-accent="purple"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #e040fb; } + +[data-md-color-accent="purple"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="purple"] .md-search-result__link:hover { + background-color: rgba(224, 64, 251, 0.1); } + +[data-md-color-accent="purple"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #e040fb; } + +[data-md-color-accent="purple"] .md-source-file:hover::before { + background-color: #e040fb; } + +button[data-md-color-accent="deep-purple"] { + background-color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-typeset a:hover, +[data-md-color-accent="deep-purple"] .md-typeset a:active { + color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="deep-purple"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="deep-purple"] .md-typeset .md-clipboard:active::before { + color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="deep-purple"] .md-typeset .footnote li:target .footnote-backref { + color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="deep-purple"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="deep-purple"] .md-typeset [id] .headerlink:focus { + color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-nav__link:focus, +[data-md-color-accent="deep-purple"] .md-nav__link:hover { + color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #7c4dff; } + +[data-md-color-accent="deep-purple"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="deep-purple"] .md-search-result__link:hover { + background-color: rgba(124, 77, 255, 0.1); } + +[data-md-color-accent="deep-purple"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: 
#7c4dff; } + +[data-md-color-accent="deep-purple"] .md-source-file:hover::before { + background-color: #7c4dff; } + +button[data-md-color-accent="indigo"] { + background-color: #536dfe; } + +[data-md-color-accent="indigo"] .md-typeset a:hover, +[data-md-color-accent="indigo"] .md-typeset a:active { + color: #536dfe; } + +[data-md-color-accent="indigo"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="indigo"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #536dfe; } + +[data-md-color-accent="indigo"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="indigo"] .md-typeset .md-clipboard:active::before { + color: #536dfe; } + +[data-md-color-accent="indigo"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="indigo"] .md-typeset .footnote li:target .footnote-backref { + color: #536dfe; } + +[data-md-color-accent="indigo"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="indigo"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="indigo"] .md-typeset [id] .headerlink:focus { + color: #536dfe; } + +[data-md-color-accent="indigo"] .md-nav__link:focus, +[data-md-color-accent="indigo"] .md-nav__link:hover { + color: #536dfe; } + +[data-md-color-accent="indigo"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #536dfe; } + +[data-md-color-accent="indigo"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="indigo"] .md-search-result__link:hover { + background-color: rgba(83, 109, 254, 0.1); } + +[data-md-color-accent="indigo"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #536dfe; } + +[data-md-color-accent="indigo"] .md-source-file:hover::before { + background-color: #536dfe; } + +button[data-md-color-accent="blue"] { + background-color: #448aff; } + +[data-md-color-accent="blue"] .md-typeset a:hover, +[data-md-color-accent="blue"] .md-typeset a:active { + color: #448aff; } + +[data-md-color-accent="blue"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="blue"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #448aff; } + +[data-md-color-accent="blue"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="blue"] .md-typeset .md-clipboard:active::before { + color: #448aff; } + +[data-md-color-accent="blue"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="blue"] .md-typeset .footnote li:target .footnote-backref { + color: #448aff; } + +[data-md-color-accent="blue"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="blue"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="blue"] .md-typeset [id] .headerlink:focus { + color: #448aff; } + +[data-md-color-accent="blue"] .md-nav__link:focus, +[data-md-color-accent="blue"] .md-nav__link:hover { + color: #448aff; } + +[data-md-color-accent="blue"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #448aff; } + +[data-md-color-accent="blue"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="blue"] .md-search-result__link:hover { + background-color: rgba(68, 138, 255, 0.1); } + +[data-md-color-accent="blue"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #448aff; } + +[data-md-color-accent="blue"] .md-source-file:hover::before { + background-color: #448aff; } + +button[data-md-color-accent="light-blue"] { + background-color: #0091ea; } + 
+[data-md-color-accent="light-blue"] .md-typeset a:hover, +[data-md-color-accent="light-blue"] .md-typeset a:active { + color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="light-blue"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="light-blue"] .md-typeset .md-clipboard:active::before { + color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="light-blue"] .md-typeset .footnote li:target .footnote-backref { + color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="light-blue"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="light-blue"] .md-typeset [id] .headerlink:focus { + color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-nav__link:focus, +[data-md-color-accent="light-blue"] .md-nav__link:hover { + color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="light-blue"] .md-search-result__link:hover { + background-color: rgba(0, 145, 234, 0.1); } + +[data-md-color-accent="light-blue"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #0091ea; } + +[data-md-color-accent="light-blue"] .md-source-file:hover::before { + background-color: #0091ea; } + +button[data-md-color-accent="cyan"] { + background-color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-typeset a:hover, +[data-md-color-accent="cyan"] .md-typeset a:active { + color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="cyan"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="cyan"] .md-typeset .md-clipboard:active::before { + color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="cyan"] .md-typeset .footnote li:target .footnote-backref { + color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="cyan"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="cyan"] .md-typeset [id] .headerlink:focus { + color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-nav__link:focus, +[data-md-color-accent="cyan"] .md-nav__link:hover { + color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="cyan"] .md-search-result__link:hover { + background-color: rgba(0, 184, 212, 0.1); } + +[data-md-color-accent="cyan"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #00b8d4; } + +[data-md-color-accent="cyan"] .md-source-file:hover::before { + background-color: #00b8d4; } + +button[data-md-color-accent="teal"] { + background-color: #00bfa5; } + +[data-md-color-accent="teal"] .md-typeset a:hover, +[data-md-color-accent="teal"] .md-typeset a:active { + color: #00bfa5; } + 
+[data-md-color-accent="teal"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="teal"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #00bfa5; } + +[data-md-color-accent="teal"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="teal"] .md-typeset .md-clipboard:active::before { + color: #00bfa5; } + +[data-md-color-accent="teal"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="teal"] .md-typeset .footnote li:target .footnote-backref { + color: #00bfa5; } + +[data-md-color-accent="teal"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="teal"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="teal"] .md-typeset [id] .headerlink:focus { + color: #00bfa5; } + +[data-md-color-accent="teal"] .md-nav__link:focus, +[data-md-color-accent="teal"] .md-nav__link:hover { + color: #00bfa5; } + +[data-md-color-accent="teal"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #00bfa5; } + +[data-md-color-accent="teal"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="teal"] .md-search-result__link:hover { + background-color: rgba(0, 191, 165, 0.1); } + +[data-md-color-accent="teal"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #00bfa5; } + +[data-md-color-accent="teal"] .md-source-file:hover::before { + background-color: #00bfa5; } + +button[data-md-color-accent="green"] { + background-color: #00c853; } + +[data-md-color-accent="green"] .md-typeset a:hover, +[data-md-color-accent="green"] .md-typeset a:active { + color: #00c853; } + +[data-md-color-accent="green"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="green"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #00c853; } + +[data-md-color-accent="green"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="green"] .md-typeset .md-clipboard:active::before { + color: #00c853; } + +[data-md-color-accent="green"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="green"] .md-typeset .footnote li:target .footnote-backref { + color: #00c853; } + +[data-md-color-accent="green"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="green"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="green"] .md-typeset [id] .headerlink:focus { + color: #00c853; } + +[data-md-color-accent="green"] .md-nav__link:focus, +[data-md-color-accent="green"] .md-nav__link:hover { + color: #00c853; } + +[data-md-color-accent="green"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #00c853; } + +[data-md-color-accent="green"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="green"] .md-search-result__link:hover { + background-color: rgba(0, 200, 83, 0.1); } + +[data-md-color-accent="green"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #00c853; } + +[data-md-color-accent="green"] .md-source-file:hover::before { + background-color: #00c853; } + +button[data-md-color-accent="light-green"] { + background-color: #64dd17; } + +[data-md-color-accent="light-green"] .md-typeset a:hover, +[data-md-color-accent="light-green"] .md-typeset a:active { + color: #64dd17; } + +[data-md-color-accent="light-green"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="light-green"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + 
background-color: #64dd17; } + +[data-md-color-accent="light-green"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="light-green"] .md-typeset .md-clipboard:active::before { + color: #64dd17; } + +[data-md-color-accent="light-green"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="light-green"] .md-typeset .footnote li:target .footnote-backref { + color: #64dd17; } + +[data-md-color-accent="light-green"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="light-green"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="light-green"] .md-typeset [id] .headerlink:focus { + color: #64dd17; } + +[data-md-color-accent="light-green"] .md-nav__link:focus, +[data-md-color-accent="light-green"] .md-nav__link:hover { + color: #64dd17; } + +[data-md-color-accent="light-green"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #64dd17; } + +[data-md-color-accent="light-green"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="light-green"] .md-search-result__link:hover { + background-color: rgba(100, 221, 23, 0.1); } + +[data-md-color-accent="light-green"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #64dd17; } + +[data-md-color-accent="light-green"] .md-source-file:hover::before { + background-color: #64dd17; } + +button[data-md-color-accent="lime"] { + background-color: #aeea00; } + +[data-md-color-accent="lime"] .md-typeset a:hover, +[data-md-color-accent="lime"] .md-typeset a:active { + color: #aeea00; } + +[data-md-color-accent="lime"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="lime"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #aeea00; } + +[data-md-color-accent="lime"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="lime"] .md-typeset .md-clipboard:active::before { + color: #aeea00; } + +[data-md-color-accent="lime"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="lime"] .md-typeset .footnote li:target .footnote-backref { + color: #aeea00; } + +[data-md-color-accent="lime"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="lime"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="lime"] .md-typeset [id] .headerlink:focus { + color: #aeea00; } + +[data-md-color-accent="lime"] .md-nav__link:focus, +[data-md-color-accent="lime"] .md-nav__link:hover { + color: #aeea00; } + +[data-md-color-accent="lime"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #aeea00; } + +[data-md-color-accent="lime"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="lime"] .md-search-result__link:hover { + background-color: rgba(174, 234, 0, 0.1); } + +[data-md-color-accent="lime"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #aeea00; } + +[data-md-color-accent="lime"] .md-source-file:hover::before { + background-color: #aeea00; } + +button[data-md-color-accent="yellow"] { + background-color: #ffd600; } + +[data-md-color-accent="yellow"] .md-typeset a:hover, +[data-md-color-accent="yellow"] .md-typeset a:active { + color: #ffd600; } + +[data-md-color-accent="yellow"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="yellow"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #ffd600; } + +[data-md-color-accent="yellow"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="yellow"] 
.md-typeset .md-clipboard:active::before { + color: #ffd600; } + +[data-md-color-accent="yellow"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="yellow"] .md-typeset .footnote li:target .footnote-backref { + color: #ffd600; } + +[data-md-color-accent="yellow"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="yellow"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="yellow"] .md-typeset [id] .headerlink:focus { + color: #ffd600; } + +[data-md-color-accent="yellow"] .md-nav__link:focus, +[data-md-color-accent="yellow"] .md-nav__link:hover { + color: #ffd600; } + +[data-md-color-accent="yellow"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ffd600; } + +[data-md-color-accent="yellow"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="yellow"] .md-search-result__link:hover { + background-color: rgba(255, 214, 0, 0.1); } + +[data-md-color-accent="yellow"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ffd600; } + +[data-md-color-accent="yellow"] .md-source-file:hover::before { + background-color: #ffd600; } + +button[data-md-color-accent="amber"] { + background-color: #ffab00; } + +[data-md-color-accent="amber"] .md-typeset a:hover, +[data-md-color-accent="amber"] .md-typeset a:active { + color: #ffab00; } + +[data-md-color-accent="amber"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="amber"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #ffab00; } + +[data-md-color-accent="amber"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="amber"] .md-typeset .md-clipboard:active::before { + color: #ffab00; } + +[data-md-color-accent="amber"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="amber"] .md-typeset .footnote li:target .footnote-backref { + color: #ffab00; } + +[data-md-color-accent="amber"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="amber"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="amber"] .md-typeset [id] .headerlink:focus { + color: #ffab00; } + +[data-md-color-accent="amber"] .md-nav__link:focus, +[data-md-color-accent="amber"] .md-nav__link:hover { + color: #ffab00; } + +[data-md-color-accent="amber"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ffab00; } + +[data-md-color-accent="amber"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="amber"] .md-search-result__link:hover { + background-color: rgba(255, 171, 0, 0.1); } + +[data-md-color-accent="amber"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ffab00; } + +[data-md-color-accent="amber"] .md-source-file:hover::before { + background-color: #ffab00; } + +button[data-md-color-accent="orange"] { + background-color: #ff9100; } + +[data-md-color-accent="orange"] .md-typeset a:hover, +[data-md-color-accent="orange"] .md-typeset a:active { + color: #ff9100; } + +[data-md-color-accent="orange"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="orange"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #ff9100; } + +[data-md-color-accent="orange"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="orange"] .md-typeset .md-clipboard:active::before { + color: #ff9100; } + +[data-md-color-accent="orange"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="orange"] 
.md-typeset .footnote li:target .footnote-backref { + color: #ff9100; } + +[data-md-color-accent="orange"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="orange"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="orange"] .md-typeset [id] .headerlink:focus { + color: #ff9100; } + +[data-md-color-accent="orange"] .md-nav__link:focus, +[data-md-color-accent="orange"] .md-nav__link:hover { + color: #ff9100; } + +[data-md-color-accent="orange"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ff9100; } + +[data-md-color-accent="orange"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="orange"] .md-search-result__link:hover { + background-color: rgba(255, 145, 0, 0.1); } + +[data-md-color-accent="orange"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ff9100; } + +[data-md-color-accent="orange"] .md-source-file:hover::before { + background-color: #ff9100; } + +button[data-md-color-accent="deep-orange"] { + background-color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-typeset a:hover, +[data-md-color-accent="deep-orange"] .md-typeset a:active { + color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-typeset pre code::-webkit-scrollbar-thumb:hover, +[data-md-color-accent="deep-orange"] .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover { + background-color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-typeset .md-clipboard:hover::before, +[data-md-color-accent="deep-orange"] .md-typeset .md-clipboard:active::before { + color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-typeset .footnote li:hover .footnote-backref:hover, +[data-md-color-accent="deep-orange"] .md-typeset .footnote li:target .footnote-backref { + color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-typeset [id]:hover .headerlink:hover, +[data-md-color-accent="deep-orange"] .md-typeset [id]:target .headerlink, +[data-md-color-accent="deep-orange"] .md-typeset [id] .headerlink:focus { + color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-nav__link:focus, +[data-md-color-accent="deep-orange"] .md-nav__link:hover { + color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-search-result__link[data-md-state="active"], [data-md-color-accent="deep-orange"] .md-search-result__link:hover { + background-color: rgba(255, 110, 64, 0.1); } + +[data-md-color-accent="deep-orange"] .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #ff6e40; } + +[data-md-color-accent="deep-orange"] .md-source-file:hover::before { + background-color: #ff6e40; } + +@media only screen and (max-width: 59.9375em) { + [data-md-color-primary="red"] .md-nav__source { + background-color: rgba(190, 66, 64, 0.9675); } + [data-md-color-primary="pink"] .md-nav__source { + background-color: rgba(185, 24, 79, 0.9675); } + [data-md-color-primary="purple"] .md-nav__source { + background-color: rgba(136, 57, 150, 0.9675); } + [data-md-color-primary="deep-purple"] .md-nav__source { + background-color: rgba(100, 69, 154, 0.9675); } + [data-md-color-primary="indigo"] .md-nav__source { + background-color: rgba(50, 64, 144, 0.9675); } + [data-md-color-primary="blue"] .md-nav__source { + background-color: rgba(26, 119, 193, 0.9675); } + [data-md-color-primary="light-blue"] .md-nav__source { + background-color: rgba(2, 134, 194, 0.9675); } + 
[data-md-color-primary="cyan"] .md-nav__source { + background-color: rgba(0, 150, 169, 0.9675); } + [data-md-color-primary="teal"] .md-nav__source { + background-color: rgba(0, 119, 108, 0.9675); } + [data-md-color-primary="green"] .md-nav__source { + background-color: rgba(60, 139, 64, 0.9675); } + [data-md-color-primary="light-green"] .md-nav__source { + background-color: rgba(99, 142, 53, 0.9675); } + [data-md-color-primary="lime"] .md-nav__source { + background-color: rgba(153, 161, 41, 0.9675); } + [data-md-color-primary="yellow"] .md-nav__source { + background-color: rgba(198, 134, 29, 0.9675); } + [data-md-color-primary="amber"] .md-nav__source { + background-color: rgba(203, 127, 0, 0.9675); } + [data-md-color-primary="orange"] .md-nav__source { + background-color: rgba(200, 111, 0, 0.9675); } + [data-md-color-primary="deep-orange"] .md-nav__source { + background-color: rgba(203, 89, 53, 0.9675); } + [data-md-color-primary="brown"] .md-nav__source { + background-color: rgba(96, 68, 57, 0.9675); } + [data-md-color-primary="grey"] .md-nav__source { + background-color: rgba(93, 93, 93, 0.9675); } + [data-md-color-primary="blue-grey"] .md-nav__source { + background-color: rgba(67, 88, 97, 0.9675); } + [data-md-color-primary="white"] .md-nav__source { + background-color: rgba(0, 0, 0, 0.07); + color: rgba(0, 0, 0, 0.87); } } + +@media only screen and (max-width: 76.1875em) { + html [data-md-color-primary="red"] .md-nav--primary .md-nav__title--site { + background-color: #ef5350; } + html [data-md-color-primary="pink"] .md-nav--primary .md-nav__title--site { + background-color: #e91e63; } + html [data-md-color-primary="purple"] .md-nav--primary .md-nav__title--site { + background-color: #ab47bc; } + html [data-md-color-primary="deep-purple"] .md-nav--primary .md-nav__title--site { + background-color: #7e57c2; } + html [data-md-color-primary="indigo"] .md-nav--primary .md-nav__title--site { + background-color: #3f51b5; } + html [data-md-color-primary="blue"] .md-nav--primary .md-nav__title--site { + background-color: #2196f3; } + html [data-md-color-primary="light-blue"] .md-nav--primary .md-nav__title--site { + background-color: #03a9f4; } + html [data-md-color-primary="cyan"] .md-nav--primary .md-nav__title--site { + background-color: #00bcd4; } + html [data-md-color-primary="teal"] .md-nav--primary .md-nav__title--site { + background-color: #009688; } + html [data-md-color-primary="green"] .md-nav--primary .md-nav__title--site { + background-color: #4caf50; } + html [data-md-color-primary="light-green"] .md-nav--primary .md-nav__title--site { + background-color: #7cb342; } + html [data-md-color-primary="lime"] .md-nav--primary .md-nav__title--site { + background-color: #c0ca33; } + html [data-md-color-primary="yellow"] .md-nav--primary .md-nav__title--site { + background-color: #f9a825; } + html [data-md-color-primary="amber"] .md-nav--primary .md-nav__title--site { + background-color: #ffa000; } + html [data-md-color-primary="orange"] .md-nav--primary .md-nav__title--site { + background-color: #fb8c00; } + html [data-md-color-primary="deep-orange"] .md-nav--primary .md-nav__title--site { + background-color: #ff7043; } + html [data-md-color-primary="brown"] .md-nav--primary .md-nav__title--site { + background-color: #795548; } + html [data-md-color-primary="grey"] .md-nav--primary .md-nav__title--site { + background-color: #757575; } + html [data-md-color-primary="blue-grey"] .md-nav--primary .md-nav__title--site { + background-color: #546e7a; } + html [data-md-color-primary="white"] 
.md-nav--primary .md-nav__title--site { + background-color: white; + color: rgba(0, 0, 0, 0.87); } + [data-md-color-primary="white"] .md-hero { + border-bottom: 0.1rem solid rgba(0, 0, 0, 0.07); } } + +@media only screen and (min-width: 76.25em) { + [data-md-color-primary="red"] .md-tabs { + background-color: #ef5350; } + [data-md-color-primary="pink"] .md-tabs { + background-color: #e91e63; } + [data-md-color-primary="purple"] .md-tabs { + background-color: #ab47bc; } + [data-md-color-primary="deep-purple"] .md-tabs { + background-color: #7e57c2; } + [data-md-color-primary="indigo"] .md-tabs { + background-color: #3f51b5; } + [data-md-color-primary="blue"] .md-tabs { + background-color: #2196f3; } + [data-md-color-primary="light-blue"] .md-tabs { + background-color: #03a9f4; } + [data-md-color-primary="cyan"] .md-tabs { + background-color: #00bcd4; } + [data-md-color-primary="teal"] .md-tabs { + background-color: #009688; } + [data-md-color-primary="green"] .md-tabs { + background-color: #4caf50; } + [data-md-color-primary="light-green"] .md-tabs { + background-color: #7cb342; } + [data-md-color-primary="lime"] .md-tabs { + background-color: #c0ca33; } + [data-md-color-primary="yellow"] .md-tabs { + background-color: #f9a825; } + [data-md-color-primary="amber"] .md-tabs { + background-color: #ffa000; } + [data-md-color-primary="orange"] .md-tabs { + background-color: #fb8c00; } + [data-md-color-primary="deep-orange"] .md-tabs { + background-color: #ff7043; } + [data-md-color-primary="brown"] .md-tabs { + background-color: #795548; } + [data-md-color-primary="grey"] .md-tabs { + background-color: #757575; } + [data-md-color-primary="blue-grey"] .md-tabs { + background-color: #546e7a; } + [data-md-color-primary="white"] .md-tabs { + border-bottom: 0.1rem solid rgba(0, 0, 0, 0.07); + background-color: white; + color: rgba(0, 0, 0, 0.87); } } + +@media only screen and (min-width: 60em) { + [data-md-color-primary="white"] .md-search__input { + background-color: rgba(0, 0, 0, 0.07); } + [data-md-color-primary="white"] .md-search__input::-webkit-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + [data-md-color-primary="white"] .md-search__input:-ms-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + [data-md-color-primary="white"] .md-search__input::-ms-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + [data-md-color-primary="white"] .md-search__input::placeholder { + color: rgba(0, 0, 0, 0.54); } } + +/*# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IiIsImZpbGUiOiJhc3NldHMvc3R5bGVzaGVldHMvYXBwbGljYXRpb24tcGFsZXR0ZS4yMjkxNTEyNi5jc3MiLCJzb3VyY2VSb290IjoiIn0=*/ \ No newline at end of file diff --git a/docs/material/assets/stylesheets/application.fbb7f3af.css b/docs/material/assets/stylesheets/application.fbb7f3af.css new file mode 100644 index 00000000..398e092a --- /dev/null +++ b/docs/material/assets/stylesheets/application.fbb7f3af.css @@ -0,0 +1,2554 @@ +@charset "UTF-8"; +html { + box-sizing: border-box; } + +*, +*::before, +*::after { + box-sizing: inherit; } + +html { + -webkit-text-size-adjust: none; + -moz-text-size-adjust: none; + -ms-text-size-adjust: none; + text-size-adjust: none; } + +body { + margin: 0; } + +hr { + overflow: visible; + box-sizing: content-box; } + +a { + -webkit-text-decoration-skip: objects; } + +a, +button, +label, +input { + -webkit-tap-highlight-color: transparent; } + +a { + color: inherit; + text-decoration: none; } + +small { + font-size: 80%; } + +sub, +sup { + position: 
relative; + font-size: 80%; + line-height: 0; + vertical-align: baseline; } + +sub { + bottom: -0.25em; } + +sup { + top: -0.5em; } + +img { + border-style: none; } + +table { + border-collapse: separate; + border-spacing: 0; } + +td, +th { + font-weight: normal; + vertical-align: top; } + +button { + margin: 0; + padding: 0; + border: 0; + outline-style: none; + background: transparent; + font-size: inherit; } + +input { + border: 0; + outline: 0; } + +.md-icon, .md-clipboard::before, .md-nav__title::before, .md-nav__button, .md-nav__link::after, .md-search-result__article--document::before, .md-source-file::before, .md-typeset .admonition > .admonition-title::before, .md-typeset details > .admonition-title::before, .md-typeset .admonition > summary::before, .md-typeset details > summary::before, .md-typeset .footnote-backref, .md-typeset .critic.comment::before, .md-typeset summary::after, .md-typeset .task-list-control .task-list-indicator::before { + font-family: "Material Icons"; + font-style: normal; + font-variant: normal; + font-weight: normal; + line-height: 1; + text-transform: none; + white-space: nowrap; + speak: none; + word-wrap: normal; + direction: ltr; } + .md-content__icon, .md-header-nav__button, .md-footer-nav__button, .md-nav__title::before, .md-nav__button, .md-search-result__article--document::before { + display: inline-block; + margin: 0.4rem; + padding: 0.8rem; + font-size: 2.4rem; + cursor: pointer; } + +.md-icon--arrow-back::before { + content: "\E5C4"; } + +.md-icon--arrow-forward::before { + content: "\E5C8"; } + +.md-icon--menu::before { + content: "\E5D2"; } + +.md-icon--search::before { + content: "\E8B6"; } + +[dir="rtl"] .md-icon--arrow-back::before { + content: "\E5C8"; } + +[dir="rtl"] .md-icon--arrow-forward::before { + content: "\E5C4"; } + +body { + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; } + +body, +input { + color: rgba(0, 0, 0, 0.87); + -webkit-font-feature-settings: "kern", "liga"; + font-feature-settings: "kern", "liga"; + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; } + +pre, +code, +kbd { + color: rgba(0, 0, 0, 0.87); + -webkit-font-feature-settings: "kern"; + font-feature-settings: "kern"; + font-family: "Courier New", Courier, monospace; } + +.md-typeset { + font-size: 1.6rem; + line-height: 1.6; + -webkit-print-color-adjust: exact; } + .md-typeset p, + .md-typeset ul, + .md-typeset ol, + .md-typeset blockquote { + margin: 1em 0; } + .md-typeset h1 { + margin: 0 0 4rem; + color: rgba(0, 0, 0, 0.54); + font-size: 3.125rem; + font-weight: 300; + letter-spacing: -0.01em; + line-height: 1.3; } + .md-typeset h2 { + margin: 4rem 0 1.6rem; + font-size: 2.5rem; + font-weight: 300; + letter-spacing: -0.01em; + line-height: 1.4; } + .md-typeset h3 { + margin: 3.2rem 0 1.6rem; + font-size: 2rem; + font-weight: 400; + letter-spacing: -0.01em; + line-height: 1.5; } + .md-typeset h2 + h3 { + margin-top: 1.6rem; } + .md-typeset h4 { + margin: 1.6rem 0; + font-size: 1.6rem; + font-weight: 700; + letter-spacing: -0.01em; } + .md-typeset h5, + .md-typeset h6 { + margin: 1.6rem 0; + color: rgba(0, 0, 0, 0.54); + font-size: 1.28rem; + font-weight: 700; + letter-spacing: -0.01em; } + .md-typeset h5 { + text-transform: uppercase; } + .md-typeset hr { + margin: 1.5em 0; + border-bottom: 0.1rem dotted rgba(0, 0, 0, 0.26); } + .md-typeset a { + color: #1381D7; + word-break: break-word; } + .md-typeset a, .md-typeset a::before { + transition: color 0.125s; } + .md-typeset a:hover, .md-typeset a:active { + color: 
#1998F1; } + .md-typeset code, + .md-typeset pre { + background-color: rgba(236, 236, 236, 0.5); + color: #37474F; + font-size: 85%; + direction: ltr; } + .md-typeset code { + margin: 0 0.29412em; + padding: 0.07353em 0; + border-radius: 0.2rem; + box-shadow: 0.29412em 0 0 rgba(236, 236, 236, 0.5), -0.29412em 0 0 rgba(236, 236, 236, 0.5); + word-break: break-word; + -webkit-box-decoration-break: clone; + box-decoration-break: clone; } + .md-typeset h1 code, + .md-typeset h2 code, + .md-typeset h3 code, + .md-typeset h4 code, + .md-typeset h5 code, + .md-typeset h6 code { + margin: 0; + background-color: transparent; + box-shadow: none; } + .md-typeset a > code { + margin: inherit; + padding: inherit; + border-radius: none; + background-color: inherit; + color: inherit; + box-shadow: none; } + .md-typeset pre { + position: relative; + margin: 1em 0; + border-radius: 0.2rem; + line-height: 1.4; + -webkit-overflow-scrolling: touch; } + .md-typeset pre > code { + display: block; + margin: 0; + padding: 1.05rem 1.2rem; + background-color: transparent; + font-size: inherit; + box-shadow: none; + -webkit-box-decoration-break: none; + box-decoration-break: none; + overflow: auto; } + .md-typeset pre > code::-webkit-scrollbar { + width: 0.4rem; + height: 0.4rem; } + .md-typeset pre > code::-webkit-scrollbar-thumb { + background-color: rgba(0, 0, 0, 0.26); } + .md-typeset pre > code::-webkit-scrollbar-thumb:hover { + background-color: #1AD6F5; } + .md-typeset kbd { + padding: 0 0.29412em; + border: 0.1rem solid #c9c9c9; + border-radius: 0.3rem; + border-bottom-color: #bcbcbc; + background-color: #FCFCFC; + color: #555555; + font-size: 85%; + box-shadow: 0 0.1rem 0 #b0b0b0; + word-break: break-word; } + .md-typeset mark { + margin: 0 0.25em; + padding: 0.0625em 0; + border-radius: 0.2rem; + background-color: rgba(255, 235, 59, 0.5); + box-shadow: 0.25em 0 0 rgba(255, 235, 59, 0.5), -0.25em 0 0 rgba(255, 235, 59, 0.5); + word-break: break-word; + -webkit-box-decoration-break: clone; + box-decoration-break: clone; } + .md-typeset abbr { + border-bottom: 0.1rem dotted rgba(0, 0, 0, 0.54); + text-decoration: none; + cursor: help; } + .md-typeset small { + opacity: 0.75; } + .md-typeset sup, + .md-typeset sub { + margin-left: 0.07812em; } + [dir="rtl"] .md-typeset sup, [dir="rtl"] + .md-typeset sub { + margin-right: 0.07812em; + margin-left: initial; } + .md-typeset blockquote { + padding-left: 1.2rem; + border-left: 0.4rem solid rgba(0, 0, 0, 0.26); + color: rgba(0, 0, 0, 0.54); } + [dir="rtl"] .md-typeset blockquote { + padding-right: 1.2rem; + padding-left: initial; + border-right: 0.4rem solid rgba(0, 0, 0, 0.26); + border-left: initial; } + .md-typeset ul { + list-style-type: disc; } + .md-typeset ul, + .md-typeset ol { + margin-left: 0.625em; + padding: 0; } + [dir="rtl"] .md-typeset ul, [dir="rtl"] + .md-typeset ol { + margin-right: 0.625em; + margin-left: initial; } + .md-typeset ul ol, + .md-typeset ol ol { + list-style-type: lower-alpha; } + .md-typeset ul ol ol, + .md-typeset ol ol ol { + list-style-type: lower-roman; } + .md-typeset ul li, + .md-typeset ol li { + margin-bottom: 0.5em; + margin-left: 1.25em; } + [dir="rtl"] .md-typeset ul li, [dir="rtl"] + .md-typeset ol li { + margin-right: 1.25em; + margin-left: initial; } + .md-typeset ul li p, + .md-typeset ul li blockquote, + .md-typeset ol li p, + .md-typeset ol li blockquote { + margin: 0.5em 0; } + .md-typeset ul li:last-child, + .md-typeset ol li:last-child { + margin-bottom: 0; } + .md-typeset ul li ul, + .md-typeset ul li ol, + 
.md-typeset ol li ul, + .md-typeset ol li ol { + margin: 0.5em 0 0.5em 0.625em; } + [dir="rtl"] .md-typeset ul li ul, [dir="rtl"] + .md-typeset ul li ol, [dir="rtl"] + .md-typeset ol li ul, [dir="rtl"] + .md-typeset ol li ol { + margin-right: 0.625em; + margin-left: initial; } + .md-typeset dd { + margin: 1em 0 1em 1.875em; } + [dir="rtl"] .md-typeset dd { + margin-right: 1.875em; + margin-left: initial; } + .md-typeset iframe, + .md-typeset img, + .md-typeset svg { + max-width: 100%; } + .md-typeset table:not([class]) { + box-shadow: 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12), 0 3px 1px -2px rgba(0, 0, 0, 0.2); + display: inline-block; + max-width: 100%; + border-radius: 0.2rem; + font-size: 1.28rem; + overflow: auto; + -webkit-overflow-scrolling: touch; } + .md-typeset table:not([class]) + * { + margin-top: 1.5em; } + .md-typeset table:not([class]) th:not([align]), + .md-typeset table:not([class]) td:not([align]) { + text-align: left; } + [dir="rtl"] .md-typeset table:not([class]) th:not([align]), [dir="rtl"] + .md-typeset table:not([class]) td:not([align]) { + text-align: right; } + .md-typeset table:not([class]) th { + min-width: 10rem; + padding: 1.2rem 1.6rem; + background-color: rgba(0, 0, 0, 0.54); + color: white; + vertical-align: top; } + .md-typeset table:not([class]) td { + padding: 1.2rem 1.6rem; + border-top: 0.1rem solid rgba(0, 0, 0, 0.07); + vertical-align: top; } + .md-typeset table:not([class]) tr:first-child td { + border-top: 0; } + .md-typeset table:not([class]) a { + word-break: normal; } + .md-typeset__scrollwrap { + margin: 1em -1.6rem; + overflow-x: auto; + -webkit-overflow-scrolling: touch; } + .md-typeset .md-typeset__table { + display: inline-block; + margin-bottom: 0.5em; + padding: 0 1.6rem; } + .md-typeset .md-typeset__table table { + display: table; + width: 100%; + margin: 0; + overflow: hidden; } + +html { + height: 100%; + font-size: 62.5%; + overflow-x: hidden; } + +body { + position: relative; + height: 100%; } + +hr { + display: block; + height: 0.1rem; + padding: 0; + border: 0; } + +.md-svg { + display: none; } + +.md-grid { + max-width: 122rem; + margin-right: auto; + margin-left: auto; } + +.md-container, +.md-main { + overflow: auto; } + +.md-container { + display: table; + width: 100%; + height: 100%; + padding-top: 4.8rem; + table-layout: fixed; } + +.md-main { + display: table-row; + height: 100%; } + .md-main__inner { + height: 100%; + padding-top: 3rem; + padding-bottom: 0.1rem; } + +.md-toggle { + display: none; } + +.md-overlay { + position: fixed; + top: 0; + width: 0; + height: 0; + transition: width 0s 0.25s, height 0s 0.25s, opacity 0.25s; + background-color: rgba(0, 0, 0, 0.54); + opacity: 0; + z-index: 3; } + +.md-flex { + display: table; } + .md-flex__cell { + display: table-cell; + position: relative; + vertical-align: top; } + .md-flex__cell--shrink { + width: 0%; } + .md-flex__cell--stretch { + display: table; + width: 100%; + table-layout: fixed; } + .md-flex__ellipsis { + display: table-cell; + text-overflow: ellipsis; + white-space: nowrap; + overflow: hidden; } + +.md-skip { + position: fixed; + width: 0.1rem; + height: 0.1rem; + margin: 1rem; + padding: 0.6rem 1rem; + clip: rect(0.1rem); + -webkit-transform: translateY(0.8rem); + transform: translateY(0.8rem); + border-radius: 0.2rem; + background-color: rgba(0, 0, 0, 0.87); + color: white; + font-size: 1.28rem; + opacity: 0; + overflow: hidden; } + .md-skip:focus { + width: auto; + height: auto; + clip: auto; + -webkit-transform: translateX(0); + 
transform: translateX(0); + transition: opacity 0.175s 0.075s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.175s 0.075s; + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.175s 0.075s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + opacity: 1; + z-index: 10; } + +@page { + margin: 25mm; } + +.md-clipboard { + position: absolute; + top: 0.6rem; + right: 0.6rem; + width: 2.8rem; + height: 2.8rem; + border-radius: 0.2rem; + font-size: 1.6rem; + cursor: pointer; + z-index: 1; + -webkit-backface-visibility: hidden; + backface-visibility: hidden; } + .md-clipboard::before { + transition: color 0.25s, opacity 0.25s; + color: rgba(0, 0, 0, 0.07); + content: "\E14D"; } + pre:hover .md-clipboard::before, + .codehilite:hover .md-clipboard::before, .md-typeset .highlight:hover .md-clipboard::before { + color: rgba(0, 0, 0, 0.54); } + .md-clipboard:focus::before, .md-clipboard:hover::before { + color: #1AD6F5; } + .md-clipboard__message { + display: block; + position: absolute; + top: 0; + right: 3.4rem; + padding: 0.6rem 1rem; + -webkit-transform: translateX(0.8rem); + transform: translateX(0.8rem); + transition: opacity 0.175s, -webkit-transform 0.25s cubic-bezier(0.9, 0.1, 0.9, 0); + transition: transform 0.25s cubic-bezier(0.9, 0.1, 0.9, 0), opacity 0.175s; + transition: transform 0.25s cubic-bezier(0.9, 0.1, 0.9, 0), opacity 0.175s, -webkit-transform 0.25s cubic-bezier(0.9, 0.1, 0.9, 0); + border-radius: 0.2rem; + background-color: rgba(0, 0, 0, 0.54); + color: white; + font-size: 1.28rem; + white-space: nowrap; + opacity: 0; + pointer-events: none; } + .md-clipboard__message--active { + -webkit-transform: translateX(0); + transform: translateX(0); + transition: opacity 0.175s 0.075s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.175s 0.075s; + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.175s 0.075s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + opacity: 1; + pointer-events: initial; } + .md-clipboard__message::before { + content: attr(aria-label); } + .md-clipboard__message::after { + display: block; + position: absolute; + top: 50%; + right: -0.4rem; + width: 0; + margin-top: -0.4rem; + border-width: 0.4rem 0 0.4rem 0.4rem; + border-style: solid; + border-color: transparent rgba(0, 0, 0, 0.54); + content: ""; } + +.md-content__inner { + margin: 0 1.6rem 2.4rem; + padding-top: 1.2rem; } + .md-content__inner::before { + display: block; + height: 0.8rem; + content: ""; } + .md-content__inner > :last-child { + margin-bottom: 0; } + +.md-content__icon { + position: relative; + margin: 0.8rem 0; + padding: 0; + float: right; } + .md-typeset .md-content__icon { + color: rgba(0, 0, 0, 0.26); } + +.md-header { + position: fixed; + top: 0; + right: 0; + left: 0; + height: 4.8rem; + transition: background-color 0.25s, color 0.25s; + background-color: #1381D7; + color: white; + box-shadow: none; + z-index: 2; + -webkit-backface-visibility: hidden; + backface-visibility: hidden; } + .no-js .md-header { + transition: none; + box-shadow: none; } + .md-header[data-md-state="shadow"] { + transition: background-color 0.25s, color 0.25s, box-shadow 0.25s; + box-shadow: 0 0 0.4rem rgba(0, 0, 0, 0.1), 0 0.4rem 0.8rem rgba(0, 0, 0, 0.2); } + +.md-header-nav { + padding: 0 0.4rem; } + .md-header-nav a.md-header-nav__button.md-logo { + padding: 1.4rem; } + .md-header-nav__button { + position: relative; + 
transition: opacity 0.25s; + z-index: 1; } + .md-header-nav__button:hover { + opacity: 0.7; } + .md-header-nav__button.md-logo * { + display: block; } + .no-js .md-header-nav__button.md-icon--search { + display: none; } + .md-header-nav__topic { + display: block; + position: absolute; + transition: opacity 0.15s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s; + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + text-overflow: ellipsis; + white-space: nowrap; + overflow: hidden; } + .md-header-nav__topic + .md-header-nav__topic { + -webkit-transform: translateX(2.5rem); + transform: translateX(2.5rem); + transition: opacity 0.15s, -webkit-transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1); + transition: transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1), opacity 0.15s; + transition: transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1), opacity 0.15s, -webkit-transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1); + opacity: 0; + z-index: -1; + pointer-events: none; } + [dir="rtl"] .md-header-nav__topic + .md-header-nav__topic { + -webkit-transform: translateX(-2.5rem); + transform: translateX(-2.5rem); } + .no-js .md-header-nav__topic { + position: initial; } + .no-js .md-header-nav__topic + .md-header-nav__topic { + display: none; } + .md-header-nav__title { + padding: 0 2rem; + font-size: 1.8rem; + line-height: 4.8rem; } + .md-header-nav__title[data-md-state="active"] .md-header-nav__topic { + -webkit-transform: translateX(-2.5rem); + transform: translateX(-2.5rem); + transition: opacity 0.15s, -webkit-transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1); + transition: transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1), opacity 0.15s; + transition: transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1), opacity 0.15s, -webkit-transform 0.4s cubic-bezier(1, 0.7, 0.1, 0.1); + opacity: 0; + z-index: -1; + pointer-events: none; } + [dir="rtl"] .md-header-nav__title[data-md-state="active"] .md-header-nav__topic { + -webkit-transform: translateX(2.5rem); + transform: translateX(2.5rem); } + .md-header-nav__title[data-md-state="active"] .md-header-nav__topic + .md-header-nav__topic { + -webkit-transform: translateX(0); + transform: translateX(0); + transition: opacity 0.15s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s; + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + opacity: 1; + z-index: 0; + pointer-events: initial; } + .md-header-nav__source { + display: none; } + +.md-hero { + transition: background 0.25s; + background-color: #004E66; + color: white; + font-size: 2rem; + overflow: hidden; } + .md-hero__inner { + margin-top: 2rem; + padding: 1.6rem 1.6rem 0.8rem; + transition: opacity 0.25s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.25s; + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.25s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + transition-delay: 0.1s; } + [data-md-state="hidden"] .md-hero__inner { + pointer-events: none; + -webkit-transform: translateY(1.25rem); + transform: translateY(1.25rem); + transition: opacity 0.1s 0s, -webkit-transform 0s 0.4s; + transition: transform 0s 0.4s, opacity 0.1s 0s; + transition: transform 0s 0.4s, opacity 0.1s 0s, -webkit-transform 0s 0.4s; + opacity: 
0; } + .md-hero--expand .md-hero__inner { + margin-bottom: 2.4rem; } + +.md-footer-nav { + background-color: rgba(0, 0, 0, 0.87); + color: white; } + .md-footer-nav__inner { + padding: 0.4rem; + overflow: auto; } + .md-footer-nav__link { + padding-top: 2.8rem; + padding-bottom: 0.8rem; + transition: opacity 0.25s; } + .md-footer-nav__link:hover { + opacity: 0.7; } + .md-footer-nav__link--prev { + width: 25%; + float: left; } + [dir="rtl"] .md-footer-nav__link--prev { + float: right; } + .md-footer-nav__link--next { + width: 75%; + float: right; + text-align: right; } + [dir="rtl"] .md-footer-nav__link--next { + float: left; + text-align: left; } + .md-footer-nav__button { + transition: background 0.25s; } + .md-footer-nav__title { + position: relative; + padding: 0 2rem; + font-size: 1.8rem; + line-height: 4.8rem; } + .md-footer-nav__direction { + position: absolute; + right: 0; + left: 0; + margin-top: -2rem; + padding: 0 2rem; + color: rgba(255, 255, 255, 0.7); + font-size: 1.5rem; } + +.md-footer-meta { + background-color: rgba(0, 0, 0, 0.895); } + .md-footer-meta__inner { + padding: 0.4rem; + overflow: auto; } + html .md-footer-meta.md-typeset a { + color: rgba(255, 255, 255, 0.7); } + html .md-footer-meta.md-typeset a:focus, html .md-footer-meta.md-typeset a:hover { + color: white; } + +.md-footer-copyright { + margin: 0 1.2rem; + padding: 0.8rem 0; + color: rgba(255, 255, 255, 0.3); + font-size: 1.28rem; } + .md-footer-copyright__highlight { + color: rgba(255, 255, 255, 0.7); } + +.md-footer-social { + margin: 0 0.8rem; + padding: 0.4rem 0 1.2rem; } + .md-footer-social__link { + display: inline-block; + width: 3.2rem; + height: 3.2rem; + font-size: 1.6rem; + text-align: center; } + .md-footer-social__link::before { + line-height: 1.9; } + +.md-nav { + font-size: 1.4rem; + line-height: 1.3; } + .md-nav__title { + display: block; + padding: 0 1.2rem; + font-weight: 700; + text-overflow: ellipsis; + overflow: hidden; } + .md-nav__title::before { + display: none; + content: "\E5C4"; } + [dir="rtl"] .md-nav__title::before { + content: "\E5C8"; } + .md-nav__title .md-nav__button { + display: none; } + .md-nav__list { + margin: 0; + padding: 0; + list-style: none; } + .md-nav__item { + padding: 0 1.2rem; } + .md-nav__item:last-child { + padding-bottom: 1.2rem; } + .md-nav__item .md-nav__item { + padding-right: 0; } + [dir="rtl"] .md-nav__item .md-nav__item { + padding-right: 1.2rem; + padding-left: 0; } + .md-nav__item .md-nav__item:last-child { + padding-bottom: 0; } + .md-nav__button img { + width: 100%; + height: auto; } + .md-nav__link { + display: block; + margin-top: 0.625em; + transition: color 0.125s; + text-overflow: ellipsis; + cursor: pointer; + overflow: hidden; } + .md-nav__item--nested > .md-nav__link::after { + content: "\E313"; } + html .md-nav__link[for="__toc"] { + display: none; } + html .md-nav__link[for="__toc"] ~ .md-nav { + display: none; } + html .md-nav__link[for="__toc"] + .md-nav__link::after { + display: none; } + .md-nav__link[data-md-state="blur"] { + color: rgba(0, 0, 0, 0.54); } + .md-nav__link:active, .md-nav__link--active { + color: #1381D7; } + .md-nav__item--nested > .md-nav__link { + color: inherit; } + .md-nav__link:focus, .md-nav__link:hover { + color: #1998F1; } + .md-nav__source { + display: none; } + +.no-js .md-search { + display: none; } + +.md-search__overlay { + opacity: 0; + z-index: 1; } + +.md-search__form { + position: relative; } + +.md-search__input { + position: relative; + padding: 0 4.4rem 0 7.2rem; + text-overflow: ellipsis; + z-index: 
2; } + [dir="rtl"] .md-search__input { + padding: 0 7.2rem 0 4.4rem; } + .md-search__input::-webkit-input-placeholder { + transition: color 0.25s cubic-bezier(0.1, 0.7, 0.1, 1); } + .md-search__input:-ms-input-placeholder { + transition: color 0.25s cubic-bezier(0.1, 0.7, 0.1, 1); } + .md-search__input::-ms-input-placeholder { + transition: color 0.25s cubic-bezier(0.1, 0.7, 0.1, 1); } + .md-search__input::placeholder { + transition: color 0.25s cubic-bezier(0.1, 0.7, 0.1, 1); } + .md-search__input ~ .md-search__icon, .md-search__input::-webkit-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + .md-search__input ~ .md-search__icon, .md-search__input:-ms-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + .md-search__input ~ .md-search__icon, .md-search__input::-ms-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + .md-search__input ~ .md-search__icon, .md-search__input::placeholder { + color: rgba(0, 0, 0, 0.54); } + .md-search__input::-ms-clear { + display: none; } + +.md-search__icon { + position: absolute; + transition: color 0.25s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.25s; + font-size: 2.4rem; + cursor: pointer; + z-index: 2; } + .md-search__icon:hover { + opacity: 0.7; } + .md-search__icon[for="__search"] { + top: 0.6rem; + left: 1rem; } + [dir="rtl"] .md-search__icon[for="__search"] { + right: 1rem; + left: initial; } + .md-search__icon[for="__search"]::before { + content: "\E8B6"; } + .md-search__icon[type="reset"] { + top: 0.6rem; + right: 1rem; + -webkit-transform: scale(0.125); + transform: scale(0.125); + transition: opacity 0.15s, -webkit-transform 0.15s cubic-bezier(0.1, 0.7, 0.1, 1); + transition: transform 0.15s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s; + transition: transform 0.15s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s, -webkit-transform 0.15s cubic-bezier(0.1, 0.7, 0.1, 1); + opacity: 0; } + [dir="rtl"] .md-search__icon[type="reset"] { + right: initial; + left: 1rem; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__input:valid ~ .md-search__icon[type="reset"] { + -webkit-transform: scale(1); + transform: scale(1); + opacity: 1; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__input:valid ~ .md-search__icon[type="reset"]:hover { + opacity: 0.7; } + +.md-search__output { + position: absolute; + width: 100%; + border-radius: 0 0 0.2rem 0.2rem; + overflow: hidden; + z-index: 1; } + +.md-search__scrollwrap { + height: 100%; + background-color: white; + box-shadow: 0 0.1rem 0 rgba(0, 0, 0, 0.07) inset; + overflow-y: auto; + -webkit-overflow-scrolling: touch; } + +.md-search-result { + color: rgba(0, 0, 0, 0.87); + word-break: break-word; } + .md-search-result__meta { + padding: 0 1.6rem; + background-color: rgba(0, 0, 0, 0.07); + color: rgba(0, 0, 0, 0.54); + font-size: 1.28rem; + line-height: 3.6rem; } + .md-search-result__list { + margin: 0; + padding: 0; + border-top: 0.1rem solid rgba(0, 0, 0, 0.07); + list-style: none; } + .md-search-result__item { + box-shadow: 0 -0.1rem 0 rgba(0, 0, 0, 0.07); } + .md-search-result__link { + display: block; + transition: background 0.25s; + outline: 0; + overflow: hidden; } + .md-search-result__link[data-md-state="active"], .md-search-result__link:hover { + background-color: rgba(26, 214, 245, 0.1); } + .md-search-result__link[data-md-state="active"] .md-search-result__article::before, .md-search-result__link:hover .md-search-result__article::before { + opacity: 0.7; } + .md-search-result__link:last-child .md-search-result__teaser { + margin-bottom: 1.2rem; } + .md-search-result__article 
{ + position: relative; + padding: 0 1.6rem; + overflow: auto; } + .md-search-result__article--document::before { + position: absolute; + left: 0; + margin: 0.2rem; + transition: opacity 0.25s; + color: rgba(0, 0, 0, 0.54); + content: "\E880"; } + [dir="rtl"] .md-search-result__article--document::before { + right: 0; + left: initial; } + .md-search-result__article--document .md-search-result__title { + margin: 1.1rem 0; + font-size: 1.6rem; + font-weight: 400; + line-height: 1.4; } + .md-search-result__title { + margin: 0.5em 0; + font-size: 1.28rem; + font-weight: 700; + line-height: 1.4; } + .md-search-result__teaser { + display: -webkit-box; + max-height: 3.3rem; + margin: 0.5em 0; + color: rgba(0, 0, 0, 0.54); + font-size: 1.28rem; + line-height: 1.4; + text-overflow: ellipsis; + overflow: hidden; + -webkit-line-clamp: 2; } + .md-search-result em { + font-style: normal; + font-weight: 700; + text-decoration: underline; } + +.md-sidebar { + position: absolute; + width: 24.2rem; + padding: 2.4rem 0; + overflow: hidden; } + .md-sidebar[data-md-state="lock"] { + position: fixed; + top: 4.8rem; } + .md-sidebar--secondary { + display: none; } + .md-sidebar__scrollwrap { + max-height: 100%; + margin: 0 0.4rem; + overflow-y: auto; + -webkit-backface-visibility: hidden; + backface-visibility: hidden; } + .md-sidebar__scrollwrap::-webkit-scrollbar { + width: 0.4rem; + height: 0.4rem; } + .md-sidebar__scrollwrap::-webkit-scrollbar-thumb { + background-color: rgba(0, 0, 0, 0.26); } + .md-sidebar__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #1AD6F5; } + +@-webkit-keyframes md-source__facts--done { + 0% { + height: 0; } + 100% { + height: 1.3rem; } } + +@keyframes md-source__facts--done { + 0% { + height: 0; } + 100% { + height: 1.3rem; } } + +@-webkit-keyframes md-source__fact--done { + 0% { + -webkit-transform: translateY(100%); + transform: translateY(100%); + opacity: 0; } + 50% { + opacity: 0; } + 100% { + -webkit-transform: translateY(0%); + transform: translateY(0%); + opacity: 1; } } + +@keyframes md-source__fact--done { + 0% { + -webkit-transform: translateY(100%); + transform: translateY(100%); + opacity: 0; } + 50% { + opacity: 0; } + 100% { + -webkit-transform: translateY(0%); + transform: translateY(0%); + opacity: 1; } } + +.md-source { + display: block; + padding-right: 1.2rem; + transition: opacity 0.25s; + font-size: 1.3rem; + line-height: 1.2; + white-space: nowrap; } + [dir="rtl"] .md-source { + padding-right: initial; + padding-left: 1.2rem; } + .md-source:hover { + opacity: 0.7; } + .md-source::after { + display: inline-block; + height: 4.8rem; + content: ""; + vertical-align: middle; } + .md-source__icon { + display: inline-block; + width: 4.8rem; + height: 4.8rem; + content: ""; + vertical-align: middle; } + .md-source__icon svg { + width: 2.4rem; + height: 2.4rem; + margin-top: 1.2rem; + margin-left: 1.2rem; } + [dir="rtl"] .md-source__icon svg { + margin-right: 1.2rem; + margin-left: initial; } + .md-source__icon + .md-source__repository { + margin-left: -4.4rem; + padding-left: 4rem; } + [dir="rtl"] .md-source__icon + .md-source__repository { + margin-right: -4.4rem; + margin-left: initial; + padding-right: 4rem; + padding-left: initial; } + .md-source__repository { + display: inline-block; + max-width: 100%; + margin-left: 1.2rem; + font-weight: 700; + text-overflow: ellipsis; + overflow: hidden; + vertical-align: middle; } + .md-source__facts { + margin: 0; + padding: 0; + font-size: 1.1rem; + font-weight: 700; + list-style-type: none; + opacity: 0.75; 
+ overflow: hidden; } + [data-md-state="done"] .md-source__facts { + -webkit-animation: md-source__facts--done 0.25s ease-in; + animation: md-source__facts--done 0.25s ease-in; } + .md-source__fact { + float: left; } + [dir="rtl"] .md-source__fact { + float: right; } + [data-md-state="done"] .md-source__fact { + -webkit-animation: md-source__fact--done 0.4s ease-out; + animation: md-source__fact--done 0.4s ease-out; } + .md-source__fact::before { + margin: 0 0.2rem; + content: "\B7"; } + .md-source__fact:first-child::before { + display: none; } + +.md-source-file { + display: inline-block; + margin: 1em 0.5em 1em 0; + padding-right: 0.5rem; + border-radius: 0.2rem; + background-color: rgba(0, 0, 0, 0.07); + font-size: 1.28rem; + list-style-type: none; + cursor: pointer; + overflow: hidden; } + .md-source-file::before { + display: inline-block; + margin-right: 0.5rem; + padding: 0.5rem; + background-color: rgba(0, 0, 0, 0.26); + color: white; + font-size: 1.6rem; + content: "\E86F"; + vertical-align: middle; } + html .md-source-file { + transition: background 0.4s, color 0.4s, box-shadow 0.4s cubic-bezier(0.4, 0, 0.2, 1); } + html .md-source-file::before { + transition: inherit; } + html body .md-typeset .md-source-file { + color: rgba(0, 0, 0, 0.54); } + .md-source-file:hover { + box-shadow: 0 0 8px rgba(0, 0, 0, 0.18), 0 8px 16px rgba(0, 0, 0, 0.36); } + .md-source-file:hover::before { + background-color: #1AD6F5; } + +.md-tabs { + width: 100%; + transition: background 0.25s; + background-color: #004E66; + color: white; + overflow: auto; } + .md-tabs__list { + margin: 0; + margin-left: 0.4rem; + padding: 0; + list-style: none; + white-space: nowrap; } + .md-tabs__item { + display: inline-block; + height: 4.8rem; + padding-right: 1.2rem; + padding-left: 1.2rem; } + .md-tabs__link { + display: block; + margin-top: 1.6rem; + transition: opacity 0.25s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.25s; + transition: transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.25s, -webkit-transform 0.4s cubic-bezier(0.1, 0.7, 0.1, 1); + font-size: 1.4rem; + opacity: 0.7; } + .md-tabs__link--active, .md-tabs__link:hover { + color: inherit; + opacity: 1; } + .md-tabs__item:nth-child(2) .md-tabs__link { + transition-delay: 0.02s; } + .md-tabs__item:nth-child(3) .md-tabs__link { + transition-delay: 0.04s; } + .md-tabs__item:nth-child(4) .md-tabs__link { + transition-delay: 0.06s; } + .md-tabs__item:nth-child(5) .md-tabs__link { + transition-delay: 0.08s; } + .md-tabs__item:nth-child(6) .md-tabs__link { + transition-delay: 0.1s; } + .md-tabs__item:nth-child(7) .md-tabs__link { + transition-delay: 0.12s; } + .md-tabs__item:nth-child(8) .md-tabs__link { + transition-delay: 0.14s; } + .md-tabs__item:nth-child(9) .md-tabs__link { + transition-delay: 0.16s; } + .md-tabs__item:nth-child(10) .md-tabs__link { + transition-delay: 0.18s; } + .md-tabs__item:nth-child(11) .md-tabs__link { + transition-delay: 0.2s; } + .md-tabs__item:nth-child(12) .md-tabs__link { + transition-delay: 0.22s; } + .md-tabs__item:nth-child(13) .md-tabs__link { + transition-delay: 0.24s; } + .md-tabs__item:nth-child(14) .md-tabs__link { + transition-delay: 0.26s; } + .md-tabs__item:nth-child(15) .md-tabs__link { + transition-delay: 0.28s; } + .md-tabs__item:nth-child(16) .md-tabs__link { + transition-delay: 0.3s; } + .md-tabs[data-md-state="hidden"] { + pointer-events: none; } + .md-tabs[data-md-state="hidden"] .md-tabs__link { + -webkit-transform: 
translateY(50%); + transform: translateY(50%); + transition: color 0.25s, opacity 0.1s, -webkit-transform 0s 0.4s; + transition: color 0.25s, transform 0s 0.4s, opacity 0.1s; + transition: color 0.25s, transform 0s 0.4s, opacity 0.1s, -webkit-transform 0s 0.4s; + opacity: 0; } + +.md-typeset .admonition, .md-typeset details { + box-shadow: 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12), 0 3px 1px -2px rgba(0, 0, 0, 0.2); + position: relative; + margin: 1.5625em 0; + padding: 0 1.2rem; + border-left: 0.4rem solid #448aff; + border-radius: 0.2rem; + font-size: 1.28rem; + overflow: auto; } + [dir="rtl"] .md-typeset .admonition, [dir="rtl"] .md-typeset details { + border-right: 0.4rem solid #448aff; + border-left: none; } + html .md-typeset .admonition > :last-child, html .md-typeset details > :last-child { + margin-bottom: 1.2rem; } + .md-typeset .admonition .admonition, .md-typeset details .admonition, .md-typeset .admonition details, .md-typeset details details { + margin: 1em 0; } + .md-typeset .admonition > .admonition-title, .md-typeset details > .admonition-title, .md-typeset .admonition > summary, .md-typeset details > summary { + margin: 0 -1.2rem; + padding: 0.8rem 1.2rem 0.8rem 4rem; + border-bottom: 0.1rem solid rgba(68, 138, 255, 0.1); + background-color: rgba(68, 138, 255, 0.1); + font-weight: 700; } + [dir="rtl"] .md-typeset .admonition > .admonition-title, [dir="rtl"] .md-typeset details > .admonition-title, [dir="rtl"] .md-typeset .admonition > summary, [dir="rtl"] .md-typeset details > summary { + padding: 0.8rem 4rem 0.8rem 1.2rem; } + .md-typeset .admonition > .admonition-title:last-child, .md-typeset details > .admonition-title:last-child, .md-typeset .admonition > summary:last-child, .md-typeset details > summary:last-child { + margin-bottom: 0; } + .md-typeset .admonition > .admonition-title::before, .md-typeset details > .admonition-title::before, .md-typeset .admonition > summary::before, .md-typeset details > summary::before { + position: absolute; + left: 1.2rem; + color: #448aff; + font-size: 2rem; + content: "\E3C9"; } + [dir="rtl"] .md-typeset .admonition > .admonition-title::before, [dir="rtl"] .md-typeset details > .admonition-title::before, [dir="rtl"] .md-typeset .admonition > summary::before, [dir="rtl"] .md-typeset details > summary::before { + right: 1.2rem; + left: initial; } + .md-typeset .admonition.summary, .md-typeset details.summary, .md-typeset .admonition.tldr, .md-typeset details.tldr, .md-typeset .admonition.abstract, .md-typeset details.abstract { + border-left-color: #00b0ff; } + [dir="rtl"] .md-typeset .admonition.summary, [dir="rtl"] .md-typeset details.summary, [dir="rtl"] .md-typeset .admonition.tldr, [dir="rtl"] .md-typeset details.tldr, [dir="rtl"] .md-typeset .admonition.abstract, [dir="rtl"] .md-typeset details.abstract { + border-right-color: #00b0ff; } + .md-typeset .admonition.summary > .admonition-title, .md-typeset details.summary > .admonition-title, .md-typeset .admonition.tldr > .admonition-title, .md-typeset details.tldr > .admonition-title, .md-typeset .admonition.summary > summary, .md-typeset details.summary > summary, .md-typeset .admonition.tldr > summary, .md-typeset details.tldr > summary, .md-typeset .admonition.abstract > .admonition-title, .md-typeset details.abstract > .admonition-title, .md-typeset .admonition.abstract > summary, .md-typeset details.abstract > summary { + border-bottom-color: rgba(0, 176, 255, 0.1); + background-color: rgba(0, 176, 255, 0.1); } + .md-typeset 
.admonition.summary > .admonition-title::before, .md-typeset details.summary > .admonition-title::before, .md-typeset .admonition.tldr > .admonition-title::before, .md-typeset details.tldr > .admonition-title::before, .md-typeset .admonition.summary > summary::before, .md-typeset details.summary > summary::before, .md-typeset .admonition.tldr > summary::before, .md-typeset details.tldr > summary::before, .md-typeset .admonition.abstract > .admonition-title::before, .md-typeset details.abstract > .admonition-title::before, .md-typeset .admonition.abstract > summary::before, .md-typeset details.abstract > summary::before { + color: #00b0ff; + content: "\E8D2"; } + .md-typeset .admonition.todo, .md-typeset details.todo, .md-typeset .admonition.info, .md-typeset details.info { + border-left-color: #00b8d4; } + [dir="rtl"] .md-typeset .admonition.todo, [dir="rtl"] .md-typeset details.todo, [dir="rtl"] .md-typeset .admonition.info, [dir="rtl"] .md-typeset details.info { + border-right-color: #00b8d4; } + .md-typeset .admonition.todo > .admonition-title, .md-typeset details.todo > .admonition-title, .md-typeset .admonition.todo > summary, .md-typeset details.todo > summary, .md-typeset .admonition.info > .admonition-title, .md-typeset details.info > .admonition-title, .md-typeset .admonition.info > summary, .md-typeset details.info > summary { + border-bottom-color: rgba(0, 184, 212, 0.1); + background-color: rgba(0, 184, 212, 0.1); } + .md-typeset .admonition.todo > .admonition-title::before, .md-typeset details.todo > .admonition-title::before, .md-typeset .admonition.todo > summary::before, .md-typeset details.todo > summary::before, .md-typeset .admonition.info > .admonition-title::before, .md-typeset details.info > .admonition-title::before, .md-typeset .admonition.info > summary::before, .md-typeset details.info > summary::before { + color: #00b8d4; + content: "\E88E"; } + .md-typeset .admonition.hint, .md-typeset details.hint, .md-typeset .admonition.important, .md-typeset details.important, .md-typeset .admonition.tip, .md-typeset details.tip { + border-left-color: #00bfa5; } + [dir="rtl"] .md-typeset .admonition.hint, [dir="rtl"] .md-typeset details.hint, [dir="rtl"] .md-typeset .admonition.important, [dir="rtl"] .md-typeset details.important, [dir="rtl"] .md-typeset .admonition.tip, [dir="rtl"] .md-typeset details.tip { + border-right-color: #00bfa5; } + .md-typeset .admonition.hint > .admonition-title, .md-typeset details.hint > .admonition-title, .md-typeset .admonition.important > .admonition-title, .md-typeset details.important > .admonition-title, .md-typeset .admonition.hint > summary, .md-typeset details.hint > summary, .md-typeset .admonition.important > summary, .md-typeset details.important > summary, .md-typeset .admonition.tip > .admonition-title, .md-typeset details.tip > .admonition-title, .md-typeset .admonition.tip > summary, .md-typeset details.tip > summary { + border-bottom-color: rgba(0, 191, 165, 0.1); + background-color: rgba(0, 191, 165, 0.1); } + .md-typeset .admonition.hint > .admonition-title::before, .md-typeset details.hint > .admonition-title::before, .md-typeset .admonition.important > .admonition-title::before, .md-typeset details.important > .admonition-title::before, .md-typeset .admonition.hint > summary::before, .md-typeset details.hint > summary::before, .md-typeset .admonition.important > summary::before, .md-typeset details.important > summary::before, .md-typeset .admonition.tip > .admonition-title::before, .md-typeset 
details.tip > .admonition-title::before, .md-typeset .admonition.tip > summary::before, .md-typeset details.tip > summary::before { + color: #00bfa5; + content: "\E80E"; } + .md-typeset .admonition.check, .md-typeset details.check, .md-typeset .admonition.done, .md-typeset details.done, .md-typeset .admonition.success, .md-typeset details.success { + border-left-color: #00c853; } + [dir="rtl"] .md-typeset .admonition.check, [dir="rtl"] .md-typeset details.check, [dir="rtl"] .md-typeset .admonition.done, [dir="rtl"] .md-typeset details.done, [dir="rtl"] .md-typeset .admonition.success, [dir="rtl"] .md-typeset details.success { + border-right-color: #00c853; } + .md-typeset .admonition.check > .admonition-title, .md-typeset details.check > .admonition-title, .md-typeset .admonition.done > .admonition-title, .md-typeset details.done > .admonition-title, .md-typeset .admonition.check > summary, .md-typeset details.check > summary, .md-typeset .admonition.done > summary, .md-typeset details.done > summary, .md-typeset .admonition.success > .admonition-title, .md-typeset details.success > .admonition-title, .md-typeset .admonition.success > summary, .md-typeset details.success > summary { + border-bottom-color: rgba(0, 200, 83, 0.1); + background-color: rgba(0, 200, 83, 0.1); } + .md-typeset .admonition.check > .admonition-title::before, .md-typeset details.check > .admonition-title::before, .md-typeset .admonition.done > .admonition-title::before, .md-typeset details.done > .admonition-title::before, .md-typeset .admonition.check > summary::before, .md-typeset details.check > summary::before, .md-typeset .admonition.done > summary::before, .md-typeset details.done > summary::before, .md-typeset .admonition.success > .admonition-title::before, .md-typeset details.success > .admonition-title::before, .md-typeset .admonition.success > summary::before, .md-typeset details.success > summary::before { + color: #00c853; + content: "\E876"; } + .md-typeset .admonition.help, .md-typeset details.help, .md-typeset .admonition.faq, .md-typeset details.faq, .md-typeset .admonition.question, .md-typeset details.question { + border-left-color: #64dd17; } + [dir="rtl"] .md-typeset .admonition.help, [dir="rtl"] .md-typeset details.help, [dir="rtl"] .md-typeset .admonition.faq, [dir="rtl"] .md-typeset details.faq, [dir="rtl"] .md-typeset .admonition.question, [dir="rtl"] .md-typeset details.question { + border-right-color: #64dd17; } + .md-typeset .admonition.help > .admonition-title, .md-typeset details.help > .admonition-title, .md-typeset .admonition.faq > .admonition-title, .md-typeset details.faq > .admonition-title, .md-typeset .admonition.help > summary, .md-typeset details.help > summary, .md-typeset .admonition.faq > summary, .md-typeset details.faq > summary, .md-typeset .admonition.question > .admonition-title, .md-typeset details.question > .admonition-title, .md-typeset .admonition.question > summary, .md-typeset details.question > summary { + border-bottom-color: rgba(100, 221, 23, 0.1); + background-color: rgba(100, 221, 23, 0.1); } + .md-typeset .admonition.help > .admonition-title::before, .md-typeset details.help > .admonition-title::before, .md-typeset .admonition.faq > .admonition-title::before, .md-typeset details.faq > .admonition-title::before, .md-typeset .admonition.help > summary::before, .md-typeset details.help > summary::before, .md-typeset .admonition.faq > summary::before, .md-typeset details.faq > summary::before, .md-typeset .admonition.question > 
.admonition-title::before, .md-typeset details.question > .admonition-title::before, .md-typeset .admonition.question > summary::before, .md-typeset details.question > summary::before { + color: #64dd17; + content: "\E887"; } + .md-typeset .admonition.caution, .md-typeset details.caution, .md-typeset .admonition.attention, .md-typeset details.attention, .md-typeset .admonition.warning, .md-typeset details.warning { + border-left-color: #ff9100; } + [dir="rtl"] .md-typeset .admonition.caution, [dir="rtl"] .md-typeset details.caution, [dir="rtl"] .md-typeset .admonition.attention, [dir="rtl"] .md-typeset details.attention, [dir="rtl"] .md-typeset .admonition.warning, [dir="rtl"] .md-typeset details.warning { + border-right-color: #ff9100; } + .md-typeset .admonition.caution > .admonition-title, .md-typeset details.caution > .admonition-title, .md-typeset .admonition.attention > .admonition-title, .md-typeset details.attention > .admonition-title, .md-typeset .admonition.caution > summary, .md-typeset details.caution > summary, .md-typeset .admonition.attention > summary, .md-typeset details.attention > summary, .md-typeset .admonition.warning > .admonition-title, .md-typeset details.warning > .admonition-title, .md-typeset .admonition.warning > summary, .md-typeset details.warning > summary { + border-bottom-color: rgba(255, 145, 0, 0.1); + background-color: rgba(255, 145, 0, 0.1); } + .md-typeset .admonition.caution > .admonition-title::before, .md-typeset details.caution > .admonition-title::before, .md-typeset .admonition.attention > .admonition-title::before, .md-typeset details.attention > .admonition-title::before, .md-typeset .admonition.caution > summary::before, .md-typeset details.caution > summary::before, .md-typeset .admonition.attention > summary::before, .md-typeset details.attention > summary::before, .md-typeset .admonition.warning > .admonition-title::before, .md-typeset details.warning > .admonition-title::before, .md-typeset .admonition.warning > summary::before, .md-typeset details.warning > summary::before { + color: #ff9100; + content: "\E002"; } + .md-typeset .admonition.fail, .md-typeset details.fail, .md-typeset .admonition.missing, .md-typeset details.missing, .md-typeset .admonition.failure, .md-typeset details.failure { + border-left-color: #ff5252; } + [dir="rtl"] .md-typeset .admonition.fail, [dir="rtl"] .md-typeset details.fail, [dir="rtl"] .md-typeset .admonition.missing, [dir="rtl"] .md-typeset details.missing, [dir="rtl"] .md-typeset .admonition.failure, [dir="rtl"] .md-typeset details.failure { + border-right-color: #ff5252; } + .md-typeset .admonition.fail > .admonition-title, .md-typeset details.fail > .admonition-title, .md-typeset .admonition.missing > .admonition-title, .md-typeset details.missing > .admonition-title, .md-typeset .admonition.fail > summary, .md-typeset details.fail > summary, .md-typeset .admonition.missing > summary, .md-typeset details.missing > summary, .md-typeset .admonition.failure > .admonition-title, .md-typeset details.failure > .admonition-title, .md-typeset .admonition.failure > summary, .md-typeset details.failure > summary { + border-bottom-color: rgba(255, 82, 82, 0.1); + background-color: rgba(255, 82, 82, 0.1); } + .md-typeset .admonition.fail > .admonition-title::before, .md-typeset details.fail > .admonition-title::before, .md-typeset .admonition.missing > .admonition-title::before, .md-typeset details.missing > .admonition-title::before, .md-typeset .admonition.fail > summary::before, 
.md-typeset details.fail > summary::before, .md-typeset .admonition.missing > summary::before, .md-typeset details.missing > summary::before, .md-typeset .admonition.failure > .admonition-title::before, .md-typeset details.failure > .admonition-title::before, .md-typeset .admonition.failure > summary::before, .md-typeset details.failure > summary::before { + color: #ff5252; + content: "\E14C"; } + .md-typeset .admonition.error, .md-typeset details.error, .md-typeset .admonition.danger, .md-typeset details.danger { + border-left-color: #ff1744; } + [dir="rtl"] .md-typeset .admonition.error, [dir="rtl"] .md-typeset details.error, [dir="rtl"] .md-typeset .admonition.danger, [dir="rtl"] .md-typeset details.danger { + border-right-color: #ff1744; } + .md-typeset .admonition.error > .admonition-title, .md-typeset details.error > .admonition-title, .md-typeset .admonition.error > summary, .md-typeset details.error > summary, .md-typeset .admonition.danger > .admonition-title, .md-typeset details.danger > .admonition-title, .md-typeset .admonition.danger > summary, .md-typeset details.danger > summary { + border-bottom-color: rgba(255, 23, 68, 0.1); + background-color: rgba(255, 23, 68, 0.1); } + .md-typeset .admonition.error > .admonition-title::before, .md-typeset details.error > .admonition-title::before, .md-typeset .admonition.error > summary::before, .md-typeset details.error > summary::before, .md-typeset .admonition.danger > .admonition-title::before, .md-typeset details.danger > .admonition-title::before, .md-typeset .admonition.danger > summary::before, .md-typeset details.danger > summary::before { + color: #ff1744; + content: "\E3E7"; } + .md-typeset .admonition.bug, .md-typeset details.bug { + border-left-color: #f50057; } + [dir="rtl"] .md-typeset .admonition.bug, [dir="rtl"] .md-typeset details.bug { + border-right-color: #f50057; } + .md-typeset .admonition.bug > .admonition-title, .md-typeset details.bug > .admonition-title, .md-typeset .admonition.bug > summary, .md-typeset details.bug > summary { + border-bottom-color: rgba(245, 0, 87, 0.1); + background-color: rgba(245, 0, 87, 0.1); } + .md-typeset .admonition.bug > .admonition-title::before, .md-typeset details.bug > .admonition-title::before, .md-typeset .admonition.bug > summary::before, .md-typeset details.bug > summary::before { + color: #f50057; + content: "\E868"; } + .md-typeset .admonition.example, .md-typeset details.example { + border-left-color: #651fff; } + [dir="rtl"] .md-typeset .admonition.example, [dir="rtl"] .md-typeset details.example { + border-right-color: #651fff; } + .md-typeset .admonition.example > .admonition-title, .md-typeset details.example > .admonition-title, .md-typeset .admonition.example > summary, .md-typeset details.example > summary { + border-bottom-color: rgba(101, 31, 255, 0.1); + background-color: rgba(101, 31, 255, 0.1); } + .md-typeset .admonition.example > .admonition-title::before, .md-typeset details.example > .admonition-title::before, .md-typeset .admonition.example > summary::before, .md-typeset details.example > summary::before { + color: #651fff; + content: "\E242"; } + .md-typeset .admonition.cite, .md-typeset details.cite, .md-typeset .admonition.quote, .md-typeset details.quote { + border-left-color: #9e9e9e; } + [dir="rtl"] .md-typeset .admonition.cite, [dir="rtl"] .md-typeset details.cite, [dir="rtl"] .md-typeset .admonition.quote, [dir="rtl"] .md-typeset details.quote { + border-right-color: #9e9e9e; } + .md-typeset .admonition.cite 
> .admonition-title, .md-typeset details.cite > .admonition-title, .md-typeset .admonition.cite > summary, .md-typeset details.cite > summary, .md-typeset .admonition.quote > .admonition-title, .md-typeset details.quote > .admonition-title, .md-typeset .admonition.quote > summary, .md-typeset details.quote > summary { + border-bottom-color: rgba(158, 158, 158, 0.1); + background-color: rgba(158, 158, 158, 0.1); } + .md-typeset .admonition.cite > .admonition-title::before, .md-typeset details.cite > .admonition-title::before, .md-typeset .admonition.cite > summary::before, .md-typeset details.cite > summary::before, .md-typeset .admonition.quote > .admonition-title::before, .md-typeset details.quote > .admonition-title::before, .md-typeset .admonition.quote > summary::before, .md-typeset details.quote > summary::before { + color: #9e9e9e; + content: "\E244"; } + +.codehilite .o, .md-typeset .highlight .o { + color: inherit; } + +.codehilite .ow, .md-typeset .highlight .ow { + color: inherit; } + +.codehilite .ge, .md-typeset .highlight .ge { + color: #000000; } + +.codehilite .gr, .md-typeset .highlight .gr { + color: #AA0000; } + +.codehilite .gh, .md-typeset .highlight .gh { + color: #999999; } + +.codehilite .go, .md-typeset .highlight .go { + color: #888888; } + +.codehilite .gp, .md-typeset .highlight .gp { + color: #555555; } + +.codehilite .gs, .md-typeset .highlight .gs { + color: inherit; } + +.codehilite .gu, .md-typeset .highlight .gu { + color: #AAAAAA; } + +.codehilite .gt, .md-typeset .highlight .gt { + color: #AA0000; } + +.codehilite .gd, .md-typeset .highlight .gd { + background-color: #FFDDDD; } + +.codehilite .gi, .md-typeset .highlight .gi { + background-color: #DDFFDD; } + +.codehilite .k, .md-typeset .highlight .k { + color: #3B78E7; } + +.codehilite .kc, .md-typeset .highlight .kc { + color: #A71D5D; } + +.codehilite .kd, .md-typeset .highlight .kd { + color: #3B78E7; } + +.codehilite .kn, .md-typeset .highlight .kn { + color: #3B78E7; } + +.codehilite .kp, .md-typeset .highlight .kp { + color: #A71D5D; } + +.codehilite .kr, .md-typeset .highlight .kr { + color: #3E61A2; } + +.codehilite .kt, .md-typeset .highlight .kt { + color: #3E61A2; } + +.codehilite .c, .md-typeset .highlight .c { + color: #999999; } + +.codehilite .cm, .md-typeset .highlight .cm { + color: #999999; } + +.codehilite .cp, .md-typeset .highlight .cp { + color: #666666; } + +.codehilite .c1, .md-typeset .highlight .c1 { + color: #999999; } + +.codehilite .ch, .md-typeset .highlight .ch { + color: #999999; } + +.codehilite .cs, .md-typeset .highlight .cs { + color: #999999; } + +.codehilite .na, .md-typeset .highlight .na { + color: #C2185B; } + +.codehilite .nb, .md-typeset .highlight .nb { + color: #C2185B; } + +.codehilite .bp, .md-typeset .highlight .bp { + color: #3E61A2; } + +.codehilite .nc, .md-typeset .highlight .nc { + color: #C2185B; } + +.codehilite .no, .md-typeset .highlight .no { + color: #3E61A2; } + +.codehilite .nd, .md-typeset .highlight .nd { + color: #666666; } + +.codehilite .ni, .md-typeset .highlight .ni { + color: #666666; } + +.codehilite .ne, .md-typeset .highlight .ne { + color: #C2185B; } + +.codehilite .nf, .md-typeset .highlight .nf { + color: #C2185B; } + +.codehilite .nl, .md-typeset .highlight .nl { + color: #3B5179; } + +.codehilite .nn, .md-typeset .highlight .nn { + color: #EC407A; } + +.codehilite .nt, .md-typeset .highlight .nt { + color: #3B78E7; } + +.codehilite .nv, .md-typeset .highlight .nv { + color: #3E61A2; } + +.codehilite .vc, .md-typeset 
.highlight .vc { + color: #3E61A2; } + +.codehilite .vg, .md-typeset .highlight .vg { + color: #3E61A2; } + +.codehilite .vi, .md-typeset .highlight .vi { + color: #3E61A2; } + +.codehilite .nx, .md-typeset .highlight .nx { + color: #EC407A; } + +.codehilite .m, .md-typeset .highlight .m { + color: #E74C3C; } + +.codehilite .mf, .md-typeset .highlight .mf { + color: #E74C3C; } + +.codehilite .mh, .md-typeset .highlight .mh { + color: #E74C3C; } + +.codehilite .mi, .md-typeset .highlight .mi { + color: #E74C3C; } + +.codehilite .il, .md-typeset .highlight .il { + color: #E74C3C; } + +.codehilite .mo, .md-typeset .highlight .mo { + color: #E74C3C; } + +.codehilite .s, .md-typeset .highlight .s { + color: #0D904F; } + +.codehilite .sb, .md-typeset .highlight .sb { + color: #0D904F; } + +.codehilite .sc, .md-typeset .highlight .sc { + color: #0D904F; } + +.codehilite .sd, .md-typeset .highlight .sd { + color: #999999; } + +.codehilite .s2, .md-typeset .highlight .s2 { + color: #0D904F; } + +.codehilite .se, .md-typeset .highlight .se { + color: #183691; } + +.codehilite .sh, .md-typeset .highlight .sh { + color: #183691; } + +.codehilite .si, .md-typeset .highlight .si { + color: #183691; } + +.codehilite .sx, .md-typeset .highlight .sx { + color: #183691; } + +.codehilite .sr, .md-typeset .highlight .sr { + color: #009926; } + +.codehilite .s1, .md-typeset .highlight .s1 { + color: #0D904F; } + +.codehilite .ss, .md-typeset .highlight .ss { + color: #0D904F; } + +.codehilite .err, .md-typeset .highlight .err { + color: #A61717; } + +.codehilite .w, .md-typeset .highlight .w { + color: transparent; } + +.codehilite .hll, .md-typeset .highlight .hll { + display: block; + margin: 0 -1.2rem; + padding: 0 1.2rem; + background-color: rgba(255, 235, 59, 0.5); } + +.md-typeset .codehilite, .md-typeset .highlight { + position: relative; + margin: 1em 0; + padding: 0; + border-radius: 0.2rem; + background-color: rgba(236, 236, 236, 0.5); + color: #37474F; + line-height: 1.4; + -webkit-overflow-scrolling: touch; } + .md-typeset .codehilite pre, .md-typeset .highlight pre, + .md-typeset .codehilite code, .md-typeset .highlight code { + display: block; + margin: 0; + padding: 1.05rem 1.2rem; + background-color: transparent; + overflow: auto; + vertical-align: top; } + .md-typeset .codehilite pre::-webkit-scrollbar, .md-typeset .highlight pre::-webkit-scrollbar, + .md-typeset .codehilite code::-webkit-scrollbar, .md-typeset .highlight code::-webkit-scrollbar { + width: 0.4rem; + height: 0.4rem; } + .md-typeset .codehilite pre::-webkit-scrollbar-thumb, .md-typeset .highlight pre::-webkit-scrollbar-thumb, + .md-typeset .codehilite code::-webkit-scrollbar-thumb, .md-typeset .highlight code::-webkit-scrollbar-thumb { + background-color: rgba(0, 0, 0, 0.26); } + .md-typeset .codehilite pre::-webkit-scrollbar-thumb:hover, .md-typeset .highlight pre::-webkit-scrollbar-thumb:hover, + .md-typeset .codehilite code::-webkit-scrollbar-thumb:hover, .md-typeset .highlight code::-webkit-scrollbar-thumb:hover { + background-color: #1AD6F5; } + +.md-typeset pre.codehilite, .md-typeset pre.highlight { + overflow: visible; } + .md-typeset pre.codehilite code, .md-typeset pre.highlight code { + display: block; + padding: 1.05rem 1.2rem; + overflow: auto; } + +.md-typeset .codehilitetable, .md-typeset .highlighttable { + display: block; + margin: 1em 0; + border-radius: 0.2em; + font-size: 1.6rem; + overflow: hidden; } + .md-typeset .codehilitetable tbody, .md-typeset .highlighttable tbody, + .md-typeset .codehilitetable td, 
.md-typeset .highlighttable td { + display: block; + padding: 0; } + .md-typeset .codehilitetable tr, .md-typeset .highlighttable tr { + display: flex; } + .md-typeset .codehilitetable .codehilite, .md-typeset .highlighttable .codehilite, .md-typeset .codehilitetable .highlight, .md-typeset .highlighttable .highlight, + .md-typeset .codehilitetable .linenodiv, .md-typeset .highlighttable .linenodiv { + margin: 0; + border-radius: 0; } + .md-typeset .codehilitetable .linenodiv, .md-typeset .highlighttable .linenodiv { + padding: 1.05rem 1.2rem; } + .md-typeset .codehilitetable .linenos, .md-typeset .highlighttable .linenos { + background-color: rgba(0, 0, 0, 0.07); + color: rgba(0, 0, 0, 0.26); + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; } + .md-typeset .codehilitetable .linenos pre, .md-typeset .highlighttable .linenos pre { + margin: 0; + padding: 0; + background-color: transparent; + color: inherit; + text-align: right; } + .md-typeset .codehilitetable .code, .md-typeset .highlighttable .code { + flex: 1; + overflow: hidden; } + +.md-typeset > .codehilitetable, .md-typeset > .highlighttable { + box-shadow: none; } + +.md-typeset [id^="fnref:"] { + display: inline-block; } + .md-typeset [id^="fnref:"]:target { + margin-top: -7.6rem; + padding-top: 7.6rem; + pointer-events: none; } + +.md-typeset [id^="fn:"]::before { + display: none; + height: 0; + content: ""; } + +.md-typeset [id^="fn:"]:target::before { + display: block; + margin-top: -7rem; + padding-top: 7rem; + pointer-events: none; } + +.md-typeset .footnote { + color: rgba(0, 0, 0, 0.54); + font-size: 1.28rem; } + .md-typeset .footnote ol { + margin-left: 0; } + .md-typeset .footnote li { + transition: color 0.25s; } + .md-typeset .footnote li:target { + color: rgba(0, 0, 0, 0.87); } + .md-typeset .footnote li :first-child { + margin-top: 0; } + .md-typeset .footnote li:hover .footnote-backref, + .md-typeset .footnote li:target .footnote-backref { + -webkit-transform: translateX(0); + transform: translateX(0); + opacity: 1; } + .md-typeset .footnote li:hover .footnote-backref:hover, + .md-typeset .footnote li:target .footnote-backref { + color: #1AD6F5; } + +.md-typeset .footnote-ref { + display: inline-block; + pointer-events: initial; } + .md-typeset .footnote-ref::before { + display: inline; + margin: 0 0.2em; + border-left: 0.1rem solid rgba(0, 0, 0, 0.26); + font-size: 1.25em; + content: ""; + vertical-align: -0.5rem; } + +.md-typeset .footnote-backref { + display: inline-block; + -webkit-transform: translateX(0.5rem); + transform: translateX(0.5rem); + transition: color 0.25s, opacity 0.125s 0.125s, -webkit-transform 0.25s 0.125s; + transition: transform 0.25s 0.125s, color 0.25s, opacity 0.125s 0.125s; + transition: transform 0.25s 0.125s, color 0.25s, opacity 0.125s 0.125s, -webkit-transform 0.25s 0.125s; + color: rgba(0, 0, 0, 0.26); + font-size: 0; + opacity: 0; + vertical-align: text-bottom; } + [dir="rtl"] .md-typeset .footnote-backref { + -webkit-transform: translateX(-0.5rem); + transform: translateX(-0.5rem); } + .md-typeset .footnote-backref::before { + display: inline-block; + font-size: 1.6rem; + content: "\E31B"; } + [dir="rtl"] .md-typeset .footnote-backref::before { + -webkit-transform: scaleX(-1); + transform: scaleX(-1); } + +.md-typeset .headerlink { + display: inline-block; + margin-left: 1rem; + -webkit-transform: translate(0, 0.5rem); + transform: translate(0, 0.5rem); + transition: color 0.25s, opacity 0.125s 0.25s, -webkit-transform 0.25s 0.25s; 
+ transition: transform 0.25s 0.25s, color 0.25s, opacity 0.125s 0.25s; + transition: transform 0.25s 0.25s, color 0.25s, opacity 0.125s 0.25s, -webkit-transform 0.25s 0.25s; + opacity: 0; } + [dir="rtl"] .md-typeset .headerlink { + margin-right: 1rem; + margin-left: initial; } + html body .md-typeset .headerlink { + color: rgba(0, 0, 0, 0.26); } + +.md-typeset h1[id]::before { + display: block; + margin-top: -0.9rem; + padding-top: 0.9rem; + content: ""; } + +.md-typeset h1[id]:target::before { + margin-top: -6.9rem; + padding-top: 6.9rem; } + +.md-typeset h1[id]:hover .headerlink, +.md-typeset h1[id]:target .headerlink, +.md-typeset h1[id] .headerlink:focus { + -webkit-transform: translate(0, 0); + transform: translate(0, 0); + opacity: 1; } + +.md-typeset h1[id]:hover .headerlink:hover, +.md-typeset h1[id]:target .headerlink, +.md-typeset h1[id] .headerlink:focus { + color: #1AD6F5; } + +.md-typeset h2[id]::before { + display: block; + margin-top: -0.8rem; + padding-top: 0.8rem; + content: ""; } + +.md-typeset h2[id]:target::before { + margin-top: -6.8rem; + padding-top: 6.8rem; } + +.md-typeset h2[id]:hover .headerlink, +.md-typeset h2[id]:target .headerlink, +.md-typeset h2[id] .headerlink:focus { + -webkit-transform: translate(0, 0); + transform: translate(0, 0); + opacity: 1; } + +.md-typeset h2[id]:hover .headerlink:hover, +.md-typeset h2[id]:target .headerlink, +.md-typeset h2[id] .headerlink:focus { + color: #1AD6F5; } + +.md-typeset h3[id]::before { + display: block; + margin-top: -0.9rem; + padding-top: 0.9rem; + content: ""; } + +.md-typeset h3[id]:target::before { + margin-top: -6.9rem; + padding-top: 6.9rem; } + +.md-typeset h3[id]:hover .headerlink, +.md-typeset h3[id]:target .headerlink, +.md-typeset h3[id] .headerlink:focus { + -webkit-transform: translate(0, 0); + transform: translate(0, 0); + opacity: 1; } + +.md-typeset h3[id]:hover .headerlink:hover, +.md-typeset h3[id]:target .headerlink, +.md-typeset h3[id] .headerlink:focus { + color: #1AD6F5; } + +.md-typeset h4[id]::before { + display: block; + margin-top: -0.9rem; + padding-top: 0.9rem; + content: ""; } + +.md-typeset h4[id]:target::before { + margin-top: -6.9rem; + padding-top: 6.9rem; } + +.md-typeset h4[id]:hover .headerlink, +.md-typeset h4[id]:target .headerlink, +.md-typeset h4[id] .headerlink:focus { + -webkit-transform: translate(0, 0); + transform: translate(0, 0); + opacity: 1; } + +.md-typeset h4[id]:hover .headerlink:hover, +.md-typeset h4[id]:target .headerlink, +.md-typeset h4[id] .headerlink:focus { + color: #1AD6F5; } + +.md-typeset h5[id]::before { + display: block; + margin-top: -1.1rem; + padding-top: 1.1rem; + content: ""; } + +.md-typeset h5[id]:target::before { + margin-top: -7.1rem; + padding-top: 7.1rem; } + +.md-typeset h5[id]:hover .headerlink, +.md-typeset h5[id]:target .headerlink, +.md-typeset h5[id] .headerlink:focus { + -webkit-transform: translate(0, 0); + transform: translate(0, 0); + opacity: 1; } + +.md-typeset h5[id]:hover .headerlink:hover, +.md-typeset h5[id]:target .headerlink, +.md-typeset h5[id] .headerlink:focus { + color: #1AD6F5; } + +.md-typeset h6[id]::before { + display: block; + margin-top: -1.1rem; + padding-top: 1.1rem; + content: ""; } + +.md-typeset h6[id]:target::before { + margin-top: -7.1rem; + padding-top: 7.1rem; } + +.md-typeset h6[id]:hover .headerlink, +.md-typeset h6[id]:target .headerlink, +.md-typeset h6[id] .headerlink:focus { + -webkit-transform: translate(0, 0); + transform: translate(0, 0); + opacity: 1; } + +.md-typeset h6[id]:hover 
.headerlink:hover, +.md-typeset h6[id]:target .headerlink, +.md-typeset h6[id] .headerlink:focus { + color: #1AD6F5; } + +.md-typeset .MJXc-display { + margin: 0.75em 0; + padding: 0.75em 0; + overflow: auto; + -webkit-overflow-scrolling: touch; } + +.md-typeset .MathJax_CHTML { + outline: 0; } + +.md-typeset del.critic, +.md-typeset ins.critic, +.md-typeset .critic.comment { + margin: 0 0.25em; + padding: 0.0625em 0; + border-radius: 0.2rem; + -webkit-box-decoration-break: clone; + box-decoration-break: clone; } + +.md-typeset del.critic { + background-color: #FFDDDD; + box-shadow: 0.25em 0 0 #FFDDDD, -0.25em 0 0 #FFDDDD; } + +.md-typeset ins.critic { + background-color: #DDFFDD; + box-shadow: 0.25em 0 0 #DDFFDD, -0.25em 0 0 #DDFFDD; } + +.md-typeset .critic.comment { + background-color: rgba(236, 236, 236, 0.5); + color: #37474F; + box-shadow: 0.25em 0 0 rgba(236, 236, 236, 0.5), -0.25em 0 0 rgba(236, 236, 236, 0.5); } + .md-typeset .critic.comment::before { + padding-right: 0.125em; + color: rgba(0, 0, 0, 0.26); + content: "\E0B7"; + vertical-align: -0.125em; } + +.md-typeset .critic.block { + display: block; + margin: 1em 0; + padding-right: 1.6rem; + padding-left: 1.6rem; + box-shadow: none; } + .md-typeset .critic.block :first-child { + margin-top: 0.5em; } + .md-typeset .critic.block :last-child { + margin-bottom: 0.5em; } + +.md-typeset details { + display: block; + padding-top: 0; } + .md-typeset details[open] > summary::after { + -webkit-transform: rotate(180deg); + transform: rotate(180deg); } + .md-typeset details:not([open]) { + padding-bottom: 0; } + .md-typeset details:not([open]) > summary { + border-bottom: none; } + .md-typeset details summary { + padding-right: 4rem; } + [dir="rtl"] .md-typeset details summary { + padding-left: 4rem; } + .no-details .md-typeset details:not([open]) > * { + display: none; } + .no-details .md-typeset details:not([open]) summary { + display: block; } + +.md-typeset summary { + display: block; + outline: none; + cursor: pointer; } + .md-typeset summary::-webkit-details-marker { + display: none; } + .md-typeset summary::after { + position: absolute; + top: 0.8rem; + right: 1.2rem; + color: rgba(0, 0, 0, 0.26); + font-size: 2rem; + content: "\E313"; } + [dir="rtl"] .md-typeset summary::after { + right: initial; + left: 1.2rem; } + +.md-typeset .emojione { + width: 2rem; + vertical-align: text-top; } + +.md-typeset code.codehilite, .md-typeset code.highlight { + margin: 0 0.29412em; + padding: 0.07353em 0; } + +.md-typeset .superfences-content { + display: none; + order: 99; + width: 100%; + background-color: white; } + .md-typeset .superfences-content > * { + margin: 0; + border-radius: 0; } + +.md-typeset .superfences-tabs { + display: flex; + position: relative; + flex-wrap: wrap; + margin: 1em 0; + border: 0.1rem solid rgba(0, 0, 0, 0.07); + border-radius: 0.2em; } + .md-typeset .superfences-tabs > input { + display: none; } + .md-typeset .superfences-tabs > input:checked + label { + font-weight: 700; } + .md-typeset .superfences-tabs > input:checked + label + .superfences-content { + display: block; } + .md-typeset .superfences-tabs > label { + width: auto; + padding: 1.2rem 1.2rem; + transition: color 0.125s; + font-size: 1.28rem; + cursor: pointer; } + html .md-typeset .superfences-tabs > label:hover { + color: #1AD6F5; } + +.md-typeset .task-list-item { + position: relative; + list-style-type: none; } + .md-typeset .task-list-item [type="checkbox"] { + position: absolute; + top: 0.45em; + left: -2em; } + [dir="rtl"] .md-typeset 
.task-list-item [type="checkbox"] { + right: -2em; + left: initial; } + +.md-typeset .task-list-control .task-list-indicator::before { + position: absolute; + top: 0.15em; + left: -1.25em; + color: rgba(0, 0, 0, 0.26); + font-size: 1.25em; + content: "\E835"; + vertical-align: -0.25em; } + [dir="rtl"] .md-typeset .task-list-control .task-list-indicator::before { + right: -1.25em; + left: initial; } + +.md-typeset .task-list-control [type="checkbox"]:checked + .task-list-indicator::before { + content: "\E834"; } + +.md-typeset .task-list-control [type="checkbox"] { + opacity: 0; + z-index: -1; } + +@media print { + .md-typeset a::after { + color: rgba(0, 0, 0, 0.54); + content: " [" attr(href) "]"; } + .md-typeset code, + .md-typeset pre { + white-space: pre-wrap; } + .md-typeset code { + box-shadow: none; + -webkit-box-decoration-break: initial; + box-decoration-break: initial; } + .md-clipboard { + display: none; } + .md-content__icon { + display: none; } + .md-header { + display: none; } + .md-footer { + display: none; } + .md-sidebar { + display: none; } + .md-tabs { + display: none; } + .md-typeset .headerlink { + display: none; } } + +@media only screen and (max-width: 44.9375em) { + .md-typeset pre { + margin: 1em -1.6rem; + border-radius: 0; } + .md-typeset pre > code { + padding: 1.05rem 1.6rem; } + .md-footer-nav__link--prev .md-footer-nav__title { + display: none; } + .md-search-result__teaser { + max-height: 5rem; + -webkit-line-clamp: 3; } + .codehilite .hll, .md-typeset .highlight .hll { + margin: 0 -1.6rem; + padding: 0 1.6rem; } + .md-typeset > .codehilite, .md-typeset > .highlight { + margin: 1em -1.6rem; + border-radius: 0; } + .md-typeset > .codehilite pre, .md-typeset > .highlight pre, + .md-typeset > .codehilite code, + .md-typeset > .highlight code { + padding: 1.05rem 1.6rem; } + .md-typeset > .codehilitetable, .md-typeset > .highlighttable { + margin: 1em -1.6rem; + border-radius: 0; } + .md-typeset > .codehilitetable .codehilite > pre, .md-typeset > .highlighttable .codehilite > pre, .md-typeset > .codehilitetable .highlight > pre, .md-typeset > .highlighttable .highlight > pre, + .md-typeset > .codehilitetable .codehilite > code, + .md-typeset > .highlighttable .codehilite > code, .md-typeset > .codehilitetable .highlight > code, .md-typeset > .highlighttable .highlight > code, + .md-typeset > .codehilitetable .linenodiv, + .md-typeset > .highlighttable .linenodiv { + padding: 1rem 1.6rem; } + .md-typeset > p > .MJXc-display { + margin: 0.75em -1.6rem; + padding: 0.25em 1.6rem; } + .md-typeset > .superfences-tabs { + margin: 1em -1.6rem; + border: 0; + border-top: 0.1rem solid rgba(0, 0, 0, 0.07); + border-radius: 0; } + .md-typeset > .superfences-tabs pre, + .md-typeset > .superfences-tabs code { + padding: 1.05rem 1.6rem; } } + +@media only screen and (min-width: 100em) { + html { + font-size: 68.75%; } } + +@media only screen and (min-width: 125em) { + html { + font-size: 75%; } } + +@media only screen and (max-width: 59.9375em) { + body[data-md-state="lock"] { + overflow: hidden; } + .ios body[data-md-state="lock"] .md-container { + display: none; } + html .md-nav__link[for="__toc"] { + display: block; + padding-right: 4.8rem; } + html .md-nav__link[for="__toc"]::after { + color: inherit; + content: "\E8DE"; } + html .md-nav__link[for="__toc"] + .md-nav__link { + display: none; } + html .md-nav__link[for="__toc"] ~ .md-nav { + display: flex; } + html [dir="rtl"] .md-nav__link { + padding-right: 1.6rem; + padding-left: 4.8rem; } + .md-nav__source { + display: 
block; + padding: 0 0.4rem; + background-color: rgba(0, 62, 81, 0.9675); + color: white; } + .md-search__overlay { + position: absolute; + top: 0.4rem; + left: 0.4rem; + width: 3.6rem; + height: 3.6rem; + -webkit-transform-origin: center; + transform-origin: center; + transition: opacity 0.2s 0.2s, -webkit-transform 0.3s 0.1s; + transition: transform 0.3s 0.1s, opacity 0.2s 0.2s; + transition: transform 0.3s 0.1s, opacity 0.2s 0.2s, -webkit-transform 0.3s 0.1s; + border-radius: 2rem; + background-color: white; + overflow: hidden; + pointer-events: none; } + [dir="rtl"] .md-search__overlay { + right: 0.4rem; + left: initial; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__overlay { + transition: opacity 0.1s, -webkit-transform 0.4s; + transition: transform 0.4s, opacity 0.1s; + transition: transform 0.4s, opacity 0.1s, -webkit-transform 0.4s; + opacity: 1; } + .md-search__inner { + position: fixed; + top: 0; + left: 100%; + width: 100%; + height: 100%; + -webkit-transform: translateX(5%); + transform: translateX(5%); + transition: right 0s 0.3s, left 0s 0.3s, opacity 0.15s 0.15s, -webkit-transform 0.15s 0.15s cubic-bezier(0.4, 0, 0.2, 1); + transition: right 0s 0.3s, left 0s 0.3s, transform 0.15s 0.15s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.15s 0.15s; + transition: right 0s 0.3s, left 0s 0.3s, transform 0.15s 0.15s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.15s 0.15s, -webkit-transform 0.15s 0.15s cubic-bezier(0.4, 0, 0.2, 1); + opacity: 0; + z-index: 2; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__inner { + left: 0; + -webkit-transform: translateX(0); + transform: translateX(0); + transition: right 0s 0s, left 0s 0s, opacity 0.15s 0.15s, -webkit-transform 0.15s 0.15s cubic-bezier(0.1, 0.7, 0.1, 1); + transition: right 0s 0s, left 0s 0s, transform 0.15s 0.15s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s 0.15s; + transition: right 0s 0s, left 0s 0s, transform 0.15s 0.15s cubic-bezier(0.1, 0.7, 0.1, 1), opacity 0.15s 0.15s, -webkit-transform 0.15s 0.15s cubic-bezier(0.1, 0.7, 0.1, 1); + opacity: 1; } + [dir="rtl"] [data-md-toggle="search"]:checked ~ .md-header .md-search__inner { + right: 0; + left: initial; } + html [dir="rtl"] .md-search__inner { + right: 100%; + left: initial; + -webkit-transform: translateX(-5%); + transform: translateX(-5%); } + .md-search__input { + width: 100%; + height: 4.8rem; + font-size: 1.8rem; } + .md-search__icon[for="__search"] { + top: 1.2rem; + left: 1.6rem; } + .md-search__icon[for="__search"][for="__search"]::before { + content: "\E5C4"; } + [dir="rtl"] .md-search__icon[for="__search"][for="__search"]::before { + content: "\E5C8"; } + .md-search__icon[type="reset"] { + top: 1.2rem; + right: 1.6rem; } + .md-search__output { + top: 4.8rem; + bottom: 0; } + .md-search-result__article--document::before { + display: none; } } + +@media only screen and (max-width: 76.1875em) { + [data-md-toggle="drawer"]:checked ~ .md-overlay { + width: 100%; + height: 100%; + transition: width 0s, height 0s, opacity 0.25s; + opacity: 1; } + .md-header-nav__button.md-icon--home, .md-header-nav__button.md-logo { + display: none; } + .md-hero__inner { + margin-top: 4.8rem; + margin-bottom: 2.4rem; } + .md-nav { + background-color: white; } + .md-nav--primary, + .md-nav--primary .md-nav { + display: flex; + position: absolute; + top: 0; + right: 0; + left: 0; + flex-direction: column; + height: 100%; + z-index: 1; } + .md-nav--primary .md-nav__title, + .md-nav--primary .md-nav__item { + font-size: 1.6rem; + line-height: 1.5; } + html .md-nav--primary 
.md-nav__title { + position: relative; + height: 11.2rem; + padding: 6rem 1.6rem 0.4rem; + background-color: rgba(0, 0, 0, 0.07); + color: rgba(0, 0, 0, 0.54); + font-weight: 400; + line-height: 4.8rem; + white-space: nowrap; + cursor: pointer; } + html .md-nav--primary .md-nav__title::before { + display: block; + position: absolute; + top: 0.4rem; + left: 0.4rem; + width: 4rem; + height: 4rem; + color: rgba(0, 0, 0, 0.54); } + html .md-nav--primary .md-nav__title ~ .md-nav__list { + background-color: white; + box-shadow: 0 0.1rem 0 rgba(0, 0, 0, 0.07) inset; } + html .md-nav--primary .md-nav__title ~ .md-nav__list > .md-nav__item:first-child { + border-top: 0; } + html .md-nav--primary .md-nav__title--site { + position: relative; + background-color: #004E66; + color: white; } + html .md-nav--primary .md-nav__title--site .md-nav__button { + display: block; + position: absolute; + top: 0.4rem; + left: 0.4rem; + width: 6.4rem; + height: 6.4rem; + font-size: 4.8rem; } + html .md-nav--primary .md-nav__title--site::before { + display: none; } + html [dir="rtl"] .md-nav--primary .md-nav__title::before { + right: 0.4rem; + left: initial; } + html [dir="rtl"] .md-nav--primary .md-nav__title--site .md-nav__button { + right: 0.4rem; + left: initial; } + .md-nav--primary .md-nav__list { + flex: 1; + overflow-y: auto; } + .md-nav--primary .md-nav__item { + padding: 0; + border-top: 0.1rem solid rgba(0, 0, 0, 0.07); } + [dir="rtl"] .md-nav--primary .md-nav__item { + padding: 0; } + .md-nav--primary .md-nav__item--nested > .md-nav__link { + padding-right: 4.8rem; } + [dir="rtl"] .md-nav--primary .md-nav__item--nested > .md-nav__link { + padding-right: 1.6rem; + padding-left: 4.8rem; } + .md-nav--primary .md-nav__item--nested > .md-nav__link::after { + content: "\E315"; } + [dir="rtl"] .md-nav--primary .md-nav__item--nested > .md-nav__link::after { + content: "\E314"; } + .md-nav--primary .md-nav__link { + position: relative; + margin-top: 0; + padding: 1.2rem 1.6rem; } + .md-nav--primary .md-nav__link::after { + position: absolute; + top: 50%; + right: 1.2rem; + margin-top: -1.2rem; + color: inherit; + font-size: 2.4rem; } + [dir="rtl"] .md-nav--primary .md-nav__link::after { + right: initial; + left: 1.2rem; } + .md-nav--primary .md-nav--secondary .md-nav__link { + position: static; } + .md-nav--primary .md-nav--secondary .md-nav { + position: static; + background-color: transparent; } + .md-nav--primary .md-nav--secondary .md-nav .md-nav__link { + padding-left: 2.8rem; } + [dir="rtl"] .md-nav--primary .md-nav--secondary .md-nav .md-nav__link { + padding-right: 2.8rem; + padding-left: initial; } + .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link { + padding-left: 4rem; } + [dir="rtl"] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav__link { + padding-right: 4rem; + padding-left: initial; } + .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link { + padding-left: 5.2rem; } + [dir="rtl"] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav__link { + padding-right: 5.2rem; + padding-left: initial; } + .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link { + padding-left: 6.4rem; } + [dir="rtl"] .md-nav--primary .md-nav--secondary .md-nav .md-nav .md-nav .md-nav .md-nav__link { + padding-right: 6.4rem; + padding-left: initial; } + .md-nav__toggle ~ .md-nav { + display: flex; + -webkit-transform: translateX(100%); + transform: translateX(100%); + transition: opacity 0.125s 0.05s, -webkit-transform 0.25s 
cubic-bezier(0.8, 0, 0.6, 1); + transition: transform 0.25s cubic-bezier(0.8, 0, 0.6, 1), opacity 0.125s 0.05s; + transition: transform 0.25s cubic-bezier(0.8, 0, 0.6, 1), opacity 0.125s 0.05s, -webkit-transform 0.25s cubic-bezier(0.8, 0, 0.6, 1); + opacity: 0; } + [dir="rtl"] .md-nav__toggle ~ .md-nav { + -webkit-transform: translateX(-100%); + transform: translateX(-100%); } + .no-csstransforms3d .md-nav__toggle ~ .md-nav { + display: none; } + .md-nav__toggle:checked ~ .md-nav { + -webkit-transform: translateX(0); + transform: translateX(0); + transition: opacity 0.125s 0.125s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.125s 0.125s; + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), opacity 0.125s 0.125s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + opacity: 1; } + .no-csstransforms3d .md-nav__toggle:checked ~ .md-nav { + display: flex; } + .md-sidebar--primary { + position: fixed; + top: 0; + left: -24.2rem; + width: 24.2rem; + height: 100%; + -webkit-transform: translateX(0); + transform: translateX(0); + transition: box-shadow 0.25s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), box-shadow 0.25s; + transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1), box-shadow 0.25s, -webkit-transform 0.25s cubic-bezier(0.4, 0, 0.2, 1); + background-color: white; + z-index: 3; } + [dir="rtl"] .md-sidebar--primary { + right: -24.2rem; + left: initial; } + .no-csstransforms3d .md-sidebar--primary { + display: none; } + [data-md-toggle="drawer"]:checked ~ .md-container .md-sidebar--primary { + box-shadow: 0 8px 10px 1px rgba(0, 0, 0, 0.14), 0 3px 14px 2px rgba(0, 0, 0, 0.12), 0 5px 5px -3px rgba(0, 0, 0, 0.4); + -webkit-transform: translateX(24.2rem); + transform: translateX(24.2rem); } + [dir="rtl"] [data-md-toggle="drawer"]:checked ~ .md-container .md-sidebar--primary { + -webkit-transform: translateX(-24.2rem); + transform: translateX(-24.2rem); } + .no-csstransforms3d [data-md-toggle="drawer"]:checked ~ .md-container .md-sidebar--primary { + display: block; } + .md-sidebar--primary .md-sidebar__scrollwrap { + overflow: hidden; } + .md-sidebar--primary .md-sidebar__scrollwrap { + position: absolute; + top: 0; + right: 0; + bottom: 0; + left: 0; + margin: 0; } + .md-tabs { + display: none; } } + +@media only screen and (min-width: 60em) { + .md-content { + margin-right: 24.2rem; } + [dir="rtl"] .md-content { + margin-right: initial; + margin-left: 24.2rem; } + .md-header-nav__button.md-icon--search { + display: none; } + .md-header-nav__source { + display: block; + width: 23rem; + max-width: 23rem; + margin-left: 2.8rem; + padding-right: 1.2rem; } + [dir="rtl"] .md-header-nav__source { + margin-right: 2.8rem; + margin-left: initial; + padding-right: initial; + padding-left: 1.2rem; } + .md-search { + padding: 0.4rem; } + .md-search__overlay { + position: fixed; + top: 0; + left: 0; + width: 0; + height: 0; + transition: width 0s 0.25s, height 0s 0.25s, opacity 0.25s; + background-color: rgba(0, 0, 0, 0.54); + cursor: pointer; } + [dir="rtl"] .md-search__overlay { + right: 0; + left: initial; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__overlay { + width: 100%; + height: 100%; + transition: width 0s, height 0s, opacity 0.25s; + opacity: 1; } + .md-search__inner { + position: relative; + width: 23rem; + padding: 0.2rem 0; + float: right; + transition: width 0.25s cubic-bezier(0.1, 0.7, 0.1, 1); } + [dir="rtl"] 
.md-search__inner { + float: left; } + .md-search__form { + border-radius: 0.2rem; } + .md-search__input { + width: 100%; + height: 3.6rem; + padding-left: 4.4rem; + transition: background-color 0.25s cubic-bezier(0.1, 0.7, 0.1, 1), color 0.25s cubic-bezier(0.1, 0.7, 0.1, 1); + border-radius: 0.2rem; + background-color: rgba(0, 0, 0, 0.26); + color: inherit; + font-size: 1.6rem; } + [dir="rtl"] .md-search__input { + padding-right: 4.4rem; } + .md-search__input + .md-search__icon { + color: inherit; } + .md-search__input::-webkit-input-placeholder { + color: rgba(255, 255, 255, 0.7); } + .md-search__input:-ms-input-placeholder { + color: rgba(255, 255, 255, 0.7); } + .md-search__input::-ms-input-placeholder { + color: rgba(255, 255, 255, 0.7); } + .md-search__input::placeholder { + color: rgba(255, 255, 255, 0.7); } + .md-search__input:hover { + background-color: rgba(255, 255, 255, 0.12); } + [data-md-toggle="search"]:checked ~ .md-header .md-search__input { + border-radius: 0.2rem 0.2rem 0 0; + background-color: white; + color: rgba(0, 0, 0, 0.87); + text-overflow: none; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__input + .md-search__icon, [data-md-toggle="search"]:checked ~ .md-header .md-search__input::-webkit-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + [data-md-toggle="search"]:checked ~ .md-header .md-search__input + .md-search__icon, [data-md-toggle="search"]:checked ~ .md-header .md-search__input:-ms-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + [data-md-toggle="search"]:checked ~ .md-header .md-search__input + .md-search__icon, [data-md-toggle="search"]:checked ~ .md-header .md-search__input::-ms-input-placeholder { + color: rgba(0, 0, 0, 0.54); } + [data-md-toggle="search"]:checked ~ .md-header .md-search__input + .md-search__icon, [data-md-toggle="search"]:checked ~ .md-header .md-search__input::placeholder { + color: rgba(0, 0, 0, 0.54); } + .md-search__output { + top: 3.8rem; + transition: opacity 0.4s; + opacity: 0; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__output { + box-shadow: 0 6px 10px 0 rgba(0, 0, 0, 0.14), 0 1px 18px 0 rgba(0, 0, 0, 0.12), 0 3px 5px -1px rgba(0, 0, 0, 0.4); + opacity: 1; } + .md-search__scrollwrap { + max-height: 0; } + [data-md-toggle="search"]:checked ~ .md-header .md-search__scrollwrap { + max-height: 75vh; } + .md-search__scrollwrap::-webkit-scrollbar { + width: 0.4rem; + height: 0.4rem; } + .md-search__scrollwrap::-webkit-scrollbar-thumb { + background-color: rgba(0, 0, 0, 0.26); } + .md-search__scrollwrap::-webkit-scrollbar-thumb:hover { + background-color: #1AD6F5; } + .md-search-result__meta { + padding-left: 4.4rem; } + [dir="rtl"] .md-search-result__meta { + padding-right: 4.4rem; + padding-left: initial; } + .md-search-result__article { + padding-left: 4.4rem; } + [dir="rtl"] .md-search-result__article { + padding-right: 4.4rem; + padding-left: 1.6rem; } + .md-sidebar--secondary { + display: block; + margin-left: 100%; + -webkit-transform: translate(-100%, 0); + transform: translate(-100%, 0); } + [dir="rtl"] .md-sidebar--secondary { + margin-right: 100%; + margin-left: initial; + -webkit-transform: translate(100%, 0); + transform: translate(100%, 0); } } + +@media only screen and (min-width: 76.25em) { + .md-content { + margin-left: 24.2rem; } + [dir="rtl"] .md-content { + margin-right: 24.2rem; } + .md-content__inner { + margin-right: 2.4rem; + margin-left: 2.4rem; } + .md-header-nav__button.md-icon--menu { + display: none; } + .md-nav[data-md-state="animate"] { + transition: 
max-height 0.25s cubic-bezier(0.86, 0, 0.07, 1); } + .md-nav__toggle ~ .md-nav { + max-height: 0; + overflow: hidden; } + .no-js .md-nav__toggle ~ .md-nav { + display: none; } + .md-nav__toggle:checked ~ .md-nav, .md-nav[data-md-state="expand"] { + max-height: 100%; } + .no-js .md-nav__toggle:checked ~ .md-nav, .no-js .md-nav[data-md-state="expand"] { + display: block; } + .md-nav__item--nested > .md-nav > .md-nav__title { + display: none; } + .md-nav__item--nested > .md-nav__link::after { + display: inline-block; + -webkit-transform-origin: 0.45em 0.45em; + transform-origin: 0.45em 0.45em; + -webkit-transform-style: preserve-3d; + transform-style: preserve-3d; + vertical-align: -0.125em; } + .js .md-nav__item--nested > .md-nav__link::after { + transition: -webkit-transform 0.4s; + transition: transform 0.4s; + transition: transform 0.4s, -webkit-transform 0.4s; } + .md-nav__item--nested .md-nav__toggle:checked ~ .md-nav__link::after { + -webkit-transform: rotateX(180deg); + transform: rotateX(180deg); } + [data-md-toggle="search"]:checked ~ .md-header .md-search__inner { + width: 68.8rem; } + .md-search__scrollwrap { + width: 68.8rem; } + .md-sidebar--secondary { + margin-left: 122rem; } + [dir="rtl"] .md-sidebar--secondary { + margin-right: 122rem; + margin-left: initial; } + .md-tabs ~ .md-main .md-nav--primary > .md-nav__list > .md-nav__item--nested { + font-size: 0; + visibility: hidden; } + .md-tabs--active ~ .md-main .md-nav--primary .md-nav__title { + display: block; + padding: 0; } + .md-tabs--active ~ .md-main .md-nav--primary .md-nav__title--site { + display: none; } + .no-js .md-tabs--active ~ .md-main .md-nav--primary .md-nav { + display: block; } + .md-tabs--active ~ .md-main .md-nav--primary > .md-nav__list > .md-nav__item { + font-size: 0; + visibility: hidden; } + .md-tabs--active ~ .md-main .md-nav--primary > .md-nav__list > .md-nav__item--nested { + display: none; + font-size: 1.4rem; + overflow: auto; + visibility: visible; } + .md-tabs--active ~ .md-main .md-nav--primary > .md-nav__list > .md-nav__item--nested > .md-nav__link { + display: none; } + .md-tabs--active ~ .md-main .md-nav--primary > .md-nav__list > .md-nav__item--active { + display: block; } + .md-tabs--active ~ .md-main .md-nav[data-md-level="1"] { + max-height: initial; + overflow: visible; } + .md-tabs--active ~ .md-main .md-nav[data-md-level="1"] > .md-nav__list > .md-nav__item { + padding-left: 0; } + .md-tabs--active ~ .md-main .md-nav[data-md-level="1"] .md-nav .md-nav__title { + display: none; } } + +@media only screen and (min-width: 45em) { + .md-footer-nav__link { + width: 50%; } + .md-footer-copyright { + max-width: 75%; + float: left; } + [dir="rtl"] .md-footer-copyright { + float: right; } + .md-footer-social { + padding: 1.2rem 0; + float: right; } + [dir="rtl"] .md-footer-social { + float: left; } } + +@media only screen and (max-width: 29.9375em) { + [data-md-toggle="search"]:checked ~ .md-header .md-search__overlay { + -webkit-transform: scale(45); + transform: scale(45); } } + +@media only screen and (min-width: 30em) and (max-width: 44.9375em) { + [data-md-toggle="search"]:checked ~ .md-header .md-search__overlay { + -webkit-transform: scale(60); + transform: scale(60); } } + +@media only screen and (min-width: 45em) and (max-width: 59.9375em) { + [data-md-toggle="search"]:checked ~ .md-header .md-search__overlay { + -webkit-transform: scale(75); + transform: scale(75); } } + +@media only screen and (min-width: 60em) and (max-width: 76.1875em) { + [data-md-toggle="search"]:checked ~ 
.md-header .md-search__inner { + width: 46.8rem; } + .md-search__scrollwrap { + width: 46.8rem; } + .md-search-result__teaser { + max-height: 5rem; + -webkit-line-clamp: 3; } } + +/*# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IiIsImZpbGUiOiJhc3NldHMvc3R5bGVzaGVldHMvYXBwbGljYXRpb24uZmJiN2YzYWYuY3NzIiwic291cmNlUm9vdCI6IiJ9*/ diff --git a/docs/material/base.html b/docs/material/base.html new file mode 100644 index 00000000..e03968b7 --- /dev/null +++ b/docs/material/base.html @@ -0,0 +1,212 @@ +{% import "partials/language.html" as lang with context %} +{% set feature = config.theme.feature %} +{% set palette = config.theme.palette %} +{% set font = config.theme.font %} + + + + {% block site_meta %} + + + + {% if page and page.meta and page.meta.description %} + + {% elif config.site_description %} + + {% endif %} + {% if page.canonical_url %} + + {% endif %} + {% if page and page.meta and page.meta.author %} + + {% elif config.site_author %} + + {% endif %} + {% for key in [ + "clipboard.copy", + "clipboard.copied", + "search.language", + "search.pipeline.stopwords", + "search.pipeline.trimmer", + "search.result.none", + "search.result.one", + "search.result.other", + "search.tokenizer" + ] %} + + {% endfor %} + + + {% endblock %} + {% block htmltitle %} + {% if page and page.meta and page.meta.title %} + {{ page.meta.title }} + {% elif page and page.title and not page.is_homepage %} + {{ page.title }} - {{ config.site_name }} + {% else %} + {{ config.site_name }} + {% endif %} + {% endblock %} + {% block styles %} + + {% if palette.primary or palette.accent %} + + {% endif %} + {% if palette.primary %} + {% import "partials/palette.html" as map %} + {% set primary = map.primary( + palette.primary | replace(" ", "-") | lower + ) %} + + {% endif %} + {% endblock %} + {% block libs %} + + {% endblock %} + {% block fonts %} + + {% if font != false %} + + + {% endif %} + {% endblock %} + + {% for path in extra_css %} + + {% endfor %} + {% block extrahead %}{% endblock %} + + {% if palette.primary or palette.accent %} + {% set primary = palette.primary | replace(" ", "-") | lower %} + {% set accent = palette.accent | replace(" ", "-") | lower %} + + {% else %} + + {% endif %} + + + {% set platform = config.extra.repo_icon or config.repo_url %} + {% if "github" in platform %} + {% include "assets/images/icons/github.f0b8504a.svg" %} + {% elif "gitlab" in platform %} + {% include "assets/images/icons/gitlab.6dd19c00.svg" %} + {% elif "bitbucket" in platform %} + {% include "assets/images/icons/bitbucket.1b09e088.svg" %} + {% endif %} + + + + + + {% if page.toc | first is defined %} + + {{ lang.t('skip.link.title') }} + + {% endif %} + {% block header %} + {% include "partials/header.html" %} + {% endblock %} +
+ {% block hero %} + {% if page and page.meta and page.meta.hero %} + {% include "partials/hero.html" with context %} + {% endif %} + {% endblock %} + {% if feature.tabs %} + {% include "partials/tabs.html" %} + {% endif %} +
+
+ {% block site_nav %} + {% if nav %} +
+
+
+ {% include "partials/nav.html" %} +
+
+
+ {% endif %} + {% if page.toc %} +
+
+
+ {% include "partials/toc.html" %} +
+
+
+ {% endif %} + {% endblock %} +
+
+ {% block content %} + {% if page.edit_url %} + + {% endif %} + {% if not "\x3ch1" in page.content %} +

{{ page.title | default(config.site_name, true)}}

+ {% endif %} + {{ page.content }} + {% block source %} + {% if page and page.meta and page.meta.source %} +

{{ lang.t("meta.source") }}

+ {% set repo = config.repo_url %} + {% if repo | last == "/" %} + {% set repo = repo[:-1] %} + {% endif %} + {% set path = page.meta.path | default([""]) %} + {% set file = page.meta.source %} + + {{ file }} + + {% endif %} + {% endblock %} + {% endblock %} + {% block disqus %} + {% include "partials/integrations/disqus.html" %} + {% endblock %} +
+
+
+
+ {% block footer %} + {% include "partials/footer.html" %} + {% endblock %} +
+ {% block scripts %} + + {% if lang.t("search.language") != "en" %} + {% set languages = lang.t("search.language").split(",") %} + {% if languages | length and languages[0] != "" %} + {% set path = base_url + "/assets/javascripts/lunr" %} + + {% for language in languages | map("trim") %} + {% if language != "en" %} + {% if language == "jp" %} + + {% endif %} + {% if language in ("da", "de", "du", "es", "fi", "fr", "hu", "it", "jp", "no", "pt", "ro", "ru", "sv", "tr") %} + + {% endif %} + {% endif %} + {% endfor %} + {% if languages | length > 1 %} + + {% endif %} + {% endif %} + {% endif %} + + {% for path in extra_javascript %} + + {% endfor %} + {% endblock %} + {% block analytics %} + {% if config.google_analytics %} + {% include "partials/integrations/analytics.html" %} + {% endif %} + {% endblock %} + + diff --git a/docs/material/main.html b/docs/material/main.html new file mode 100644 index 00000000..94d9808c --- /dev/null +++ b/docs/material/main.html @@ -0,0 +1 @@ +{% extends "base.html" %} diff --git a/docs/material/manifest.json b/docs/material/manifest.json new file mode 100644 index 00000000..016754a9 --- /dev/null +++ b/docs/material/manifest.json @@ -0,0 +1,9 @@ +{ + "assets/images/icons/bitbucket.svg": "assets/images/icons/bitbucket.1b09e088.svg", + "assets/images/icons/github.svg": "assets/images/icons/github.f0b8504a.svg", + "assets/images/icons/gitlab.svg": "assets/images/icons/gitlab.6dd19c00.svg", + "assets/javascripts/application.js": "assets/javascripts/application.878fdd8d.js", + "assets/javascripts/modernizr.js": "assets/javascripts/modernizr.1aa3b519.js", + "assets/stylesheets/application-palette.css": "assets/stylesheets/application-palette.22915126.css", + "assets/stylesheets/application.css": "assets/stylesheets/application.fbb7f3af.css" +} \ No newline at end of file diff --git a/docs/material/mkdocs_theme.yml b/docs/material/mkdocs_theme.yml new file mode 100644 index 00000000..2cd43ea5 --- /dev/null +++ b/docs/material/mkdocs_theme.yml @@ -0,0 +1,73 @@ +# Copyright (c) 2016-2018 Martin Donath + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+ +# Language for theme localization +language: en + +# Text direction (can be ltr or rtl), default: ltr +direction: + +# Feature flags for functionality that alters behavior significantly, and thus +# may be a matter of taste +feature: + + # Another layer on top of the main navigation for larger screens in the form + # of tabs, especially useful for larger documentation projects + tabs: false + +# Sets the primary and accent color palettes as defined in the Material Design +# documentation - possible values can be looked up in the getting started guide +palette: + + # Primary color used for header, sidebar and links, default: indigo + primary: + + # Accent color for highlighting user interaction, default: indigo + accent: + +# Fonts used by Material, automatically loaded from Google Fonts - see the site +# for a list of available fonts +font: + + # Default font for text + text: Roboto + + # Fixed-width font for code listings + code: Roboto Mono + +# Favicon to be rendered +favicon: assets/images/favicon.png + +# The logo of the documentation shown in the header and navigation can either +# be a Material Icon ligature (see https://material.io/icons/) or an image URL +logo: + icon: "\uE80C" + +# Material includes the search in the header as a partial, not as a separate +# template, so it's correct that search.html is missing +include_search_page: false + +# Material doesn't use MkDocs search functionality but provides its own. For +# this reason, only the search index needs to be built +search_index_only: true + +# Static pages to build +static_templates: + - 404.html diff --git a/docs/material/partials/footer.html b/docs/material/partials/footer.html new file mode 100644 index 00000000..5ab451ef --- /dev/null +++ b/docs/material/partials/footer.html @@ -0,0 +1,53 @@ +{% import "partials/language.html" as lang with context %} + diff --git a/docs/material/partials/header.html b/docs/material/partials/header.html new file mode 100644 index 00000000..f7aeae68 --- /dev/null +++ b/docs/material/partials/header.html @@ -0,0 +1,51 @@ +
+ +
diff --git a/docs/material/partials/hero.html b/docs/material/partials/hero.html new file mode 100644 index 00000000..9f6d77e9 --- /dev/null +++ b/docs/material/partials/hero.html @@ -0,0 +1,10 @@ +{% set feature = config.theme.feature %} +{% set class = "md-hero" %} +{% if not feature.tabs %} + {% set class = "md-hero md-hero--expand" %} +{% endif %} +
+
+ {{ page.meta.hero }} +
+
diff --git a/docs/material/partials/integrations/analytics.html b/docs/material/partials/integrations/analytics.html new file mode 100644 index 00000000..a01c64d0 --- /dev/null +++ b/docs/material/partials/integrations/analytics.html @@ -0,0 +1 @@ + diff --git a/docs/material/partials/integrations/disqus.html b/docs/material/partials/integrations/disqus.html new file mode 100644 index 00000000..967ae634 --- /dev/null +++ b/docs/material/partials/integrations/disqus.html @@ -0,0 +1,21 @@ +{% set disqus = config.extra.disqus %} +{% if page and page.meta and page.meta.disqus is string %} + {% set disqus = page.meta.disqus %} +{% endif %} +{% if not page.is_homepage and disqus %} +

{{ lang.t("meta.comments") }}

+
+ +{% endif %} diff --git a/docs/material/partials/language.html b/docs/material/partials/language.html new file mode 100644 index 00000000..70736226 --- /dev/null +++ b/docs/material/partials/language.html @@ -0,0 +1,11 @@ +{% import "partials/language/" + config.theme.language + ".html" as lang %} +{% import "partials/language/en.html" as fallback %} +{% macro t(key) %}{{ { + "direction": config.theme.direction, + "search.language": ( + config.extra.search | default({}) + ).language, + "search.tokenizer": ( + config.extra.search | default({}) + ).tokenizer | default("", true), +}[key] or lang.t(key) or fallback.t(key) }}{% endmacro %} diff --git a/docs/material/partials/language/ar.html b/docs/material/partials/language/ar.html new file mode 100644 index 00000000..d8143423 --- /dev/null +++ b/docs/material/partials/language/ar.html @@ -0,0 +1,22 @@ +{% macro t(key) %}{{ { + "language": "ar", + "direction": "rtl", + "clipboard.copy": "نسخ إلى الحافظة", + "clipboard.copied": "تم النسخ الى الحافظة", + "edit.link.title": "عدل الصفحة", + "footer.previous": "السابقة", + "footer.next": "التالية", + "meta.comments": "التعليقات", + "meta.source": "المصدر", + "search.language": "", + "search.pipeline.stopwords": false, + "search.pipeline.trimmer": false, + "search.placeholder": "بحث", + "search.result.placeholder": "اكتب لبدء البحث", + "search.result.none": "لا توجد نتائج", + "search.result.one": "نتائج البحث مستند واحد", + "search.result.other": "نتائج البحث # مستندات", + "skip.link.title": "انتقل إلى المحتوى", + "source.link.title": "اذهب إلى المصدر", + "toc.title": "جدول المحتويات" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/ca.html b/docs/material/partials/language/ca.html new file mode 100644 index 00000000..aa7bccb9 --- /dev/null +++ b/docs/material/partials/language/ca.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "ca", + "clipboard.copy": "Còpia al porta-retalls", + "clipboard.copied": "Copiat al porta-retalls", + "edit.link.title": "Edita aquesta pàgina", + "footer.previous": "Anterior", + "footer.next": "Següent", + "meta.comments": "Comentaris", + "meta.source": "Codi font", + "search.language": "", + "search.placeholder": "Cerca", + "search.result.placeholder": "Escriu per a començar a cercar", + "search.result.none": "Cap document coincideix", + "search.result.one": "1 document coincident", + "search.result.other": "# documents coincidents", + "skip.link.title": "Salta el contingut", + "source.link.title": "Ves al repositori", + "toc.title": "Taula de continguts" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/cs.html b/docs/material/partials/language/cs.html new file mode 100644 index 00000000..73712b11 --- /dev/null +++ b/docs/material/partials/language/cs.html @@ -0,0 +1,20 @@ +{% macro t(key) %}{{ { + "language": "cs", + "clipboard.copy": "Kopírovat do schránky", + "clipboard.copied": "Zkopírováno do schránky", + "edit.link.title": "Upravit tuto stránku", + "footer.previous": "Předchozí", + "footer.next": "Další", + "meta.comments": "Komentáře", + "meta.source": "Zdroj", + "search.language": "ro", + "search.placeholder": "Hledat", + "search.result.placeholder": "Pište co se má vyhledat", + "search.result.none": "Nenalezeny žádné dokumenty", + "search.result.one": "Nalezený dokument: 1", + "search.result.other": "Nalezené dokumenty: #", + "search.tokenizer": "[\s\-]+", + "skip.link.title": "Přeskočit obsah", + "source.link.title": "Přejít do repozitáře", + "toc.title": "Obsah" +}[key] }}{% endmacro %} diff --git 
a/docs/material/partials/language/da.html b/docs/material/partials/language/da.html new file mode 100644 index 00000000..8cf53e9b --- /dev/null +++ b/docs/material/partials/language/da.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "da", + "clipboard.copy": "Kopiér til udklipsholderen", + "clipboard.copied": "Kopieret til udklipsholderen", + "edit.link.title": "Redigér denne side", + "footer.previous": "Forrige", + "footer.next": "Næste", + "meta.comments": "Kommentarer", + "meta.source": "Kilde", + "search.language": "da", + "search.placeholder": "Søg", + "search.result.placeholder": "Indtast søgeord", + "search.result.none": "Ingen resultater fundet", + "search.result.one": "1 resultat", + "search.result.other": "# resultater", + "skip.link.title": "Gå til indholdet", + "source.link.title": "Åbn arkiv", + "toc.title": "Indholdsfortegnelse" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/de.html b/docs/material/partials/language/de.html new file mode 100644 index 00000000..fda18040 --- /dev/null +++ b/docs/material/partials/language/de.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "de", + "clipboard.copy": "In Zwischenablage kopieren", + "clipboard.copied": "In Zwischenablage kopiert", + "edit.link.title": "Seite editieren", + "footer.previous": "Zurück", + "footer.next": "Weiter", + "meta.comments": "Kommentare", + "meta.source": "Quellcode", + "search.language": "de", + "search.placeholder": "Suche", + "search.result.placeholder": "Suchbegriff eingeben", + "search.result.none": "Keine Suchergebnisse", + "search.result.one": "1 Suchergebnis", + "search.result.other": "# Suchergebnisse", + "skip.link.title": "Zum Inhalt", + "source.link.title": "Quellcode", + "toc.title": "Inhaltsverzeichnis" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/en.html b/docs/material/partials/language/en.html new file mode 100644 index 00000000..686d052c --- /dev/null +++ b/docs/material/partials/language/en.html @@ -0,0 +1,23 @@ +{% macro t(key) %}{{ { + "language": "en", + "direction": "ltr", + "clipboard.copy": "Copy to clipboard", + "clipboard.copied": "Copied to clipboard", + "edit.link.title": "Edit this page", + "footer.previous": "Previous", + "footer.next": "Next", + "meta.comments": "Comments", + "meta.source": "Source", + "search.language": "en", + "search.pipeline.stopwords": true, + "search.pipeline.trimmer": true, + "search.placeholder": "Search", + "search.result.placeholder": "Type to start searching", + "search.result.none": "No matching documents", + "search.result.one": "1 matching document", + "search.result.other": "# matching documents", + "search.tokenizer": "[\s\-]+", + "skip.link.title": "Skip to content", + "source.link.title": "Go to repository", + "toc.title": "Table of contents" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/es.html b/docs/material/partials/language/es.html new file mode 100644 index 00000000..ccac0097 --- /dev/null +++ b/docs/material/partials/language/es.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "es", + "clipboard.copy": "Copiar al portapapeles", + "clipboard.copied": "Copiado al portapapeles", + "edit.link.title": "Editar esta página", + "footer.previous": "Anterior", + "footer.next": "Siguiente", + "meta.comments": "Comentarios", + "meta.source": "Fuente", + "search.language": "es", + "search.placeholder": "Búsqueda", + "search.result.placeholder": "Teclee para comenzar búsqueda", + "search.result.none": "No se encontraron documentos", +
"search.result.one": "1 documento encontrado", + "search.result.other": "# documentos encontrados", + "skip.link.title": "Saltar a contenido", + "source.link.title": "Ir al repositorio", + "toc.title": "Tabla de contenidos" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/fa.html b/docs/material/partials/language/fa.html new file mode 100644 index 00000000..54a7d286 --- /dev/null +++ b/docs/material/partials/language/fa.html @@ -0,0 +1,22 @@ +{% macro t(key) %}{{ { + "language": "fa", + "direction": "rtl", + "clipboard.copy": "کپی کردن", + "clipboard.copied": "کپی شد", + "edit.link.title": "این صفحه را ویرایش کنید", + "footer.previous": "قبلی", + "footer.next": "بعدی", + "meta.comments": "نظرات", + "meta.source": "منبع", + "search.language": "", + "search.pipeline.stopwords": false, + "search.pipeline.trimmer": false, + "search.placeholder": "جستجو", + "search.result.placeholder": "برای شروع جستجو تایپ کنید", + "search.result.none": "سندی یافت نشد", + "search.result.one": "1 سند یافت شد", + "search.result.other": "# سند یافت شد", + "skip.link.title": "پرش به محتویات", + "source.link.title": "رفتن به مخزن", + "toc.title": "فهرست موضوعات" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/fi.html b/docs/material/partials/language/fi.html new file mode 100644 index 00000000..a5e0d6da --- /dev/null +++ b/docs/material/partials/language/fi.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "fi", + "clipboard.copy": "Kopioi leikepöydälle", + "clipboard.copied": "Kopioitu leikepöydälle", + "edit.link.title": "Muokkaa tätä sivua", + "footer.previous": "Edellinen", + "footer.next": "Seuraava", + "meta.comments": "Kommentit", + "meta.source": "Lähdekoodi", + "search.language": "fi", + "search.placeholder": "Hae", + "search.result.placeholder": "Kirjoita aloittaaksesi haun", + "search.result.none": "Ei täsmääviä dokumentteja", + "search.result.one": "1 täsmäävä dokumentti", + "search.result.other": "# täsmäävää dokumenttia", + "skip.link.title": "Hyppää sisältöön", + "source.link.title": "Mene repositoryyn", + "toc.title": "Sisällysluettelo" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/fr.html b/docs/material/partials/language/fr.html new file mode 100644 index 00000000..f6fa3164 --- /dev/null +++ b/docs/material/partials/language/fr.html @@ -0,0 +1,18 @@ +{% macro t(key) %}{{ { + "language": "fr", + "clipboard.copy": "Copier dans le presse-papier", + "clipboard.copied": "Copié dans le presse-papier", + "edit.link.title": "Editer cette page", + "footer.previous": "Précédent", + "footer.next": "Suivant", + "meta.comments": "Commentaires", + "meta.source": "Source", + "search.language": "fr", + "search.placeholder": "Rechercher", + "search.result.placeholder": "Taper pour démarrer la recherche", + "search.result.none": "Aucun document trouvé", + "search.result.one": "1 document trouvé", + "search.result.other": "# documents trouvés", + "source.link.title": "Aller au dépôt", + "toc.title": "Table des matières" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/gl.html b/docs/material/partials/language/gl.html new file mode 100644 index 00000000..14c4a868 --- /dev/null +++ b/docs/material/partials/language/gl.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "gl", + "clipboard.copy": "Copiar no cortapapeis", + "clipboard.copied": "Copiado no cortapapeis", + "edit.link.title": "Editar esta páxina", + "footer.previous": "Anterior", + "footer.next": "Seguinte", + "meta.comments": "Comentarios", +
"meta.source": "Fonte", + "search.language": "es", + "search.placeholder": "Busca", + "search.result.placeholder": "Insira un termo", + "search.result.none": "Sen resultados", + "search.result.one": "1 resultado atopado", + "search.result.other": "# resultados atopados", + "skip.link.title": "Ir ao contido", + "source.link.title": "Ir ao repositorio", + "toc.title": "Táboa de contidos" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/he.html b/docs/material/partials/language/he.html new file mode 100644 index 00000000..1450982c --- /dev/null +++ b/docs/material/partials/language/he.html @@ -0,0 +1,22 @@ +{% macro t(key) %}{{ { + "language": "he", + "direction": "rtl", + "clipboard.copy": "העתק ללוח", + "clipboard.copied": "הועתק ללוח", + "edit.link.title": "ערוך דף זה", + "footer.previous": "קודם", + "footer.next": "הַבָּא", + "meta.comments": "הערות", + "meta.source": "מָקוֹר", + "search.language": "", + "search.pipeline.stopwords": false, + "search.pipeline.trimmer": false, + "search.placeholder": "לחפש", + "search.result.placeholder": "הקלד כדי להתחיל לחפש", + "search.result.none": "אין מסמכים תואמים", + "search.result.one": "1 מסמך תואם", + "search.result.other": "# מסמך תואם", + "skip.link.title": "דלג לתוכן", + "source.link.title": "עבור אל מאגר", + "toc.title": "תוכן העניינים" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/hi.html b/docs/material/partials/language/hi.html new file mode 100644 index 00000000..e3260cb4 --- /dev/null +++ b/docs/material/partials/language/hi.html @@ -0,0 +1,22 @@ +{% macro t(key) %}{{ { + "language": "hi", + "clipboard.copy": "क्लिपबोर्ड पर कॉपी करें", + "clipboard.copied": "क्लिपबोर्ड पर कॉपी कर दिया गया", + "edit.link.title": "इस पृष्ठ को संपादित करें", + "footer.previous": "पिछला", + "footer.next": "आगामी", + "meta.comments": "टिप्पणियाँ", + "meta.source": "स्रोत", + "search.language": "", + "search.pipeline.stopwords": false, + "search.pipeline.trimmer": false, + "search.placeholder": "खोज", + "search.result.placeholder": "खोज शुरू करने के लिए टाइप करें", + "search.result.none": "कोई मिलान डॉक्यूमेंट नहीं", + "search.result.one": "1 मिलान डॉक्यूमेंट", + "search.result.other": "# मिलान डाक्यूमेंट्स", + "search.tokenizer": "[\s\-]+", + "skip.link.title": "विषय पर बढ़ें", + "source.link.title": "रिपॉजिटरी पर जाएं", + "toc.title": "विषय - सूची" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/hu.html b/docs/material/partials/language/hu.html new file mode 100644 index 00000000..9395b5dc --- /dev/null +++ b/docs/material/partials/language/hu.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "hu", + "clipboard.copy": "Másolás vágólapra", + "clipboard.copied": "Vágólapra másolva", + "edit.link.title": "Oldal szerkesztése", + "footer.previous": "Előző", + "footer.next": "Következő", + "meta.comments": "Hozzászólások", + "meta.source": "Forrás", + "search.language": "hu", + "search.placeholder": "Keresés", + "search.result.placeholder": "Kereséshez írj ide valamit", + "search.result.none": "Nincs találat", + "search.result.one": "1 egyező dokumentum", + "search.result.other": "# egyező dokumentum", + "skip.link.title": "Kihagyás", + "source.link.title": "Főoldalra ugrás", + "toc.title": "Tartalomjegyzék" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/it.html b/docs/material/partials/language/it.html new file mode 100644 index 00000000..954be2d3 --- /dev/null +++ b/docs/material/partials/language/it.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": 
"it", + "clipboard.copy": "Copia", + "clipboard.copied": "Copiato", + "edit.link.title": "Modifica", + "footer.previous": "Precedente", + "footer.next": "Prossimo", + "meta.comments": "Commenti", + "meta.source": "Sorgente", + "search.language": "it", + "search.placeholder": "Cerca", + "search.result.placeholder": "Scrivi per iniziare a cercare", + "search.result.none": "Nessun documento trovato", + "search.result.one": "1 documento trovato", + "search.result.other": "# documenti trovati", + "skip.link.title": "Vai al contenuto", + "source.link.title": "Apri repository", + "toc.title": "Indice" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/ja.html b/docs/material/partials/language/ja.html new file mode 100644 index 00000000..5e94d8b6 --- /dev/null +++ b/docs/material/partials/language/ja.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "ja", + "clipboard.copy": "クリップボードへコピー", + "clipboard.copied": "コピーしました", + "edit.link.title": "編集", + "footer.previous": "前", + "footer.next": "次", + "meta.comments": "コメント", + "meta.source": "ソース", + "search.language": "jp", + "search.placeholder": "検索", + "search.result.placeholder": "検索キーワードを入力してください", + "search.result.none": "何も見つかりませんでした", + "search.result.one": "1件見つかりました", + "search.result.other": "#件見つかりました", + "search.tokenizer": "[\s\- 、。,.]+", + "source.link.title": "リポジトリへ", + "toc.title": "目次" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/kr.html b/docs/material/partials/language/kr.html new file mode 100644 index 00000000..e7d88dd8 --- /dev/null +++ b/docs/material/partials/language/kr.html @@ -0,0 +1,18 @@ +{% macro t(key) %}{{ { + "language": "kr", + "clipboard.copy": "클립보드로 복사", + "clipboard.copied": "클립보드에 복사됨", + "edit.link.title": "이 페이지를 편집", + "footer.previous": "이전", + "footer.next": "다음", + "meta.comments": "댓글", + "meta.source": "출처", + "search.language": "jp", + "search.placeholder": "검색", + "search.result.placeholder": "검색어를 입력하세요", + "search.result.none": "검색어와 일치하는 문서가 없습니다", + "search.result.one": "1개의 일치하는 문서", + "search.result.other": "#개의 일치하는 문서", + "source.link.title": "저장소로 이동", + "toc.title": "목차" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/nl.html b/docs/material/partials/language/nl.html new file mode 100644 index 00000000..36be6dd1 --- /dev/null +++ b/docs/material/partials/language/nl.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "nl", + "clipboard.copy": "Kopiëren naar klembord", + "clipboard.copied": "Gekopieerd naar klembord", + "edit.link.title": "Wijzig deze pagina", + "footer.previous": "Vorige", + "footer.next": "Volgende", + "meta.comments": "Reacties", + "meta.source": "Bron", + "search.language": "du", + "search.placeholder": "Zoeken", + "search.result.placeholder": "Typ om te beginnen met zoeken", + "search.result.none": "Geen overeenkomende documenten", + "search.result.one": "1 overeenkomende document", + "search.result.other": "# overeenkomende documenten", + "skip.link.title": "Ga naar inhoud", + "source.link.title": "Ga naar repository", + "toc.title": "Inhoudstafel" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/no.html b/docs/material/partials/language/no.html new file mode 100644 index 00000000..8d3b3d12 --- /dev/null +++ b/docs/material/partials/language/no.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "no", + "clipboard.copy": "Kopier til utklippstavlen", + "clipboard.copied": "Kopiert til utklippstavlen", + "edit.link.title": "Rediger denne siden", + 
"footer.previous": "Forrige", + "footer.next": "Neste", + "meta.comments": "Kommentarer", + "meta.source": "Kilde", + "search.language": "no", + "search.placeholder": "Søk", + "search.result.placeholder": "Skriv søkeord", + "search.result.none": "Ingen treff", + "search.result.one": "1 treff", + "search.result.other": "# treff", + "skip.link.title": "Gå til innhold", + "source.link.title": "Gå til kilde", + "toc.title": "Innholdsfortegnelse" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/pl.html b/docs/material/partials/language/pl.html new file mode 100644 index 00000000..b158a129 --- /dev/null +++ b/docs/material/partials/language/pl.html @@ -0,0 +1,21 @@ +{% macro t(key) %}{{ { + "language": "pl", + "clipboard.copy": "Kopiuj do schowka", + "clipboard.copied": "Skopiowane", + "edit.link.title": "Edytuj tę stronę", + "footer.previous": "Poprzednia strona", + "footer.next": "Następna strona", + "meta.comments": "Komentarze", + "meta.source": "Kod źródłowy", + "search.language": "", + "search.pipeline.stopwords": false, + "search.pipeline.trimmer": false, + "search.placeholder": "Szukaj", + "search.result.placeholder": "Zacznij pisać, aby szukać", + "search.result.none": "Brak wyników wyszukiwania", + "search.result.one": "Wyniki wyszukiwania: 1", + "search.result.other": "Wyniki wyszukiwania: #", + "skip.link.title": "Przejdź do treści", + "source.link.title": "Idź do repozytorium", + "toc.title": "Spis treści" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/pt.html b/docs/material/partials/language/pt.html new file mode 100644 index 00000000..e4fbe7d1 --- /dev/null +++ b/docs/material/partials/language/pt.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "pt", + "clipboard.copy": "Copiar para área de transferência", + "clipboard.copied": "Copiado para área de transferência", + "edit.link.title": "Editar esta página", + "footer.previous": "Anterior", + "footer.next": "Próximo", + "meta.comments": "Comentários", + "meta.source": "Fonte", + "search.language": "pt", + "search.placeholder": "Buscar", + "search.result.placeholder": "Digite para iniciar a busca", + "search.result.none": "Nenhum resultado encontrado", + "search.result.one": "1 resultado encontrado", + "search.result.other": "# resultados encontrados", + "skip.link.title": "Ir para o conteúdo", + "source.link.title": "Ir ao repositório", + "toc.title": "Índice" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/ru.html b/docs/material/partials/language/ru.html new file mode 100644 index 00000000..424b22f6 --- /dev/null +++ b/docs/material/partials/language/ru.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "ru", + "clipboard.copy": "Копировать в буфер", + "clipboard.copied": "Скопировано в буфер", + "edit.link.title": "Редактировать страницу", + "footer.previous": "Назад", + "footer.next": "Вперед", + "meta.comments": "Комментарии", + "meta.source": "Исходный код", + "search.language": "ru", + "search.placeholder": "Поиск", + "search.result.placeholder": "Начните печатать для поиска", + "search.result.none": "Совпадений не найдено", + "search.result.one": "Найдено 1 совпадение", + "search.result.other": "Найдено # совпадений", + "skip.link.title": "Перейти к содержанию", + "source.link.title": "Перейти к репозиторию", + "toc.title": "Содержание" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/sk.html b/docs/material/partials/language/sk.html new file mode 100644 index 00000000..97bdc2c4 --- /dev/null +++ 
b/docs/material/partials/language/sk.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "sk", + "clipboard.copy": "Kopírovať do schránky", + "clipboard.copied": "Skopírované do schránky", + "edit.link.title": "Upraviť túto stránku", + "footer.previous": "Späť", + "footer.next": "Ďalej", + "meta.comments": "Komentáre", + "meta.source": "Zdroj", + "search.language": "sk", + "search.placeholder": "Hľadať", + "search.result.placeholder": "Pre vyhľadávanie začni písať", + "search.result.none": "Žiadne vyhovujúce dokumenty", + "search.result.one": "Vyhovujúci dokument: 1", + "search.result.other": "Vyhovujúce dokumenty: #", + "skip.link.title": "Preskočiť na obsah", + "source.link.title": "Zobraziť repozitár", + "toc.title": "Obsah" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/sv.html b/docs/material/partials/language/sv.html new file mode 100644 index 00000000..5d22b98b --- /dev/null +++ b/docs/material/partials/language/sv.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "sv", + "clipboard.copy": "Kopiera till urklipp", + "clipboard.copied": "Kopierat till urklipp", + "edit.link.title": "Redigera sidan", + "footer.previous": "Föregående", + "footer.next": "Nästa", + "meta.comments": "Kommentarer", + "meta.source": "Källa", + "search.language": "sv", + "search.placeholder": "Sök", + "search.result.placeholder": "Skriv sökord", + "search.result.none": "Inga sökresultat", + "search.result.one": "1 sökresultat", + "search.result.other": "# sökresultat", + "skip.link.title": "Gå till innehållet", + "source.link.title": "Gå till datakatalog", + "toc.title": "Innehållsförteckning" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/tr.html b/docs/material/partials/language/tr.html new file mode 100644 index 00000000..7e0b1a37 --- /dev/null +++ b/docs/material/partials/language/tr.html @@ -0,0 +1,18 @@ +{% macro t(key) %}{{ { + "language": "tr", + "clipboard.copy": "Kopyala", + "clipboard.copied": "Kopyalandı", + "edit.link.title": "Düzenle", + "footer.previous": "Önceki", + "footer.next": "Sonraki", + "meta.comments": "Yorumlar", + "meta.source": "Kaynak", + "search.language": "tr", + "search.placeholder": "Ara", + "search.result.placeholder": "Aramaya başlamak için yazın", + "search.result.none": "Eşleşen doküman bulunamadı", + "search.result.one": "1 doküman bulundu", + "search.result.other": "# doküman bulundu", + "source.link.title": "Depoya git", + "toc.title": "İçindekiler" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/uk.html b/docs/material/partials/language/uk.html new file mode 100644 index 00000000..78f4f4ec --- /dev/null +++ b/docs/material/partials/language/uk.html @@ -0,0 +1,19 @@ +{% macro t(key) %}{{ { + "language": "uk", + "clipboard.copy": "Скопіювати в буфер", + "clipboard.copied": "Скопійовано в буфер", + "edit.link.title": "Редагувати сторінку", + "footer.previous": "Назад", + "footer.next": "Вперед", + "meta.comments": "Коментарі", + "meta.source": "Вихідний код", + "search.language": "ru", + "search.placeholder": "Пошук", + "search.result.placeholder": "Розпочніть писати для пошуку", + "search.result.none": "Збігів не знайдено", + "search.result.one": "Знайдено 1 збіг", + "search.result.other": "Знайдено # збігів", + "skip.link.title": "Перейти до змісту", + "source.link.title": "Перейти до репозиторію", + "toc.title": "Зміст" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/vi.html b/docs/material/partials/language/vi.html new file mode 100644 index 00000000..c9f3082b 
--- /dev/null +++ b/docs/material/partials/language/vi.html @@ -0,0 +1,18 @@ +{% macro t(key) %}{{ { + "language": "vi", + "clipboard.copy": "Sao chép vào bộ nhớ", + "clipboard.copied": "Sao chép xong", + "edit.link.title": "Chỉnh sửa", + "footer.previous": "Trước", + "footer.next": "Sau", + "meta.comments": "Bình luận", + "meta.source": "Mã nguồn", + "search.placeholder": "Tìm kiếm", + "search.result.placeholder": "Nhập để bắt đầu tìm kiếm", + "search.result.none": "Không tìm thấy tài liệu liên quan", + "search.result.one": "1 tài liệu liên quan", + "search.result.other": "# tài liệu liên quan", + "skip.link.title": "Vào thẳng nội dung", + "source.link.title": "Đến kho lưu trữ mã nguồn", + "toc.title": "Mục lục" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/zh-Hant.html b/docs/material/partials/language/zh-Hant.html new file mode 100644 index 00000000..49948333 --- /dev/null +++ b/docs/material/partials/language/zh-Hant.html @@ -0,0 +1,20 @@ +{% macro t(key) %}{{ { + "language": "zh-Hant", + "clipboard.copy": "拷貝", + "clipboard.copied": "已拷貝", + "edit.link.title": "編輯此頁", + "footer.previous": "上一頁", + "footer.next": "下一頁", + "meta.comments": "評論", + "meta.source": "來源", + "search.language": "jp", + "search.placeholder": "搜尋", + "search.result.placeholder": "鍵入以開始檢索", + "search.result.none": "沒有找到符合條件的結果", + "search.result.one": "找到 1 个符合條件的結果", + "search.result.other": "# 個符合條件的結果", + "search.tokenizer": "[\,\。]+", + "skip.link.title": "跳轉至", + "source.link.title": "前往 Github 倉庫", + "toc.title": "目錄" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/language/zh.html b/docs/material/partials/language/zh.html new file mode 100644 index 00000000..951663d9 --- /dev/null +++ b/docs/material/partials/language/zh.html @@ -0,0 +1,20 @@ +{% macro t(key) %}{{ { + "language": "zh", + "clipboard.copy": "复制", + "clipboard.copied": "已复制", + "edit.link.title": "编辑此页", + "footer.previous": "后退", + "footer.next": "前进", + "meta.comments": "评论", + "meta.source": "来源", + "search.language": "jp", + "search.placeholder": "搜索", + "search.result.placeholder": "键入以开始搜索", + "search.result.none": "没有找到符合条件的结果", + "search.result.one": "找到 1 个符合条件的结果", + "search.result.other": "# 个符合条件的结果", + "search.tokenizer": "[\,\。]+", + "skip.link.title": "跳转至", + "source.link.title": "前往 Github 仓库", + "toc.title": "目录" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/nav-item.html b/docs/material/partials/nav-item.html new file mode 100644 index 00000000..60d27fe8 --- /dev/null +++ b/docs/material/partials/nav-item.html @@ -0,0 +1,54 @@ +{% set class = "md-nav__item" %} +{% if nav_item.active %} + {% set class = "md-nav__item md-nav__item--active" %} +{% endif %} +{% if nav_item.children %} +
  • + {% if nav_item.active %} + + {% else %} + + {% endif %} + + +
  • +{% elif nav_item == page %} +
  • + {% set toc_ = page.toc %} + + {% if toc_ | first is defined and "\x3ch1 id=" in page.content %} + {% set toc_ = (toc_ | first).children %} + {% endif %} + {% if toc_ | first is defined %} + + {% endif %} + + {{ nav_item.title }} + + {% if toc_ | first is defined %} + {% include "partials/toc.html" %} + {% endif %} +
  • +{% else %} +
  • + + {{ nav_item.title }} + +
  • +{% endif %} diff --git a/docs/material/partials/nav.html b/docs/material/partials/nav.html new file mode 100644 index 00000000..aa171a88 --- /dev/null +++ b/docs/material/partials/nav.html @@ -0,0 +1,26 @@ + diff --git a/docs/material/partials/palette.html b/docs/material/partials/palette.html new file mode 100644 index 00000000..a1bc0bc3 --- /dev/null +++ b/docs/material/partials/palette.html @@ -0,0 +1,39 @@ +{% macro primary(key) %}{{ { + "red": "#ef5350", + "pink": "#e91e63", + "purple": "#ab47bc", + "deep-purple": "#7e57c2", + "indigo": "#3f51b5", + "blue": "#2196f3", + "light-blue": "#03a9f4", + "cyan": "#00bcd4", + "teal": "#009688", + "green": "#4caf50", + "light-green": "#7cb342", + "lime": "#c0ca33", + "yellow": "#f9a825", + "amber": "#ffa000", + "orange": "#fb8c00", + "deep-orange": "#ff7043", + "brown": "#795548", + "grey": "#757575", + "blue-grey": "#546e7a" +}[key] }}{% endmacro %} +{% macro accent(key) %}{{ { + "red": "#ff1744", + "pink": "#f50057", + "purple": "#e040fb", + "deep-purple": "#7c4dff", + "indigo": "#536dfe", + "blue": "#448aff", + "light-blue": "#0091ea", + "cyan": "#00b8d4", + "teal": "#00bfa5", + "green": "#00c853", + "light-green": "#64dd17", + "lime": "#aeea00", + "yellow": "#ffd600", + "amber": "#ffab00", + "orange": "#ff9100", + "deep-orange": "#ff6e40" +}[key] }}{% endmacro %} diff --git a/docs/material/partials/search.html b/docs/material/partials/search.html new file mode 100644 index 00000000..25ecc48a --- /dev/null +++ b/docs/material/partials/search.html @@ -0,0 +1,23 @@ +{% import "partials/language.html" as lang with context %} + diff --git a/docs/material/partials/social.html b/docs/material/partials/social.html new file mode 100644 index 00000000..430ab890 --- /dev/null +++ b/docs/material/partials/social.html @@ -0,0 +1,8 @@ +{% if config.extra.social %} + +{% endif %} diff --git a/docs/material/partials/source.html b/docs/material/partials/source.html new file mode 100644 index 00000000..a0417b88 --- /dev/null +++ b/docs/material/partials/source.html @@ -0,0 +1,25 @@ +{% import "partials/language.html" as lang with context %} +{% set platform = config.extra.repo_icon or config.repo_url %} +{% if "github" in platform %} + {% set repo_type = "github" %} +{% elif "gitlab" in platform %} + {% set repo_type = "gitlab" %} +{% elif "bitbucket" in platform %} + {% set repo_type = "bitbucket" %} +{% else %} + {% set repo_type = "" %} +{% endif %} +{% block repo %} + + {% if repo_type %} +
    + + + +
    + {% endif %} +
    + {{ config.repo_name }} +
    +
    +{% endblock %} diff --git a/docs/material/partials/tabs-item.html b/docs/material/partials/tabs-item.html new file mode 100644 index 00000000..686b5a59 --- /dev/null +++ b/docs/material/partials/tabs-item.html @@ -0,0 +1,31 @@ +{% if nav_item.is_homepage %} +
+ {% if not page.ancestors | length and nav | selectattr("url", page.url) %} + + {{ nav_item.title }} + + {% else %} + + {{ nav_item.title }} + + {% endif %} +
+{% elif nav_item.children and nav_item.children | length > 0 %} + {% set title = title | default(nav_item.title) %} + {% if (nav_item.children | first).children | length > 0 %} + {% set nav_item = nav_item.children | first %} + {% include "partials/tabs-item.html" %} + {% else %} +
+ {% if nav_item.active %} + + {{ title }} + + {% else %} + + {{ title }} + + {% endif %} +
+ {% endif %} +{% endif %} diff --git a/docs/material/partials/tabs.html b/docs/material/partials/tabs.html new file mode 100644 index 00000000..e040436b --- /dev/null +++ b/docs/material/partials/tabs.html @@ -0,0 +1,13 @@ +{% set class = "md-tabs" %} +{% if page.ancestors | length > 0 %} + {% set class = "md-tabs md-tabs--active" %} +{% endif %} + diff --git a/docs/material/partials/toc-item.html b/docs/material/partials/toc-item.html new file mode 100644 index 00000000..3b4f4d76 --- /dev/null +++ b/docs/material/partials/toc-item.html @@ -0,0 +1,14 @@ +
+ + {{ toc_item.title }} + + {% if toc_item.children %} + + {% endif %} +
diff --git a/docs/material/partials/toc.html b/docs/material/partials/toc.html new file mode 100644 index 00000000..b4bf3572 --- /dev/null +++ b/docs/material/partials/toc.html @@ -0,0 +1,33 @@ +{% import "partials/language.html" as lang with context %} + diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml new file mode 100644 index 00000000..e7caaffc --- /dev/null +++ b/docs/mkdocs.yml @@ -0,0 +1,46 @@ +site_name: Delphix Virtualization SDK 2.0.0
+theme:
+  name: material
+  custom_dir: 'material/'
+  palette:
+    primary:
+    accent:
+  logo: 'images/delphix-logo-white.png'
+  favicon: 'images/logo.png'
+  font:
+    text: Helvetica Neue
+    code: Ubuntu Mono
+
+copyright: Copyright &copy; 2019 Delphix Corp.
+
+google_analytics:
+  - 'UA-35429885-3'
+  - 'auto'
+
+extra:
+  social:
+    - type: sitemap
+      link: https://www.delphix.com/
+    - type: facebook
+      link: https://www.facebook.com/DelphixCorp/
+    - type: twitter
+      link: https://twitter.com/delphix
+    - type: linkedin
+      link: https://www.linkedin.com/company/delphix
+    - type: github
+      link: https://github.com/delphix
+
+extra_css:
+  - 'stylesheets/extra.css'
+
+markdown_extensions:
+  - toc:
+      permalink: true
+  - admonition
+  - codehilite:
+      guess_lang: false
+  - meta
+
+plugins:
+  - search
+  - awesome-pages diff --git a/docs/readme.md b/docs/readme.md new file mode 100644 index 00000000..6049ebc3 --- /dev/null +++ b/docs/readme.md @@ -0,0 +1,86 @@ +# Virtualization SDK Repository
+
+This is the Markdown-based documentation for the Virtualization SDK.
+
+## Local Testing
+Create a `virtualenv` using Python 3 and run `pipenv run mkdocs serve`:
+
+```
+$ virtualenv -p /usr/local/bin/python3 .
+Running virtualenv with interpreter /usr/local/bin/python3
+Using base prefix '/usr/local/Cellar/python/3.7.2_1/Frameworks/Python.framework/Versions/3.7'
+New python executable in /Users/asarin/Documents/repos/virt-sdk-docs/env/bin/python3.7
+Also creating executable in /Users/asarin/Documents/repos/virt-sdk-docs/env/bin/python
+Installing setuptools, pip, wheel...
+done.
+
+$ source bin/activate
+
+$ pipenv run mkdocs serve
+INFO - Building documentation...
+INFO - Cleaning site directory
+[I 200424 15:54:06 server:292] Serving on http://127.0.0.1:8000
+[I 200424 15:54:06 handlers:59] Start watching changes
+[I 200424 15:54:06 handlers:61] Start detecting changes
+```
+
+The docs will be served at [http://127.0.0.1:8000](http://127.0.0.1:8000).
+
+### Debugging
+
+#### mkdocs not found
+```
+$ pipenv run mkdocs serve
+Error: the command mkdocs could not be found within PATH or Pipfile's [scripts].
+```
+Run `pipenv install` to make sure all the dependencies are installed from the Pipfile.
+
+#### setuptools incompatibility
+```
+$ pipenv install
+Installing dependencies from Pipfile.lock (65135d)…
+An error occurred while installing markupsafe==1.0 --hash=sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665! Will try again.
+ 🐍 ▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉ 14/14 — 00:00:10 +Installing initially failed dependencies… +[pipenv.exceptions.InstallError]: File "/usr/local/lib/python3.7/site-packages/pipenv/core.py", line 1874, in do_install +[pipenv.exceptions.InstallError]: keep_outdated=keep_outdated +[pipenv.exceptions.InstallError]: File "/usr/local/lib/python3.7/site-packages/pipenv/core.py", line 1253, in do_init +[pipenv.exceptions.InstallError]: pypi_mirror=pypi_mirror, +[pipenv.exceptions.InstallError]: File "/usr/local/lib/python3.7/site-packages/pipenv/core.py", line 859, in do_install_dependencies +[pipenv.exceptions.InstallError]: retry_list, procs, failed_deps_queue, requirements_dir, **install_kwargs +[pipenv.exceptions.InstallError]: File "/usr/local/lib/python3.7/site-packages/pipenv/core.py", line 763, in batch_install +[pipenv.exceptions.InstallError]: _cleanup_procs(procs, not blocking, failed_deps_queue, retry=retry) +[pipenv.exceptions.InstallError]: File "/usr/local/lib/python3.7/site-packages/pipenv/core.py", line 681, in _cleanup_procs +[pipenv.exceptions.InstallError]: raise exceptions.InstallError(c.dep.name, extra=err_lines) +[pipenv.exceptions.InstallError]: ['Collecting markupsafe==1.0', ' Using cached MarkupSafe-1.0.tar.gz (14 kB)'] +[pipenv.exceptions.InstallError]: ['ERROR: Command errored out with exit status 1:', ' command: /Users/asarin/Documents/repos/github/virtualization-sdk/docs/env/bin/python3.7 -c \'import sys, setuptools, tokenize; sys.argv[0] = \'"\'"\'/private/var/folders/fg/d4zl41bs6wv97zpzq9gckxsm0000gn/T/pip-install-txi66ppe/markupsafe/setup.py\'"\'"\'; __file__=\'"\'"\'/private/var/folders/fg/d4zl41bs6wv97zpzq9gckxsm0000gn/T/pip-install-txi66ppe/markupsafe/setup.py\'"\'"\';f=getattr(tokenize, \'"\'"\'open\'"\'"\', open)(__file__);code=f.read().replace(\'"\'"\'\\r\\n\'"\'"\', \'"\'"\'\\n\'"\'"\');f.close();exec(compile(code, __file__, \'"\'"\'exec\'"\'"\'))\' egg_info --egg-base /private/var/folders/fg/d4zl41bs6wv97zpzq9gckxsm0000gn/T/pip-pip-egg-info-cl5ykzbs', ' cwd: /private/var/folders/fg/d4zl41bs6wv97zpzq9gckxsm0000gn/T/pip-install-txi66ppe/markupsafe/', ' Complete output (5 lines):', ' Traceback (most recent call last):', ' File "", line 1, in ', ' File "/private/var/folders/fg/d4zl41bs6wv97zpzq9gckxsm0000gn/T/pip-install-txi66ppe/markupsafe/setup.py", line 6, in ', ' from setuptools import setup, Extension, Feature', " ImportError: cannot import name 'Feature' from 'setuptools' (/Users/asarin/Documents/repos/github/virtualization-sdk/docs/env/lib/python3.7/site-packages/setuptools/__init__.py)", ' ----------------------------------------', 'ERROR: Command errored out with exit status 1: python setup.py egg_info Check the logs for full command output.'] +ERROR: ERROR: Package installation failed... +``` + +Install `setuptools==45` to get around a deprecated API in version 46. 
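To confirm this is the failure you are hitting before repinning: MarkupSafe 1.0's `setup.py` imports `Feature` from setuptools, and that name was removed in setuptools 46, which is exactly what the `ImportError` in the traceback above shows. The snippet below is an illustrative check only, not part of this repo:

```python
# Illustrative only: check whether the installed setuptools can still build
# MarkupSafe 1.0, whose setup.py runs
# `from setuptools import setup, Extension, Feature`.
# `Feature` was removed in setuptools 46.
import setuptools


def can_build_markupsafe_10():
    """Return True if `setuptools.Feature` still exists (setuptools < 46)."""
    return hasattr(setuptools, "Feature")


if __name__ == "__main__":
    status = "ok" if can_build_markupsafe_10() else "too new for MarkupSafe 1.0"
    print("setuptools {}: {}".format(setuptools.__version__, status))
```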
+
+```
+$ pip install setuptools==45
+Collecting setuptools==45
+  Downloading setuptools-45.0.0-py2.py3-none-any.whl (583 kB)
+     |████████████████████████████████| 583 kB 2.7 MB/s
+Installing collected packages: setuptools
+  Attempting uninstall: setuptools
+    Found existing installation: setuptools 46.1.3
+    Uninstalling setuptools-46.1.3:
+      Successfully uninstalled setuptools-46.1.3
+Successfully installed setuptools-45.0.0
+(env) ~/Documents/repos/github/virtualization-sdk/docs$ pipenv install
+Installing dependencies from Pipfile.lock (65135d)…
+ 🐍 ▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉ 14/14 — 00:00:03
+```
+
+## Live Testing and Reviews
+The command `git docsdev-review` handles publishing a review and pushing your changes to a live docs server. For example, you can clone the `docsdev-server` image on DCOA and then run `git docsdev-review -m `. This will:
+
+- Push your doc changes to your VM
+- Give you a link to the docsdev server so you can test your changes live in a browser
+- Publish a review
+
+## Workflow diagrams
+We create workflow diagrams with `draw.io`, a tool that can import and export diagrams as HTML files. To add a diagram or edit an existing one, create a new HTML file or import the existing one from `docs/References/html` into `draw.io` and make your changes. When you are done, select your diagram and export it as a PNG file; you will be prompted to crop what was selected, and you should check that box to trim the whitespace around the diagram. Think of the HTML files as source code and the PNG files as build artifacts. After exporting, check in the updated HTML file to `docs/References/html` and the PNG file to `docs/References/images`. diff --git a/dvp/MANIFEST.in b/dvp/MANIFEST.in index eb9b6989..0479bae9 100644 --- a/dvp/MANIFEST.in +++ b/dvp/MANIFEST.in @@ -2,4 +2,5 @@ # Copyright (c) 2019 by Delphix. All rights reserved.
# -include LICENSE \ No newline at end of file +include LICENSE +include src/main/python/dlpx/virtualization/VERSION \ No newline at end of file diff --git a/dvp/Pipfile.lock b/dvp/Pipfile.lock deleted file mode 100644 index ef316abe..00000000 --- a/dvp/Pipfile.lock +++ /dev/null @@ -1,172 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "7fe2a4e73ec8db7857c00cac846ad4c410257958a99eb13527aaa0716064ba3e" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", - "verifySsl": true - } - ] - }, - "default": { - "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.0-internal-020.tar.gz", - "version": "== 1.0.0-internal-020" - }, - "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.0.0-internal-020.tar.gz", - "version": "== 1.0.0-internal-020" - }, - "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.0.0-internal-020.tar.gz", - "version": "== 1.0.0-internal-020" - }, - "dvp-tools": { - "path": "../tools/build/python-dist/dvp-tools-1.0.0-internal-020.tar.gz", - "version": "== 1.0.0-internal-020" - } - }, - "develop": { - "atomicwrites": { - "hashes": [ - "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", - "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" - ], - "version": "==1.3.0" - }, - "attrs": { - "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" - ], - "version": "==19.1.0" - }, - "configparser": { - "hashes": [ - "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", - "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" - ], - "markers": "python_version < '3'", - "version": "==3.7.4" - }, - "contextlib2": { - "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" - ], - "markers": "python_version < '3'", - "version": "==0.5.5" - }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.0'", - "version": "==1.0.2" - }, - "importlib-metadata": { - "hashes": [ - "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7", - "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db" - ], - "version": "==0.18" - }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" - }, - "packaging": { - "hashes": [ - "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", - "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" - ], - "version": "==19.0" - }, - "pathlib2": { - "hashes": [ - "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" - ], - "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.4" - }, - "pluggy": { - "hashes": [ - 
"sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", - "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c" - ], - "version": "==0.12.0" - }, - "py": { - "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" - ], - "version": "==1.8.0" - }, - "pyparsing": { - "hashes": [ - "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a", - "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03" - ], - "version": "==2.4.0" - }, - "pytest": { - "hashes": [ - "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae", - "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6" - ], - "index": "delphix", - "version": "==4.6.4" - }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "wcwidth": { - "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" - ], - "version": "==0.1.7" - }, - "zipp": { - "hashes": [ - "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a", - "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec" - ], - "version": "==0.5.2" - } - } -} diff --git a/dvp/build.gradle b/dvp/build.gradle deleted file mode 100644 index 6810040e..00000000 --- a/dvp/build.gradle +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -plugins { - id "delphix.python" -} - -artifacts { - python sdist.distFile -} - -dependencies { - // Necessary to compile generated python protocol buffer libraries. 
- python project(path: ":common", configuration: "python") - python project(path: ":platform", configuration: "python") - python project(path: ":libs", configuration: "python") - python project(path: ":tools", configuration: "python") -} - -dlpxPython { - sources { - delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" - } - } - - dist { - name = "dvp" - } - - packages { - "dvp-common" { - version = "== $project.version" - path = file(tasks.getByPath(":common:sdist").getDistFile().toString()) - } - - "dvp-platform" { - version = "== $project.version" - path = file(tasks.getByPath(":platform:sdist").getDistFile().toString()) - } - - "dvp-libs" { - version = "== $project.version" - path = file(tasks.getByPath(":libs:sdist").getDistFile().toString()) - } - - "dvp-tools" { - version = "== $project.version" - path = file(tasks.getByPath(":tools:sdist").getDistFile().toString()) - } - } - - supportedPythons { - "python2.7" {} - } -} - -/* - * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information - * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in - * 'build' so these would be created there, however they still could be created in the 'src' directory if someone runs - * 'setup.py' manually. This is often done during development to install the package for testing. - */ -task removeEggInfo(type: Delete) { - delete "${projectDir}/src/main/python/dvp.egg-info" -} - -task wheel(type: SetupPyTask) { - setupPyCommand "bdist_wheel" - distFile String.format("%s-%s-%s-%s-%s.whl", dist.name.get().replace("-", "_"), "$project.version".replace("-", "_"), "py2", "none", "any") - - dependsOn makeSetupPy -} - -clean.dependsOn('removeEggInfo') diff --git a/dvp/lock.dev-requirements.txt b/dvp/lock.dev-requirements.txt deleted file mode 100644 index ccac1fd9..00000000 --- a/dvp/lock.dev-requirements.txt +++ /dev/null @@ -1,18 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -atomicwrites==1.3.0 -attrs==19.1.0 -configparser==3.7.4 ; python_version < '3' -contextlib2==0.5.5 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==0.18 -more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.0 -pathlib2==2.3.4 ; python_version == '3.4.*' or python_version < '3' -pluggy==0.12.0 -py==1.8.0 -pyparsing==2.4.0 -pytest==4.6.4 -scandir==1.10.0 ; python_version < '3.5' -six==1.12.0 -wcwidth==0.1.7 -zipp==0.5.2 diff --git a/dvp/lock.requirements.txt b/dvp/lock.requirements.txt deleted file mode 100644 index 11cc7491..00000000 --- a/dvp/lock.requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.0-internal-020.tar.gz -./../libs/build/python-dist/dvp-libs-1.0.0-internal-020.tar.gz -./../platform/build/python-dist/dvp-platform-1.0.0-internal-020.tar.gz -./../tools/build/python-dist/dvp-tools-1.0.0-internal-020.tar.gz diff --git a/dvp/requirements.txt b/dvp/requirements.txt new file mode 100644 index 00000000..e584d23d --- /dev/null +++ b/dvp/requirements.txt @@ -0,0 +1,15 @@ +bump2version==0.5.11 +contextlib2==0.6.0.post1 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.0' +importlib-metadata==1.3.0 ; python_version < '3.8' +more-itertools==5.0.0 ; python_version <= '2.7' +packaging==20.0 +pathlib2==2.3.5 ; python_version < '3' +pluggy==0.13.1 
+py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 +scandir==1.10.0 ; python_version < '3.5' +six==1.13.0 +wcwidth==0.1.8 +zipp==0.6.0 diff --git a/dvp/setup.py b/dvp/setup.py new file mode 100644 index 00000000..b530d493 --- /dev/null +++ b/dvp/setup.py @@ -0,0 +1,21 @@ +import os +import setuptools + +PYTHON_SRC = 'src/main/python' + +with open(os.path.join(PYTHON_SRC, 'dlpx/virtualization/VERSION')) as version_file: + version = version_file.read().strip() + +install_requires = [ + "dvp-common == {}".format(version), + "dvp-libs == {}".format(version), + "dvp-platform == {}".format(version), + "dvp-tools == {}".format(version) +] + +setuptools.setup(name='dvp', + version=version, + install_requires=install_requires, + package_dir={'': PYTHON_SRC}, + packages=setuptools.find_packages(PYTHON_SRC), +) diff --git a/dvp/src/main/python/dlpx/virtualization/VERSION b/dvp/src/main/python/dlpx/virtualization/VERSION new file mode 100644 index 00000000..359a5b95 --- /dev/null +++ b/dvp/src/main/python/dlpx/virtualization/VERSION @@ -0,0 +1 @@ +2.0.0 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 28861d27..00000000 Binary files a/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index a24381c6..00000000 --- a/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,10 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -distributionSha256Sum=7506638a380092a0406364c79d6c87d03d23017fc25a5770379d1ce23c3fcd4d -distributionUrl=http\://artifactory.delphix.com/artifactory/gradle-distributions/gradle-5.1-bin.zip -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew deleted file mode 100755 index cccdd3d5..00000000 --- a/gradlew +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env sh - -############################################################################## -## -## Gradle start up script for UN*X -## -############################################################################## - -# Attempt to set APP_HOME -# Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi -done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null - -APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" - -warn () { - echo "$*" -} - -die () { - echo - echo "$*" - echo - exit 1 -} - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - -# Determine the Java command to use to start the JVM. 
-if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD="java" - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi -fi - -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi - -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi - # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" - fi - i=$((i+1)) - done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac -fi - -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=$(save "$@") - -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" - -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ 
"$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - -exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat deleted file mode 100644 index e95643d6..00000000 --- a/gradlew.bat +++ /dev/null @@ -1,84 +0,0 @@ -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto init - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/libs/.gitignore b/libs/.gitignore deleted file mode 100644 index 74641651..00000000 --- a/libs/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -# Ignore generated Java files -src/main/java/ diff --git a/libs/MANIFEST.in b/libs/MANIFEST.in index eb9b6989..e99a4c2d 100644 --- a/libs/MANIFEST.in +++ b/libs/MANIFEST.in @@ -2,4 +2,5 @@ # Copyright (c) 2019 by Delphix. All rights reserved. 
# -include LICENSE \ No newline at end of file +include LICENSE +include src/main/python/dlpx/virtualization/libs/VERSION diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock deleted file mode 100644 index 98d02f70..00000000 --- a/libs/Pipfile.lock +++ /dev/null @@ -1,197 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "b4a0a3cdaae619ce4a794b1499550bce41401ab620dc38f2b46b931a7b90153c" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", - "verifySsl": true - } - ] - }, - "default": { - "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.0.tar.gz", - "version": "== 1.0.0" - }, - "protobuf": { - "hashes": [ - "sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4", - "sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811", - "sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444", - "sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96", - "sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2", - "sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef", - "sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e", - "sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995", - "sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed", - "sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9", - "sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90", - "sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19", - "sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625", - "sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9", - "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", - "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" - ], - "index": "delphix", - "version": "==3.6.1" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - } - }, - "develop": { - "atomicwrites": { - "hashes": [ - "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", - "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" - ], - "version": "==1.3.0" - }, - "attrs": { - "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" - ], - "version": "==19.1.0" - }, - "configparser": { - "hashes": [ - "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", - "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" - ], - "markers": "python_version < '3'", - "version": "==3.7.4" - }, - "contextlib2": { - "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" - ], - "markers": "python_version < '3'", - "version": "==0.5.5" - }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.3'", - "version": "==1.0.2" - }, - "importlib-metadata": { - "hashes": [ - 
"sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8", - "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3" - ], - "version": "==0.19" - }, - "mock": { - "hashes": [ - "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", - "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" - ], - "index": "delphix", - "version": "==3.0.5" - }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" - }, - "packaging": { - "hashes": [ - "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9", - "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe" - ], - "version": "==19.1" - }, - "pathlib2": { - "hashes": [ - "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" - ], - "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.4" - }, - "pluggy": { - "hashes": [ - "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", - "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c" - ], - "version": "==0.12.0" - }, - "py": { - "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" - ], - "version": "==1.8.0" - }, - "pyparsing": { - "hashes": [ - "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", - "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" - ], - "version": "==2.4.2" - }, - "pytest": { - "hashes": [ - "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae", - "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6" - ], - "index": "delphix", - "version": "==4.6.4" - }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "wcwidth": { - "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" - ], - "version": "==0.1.7" - }, - "zipp": { - "hashes": [ - 
"sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a", - "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec" - ], - "version": "==0.5.2" - } - } -} diff --git a/libs/build.gradle b/libs/build.gradle deleted file mode 100644 index b4a205a1..00000000 --- a/libs/build.gradle +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -plugins { - id "java" - id "com.google.protobuf" - id "delphix.python" -} - -repositories { - mavenCentral() -} - -dependencies { - // Necessary to compile generated java protocol buffer libraries. - compile project(":common") - - // Necessary to compile generated python protocol buffer libraries. - python project(path: ":common", configuration: "python") -} - -protobuf { - protoc { - artifact = "com.google.protobuf:protoc:3.6.1" - } - - // This activates other protoc language targets. - // https://github.com/google/protobuf-gradle-plugin#default-outputs - generateProtoTasks { - all().each { task -> - task.builtins { - python { - } - } - } - } - generatedFilesBaseDir = "$projectDir/src" -} - -artifacts { - python sdist.distFile -} - -dlpxPython { - sources { - delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" - } - } - - dist { - name = "dvp-libs" - } - - packages { - protobuf { - version = "==3.6.1" - } - - "dvp-common" { - version = "== $project.version" - path = file(tasks.getByPath(":common:sdist").getDistFile().toString()) - } - } - - devPackages { - - mock { - version = ">=2.0" - } - } - - supportedPythons { - "python2.7" {} - } -} - -/* - * This is a temporary task. 'src/main/java' only contains compiled protobuf classes. Sometimes these get out of date - * and they need to be cleaned up manually. In the long term, everything should probably be built under 'build' so - * the 'clean' task automatically deletes them. In the short term though, this task cleans them up. - */ -task removeProtobufJava(type: Delete) { - delete "${projectDir}/src/main/java" -} - -/* - * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information - * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in - * 'build' so these would be created there, however they still could be created in the 'src' directory if someone runs - * 'setup.py' manually. This is often done during development to install the package for testing. - */ -task removeEggInfo(type: Delete) { - delete "${projectDir}/src/main/python/dvp_libs.egg-info" -} - -task wheel(type: SetupPyTask) { - setupPyCommand "bdist_wheel" - distFile String.format("%s-%s-%s-%s-%s.whl", dist.name.get().replace("-", "_"), "$project.version".replace("-", "_"), "py2", "none", "any") - - dependsOn makeSetupPy -} - -clean.dependsOn('removeProtobufJava') -clean.dependsOn('removeEggInfo') - -// sdist and the python tests depend on the generated python libraries -// from the protoc plugin. Must manually specify as plugins are not aware -// of each other. 
-project.afterEvaluate { - tasks["sdist"].dependsOn tasks["generateProto"] - tasks["test_python2.7"].dependsOn tasks["generateProto"] -} diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt deleted file mode 100644 index f4c45d02..00000000 --- a/libs/lock.dev-requirements.txt +++ /dev/null @@ -1,19 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -atomicwrites==1.3.0 -attrs==19.1.0 -configparser==3.7.4 ; python_version < '3' -contextlib2==0.5.5 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==0.19 -mock==3.0.5 -more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.1 -pathlib2==2.3.4 ; python_version == '3.4.*' or python_version < '3' -pluggy==0.12.0 -py==1.8.0 -pyparsing==2.4.2 -pytest==4.6.4 -scandir==1.10.0 ; python_version < '3.5' -six==1.12.0 -wcwidth==0.1.7 -zipp==0.5.2 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt deleted file mode 100644 index fce04b93..00000000 --- a/libs/lock.requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.0.tar.gz -protobuf==3.6.1 -six==1.12.0 diff --git a/libs/requirements.txt b/libs/requirements.txt new file mode 100644 index 00000000..10fbe011 --- /dev/null +++ b/libs/requirements.txt @@ -0,0 +1,17 @@ +./../common +bump2version==0.5.11 +contextlib2==0.6.0.post1 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.3' +importlib-metadata==1.3.0 ; python_version < '3.8' +mock==3.0.5 +more-itertools==5.0.0 ; python_version <= '2.7' +packaging==19.2 +pathlib2==2.3.5 ; python_version < '3.6' +pluggy==0.13.1 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.8 +scandir==1.10.0 ; python_version < '3.5' +six==1.13.0 +wcwidth==0.1.8 +zipp==0.6.0 diff --git a/libs/setup.py b/libs/setup.py new file mode 100644 index 00000000..83597b14 --- /dev/null +++ b/libs/setup.py @@ -0,0 +1,19 @@ +import os +import setuptools + +PYTHON_SRC = 'src/main/python' + +with open(os.path.join(PYTHON_SRC, 'dlpx/virtualization/libs/VERSION')) as version_file: + version = version_file.read().strip() + +install_requires = [ + "dvp-api == 1.1.0", + "dvp-common == {}".format(version) +] + +setuptools.setup(name='dvp-libs', + version=version, + install_requires=install_requires, + package_dir={'': PYTHON_SRC}, + packages=setuptools.find_packages(PYTHON_SRC), +) diff --git a/libs/src/main/proto/dlpx/virtualization/libs.proto b/libs/src/main/proto/dlpx/virtualization/libs.proto deleted file mode 100644 index 5dfabcf4..00000000 --- a/libs/src/main/proto/dlpx/virtualization/libs.proto +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2018, 2019 by Delphix. All rights reserved. 
- */ - -syntax = "proto3"; - -import "dlpx/virtualization/common.proto"; - -option java_multiple_files = true; - -package com.delphix.virtualization.libs; - - -message ActionableLibraryError { - int32 id = 1; - string message = 2; -} - -message NonActionableLibraryError { -} - -message LibraryErrorResult { - oneof error { - ActionableLibraryError actionable_error = 1; - NonActionableLibraryError non_actionable_error = 2; - } -} - -message RunBashRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string command = 2; - map variables = 3; - bool use_login_shell = 4; -} - -message RunBashResult { - int32 exit_code = 1; - string stdout = 2; - string stderr = 3; -} - -message RunBashResponse { - oneof result { - RunBashResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message RunPowerShellRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string command = 2; - map variables = 3; -} - -message RunPowerShellResult { - int32 exit_code = 1; - string stdout = 2; - string stderr = 3; -} - -message RunPowerShellResponse { - oneof result { - RunPowerShellResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message RunSyncRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string source_directory = 2; - string rsync_user = 3; - repeated string exclude_paths = 4; - repeated string sym_links_to_follow = 5; - -} - -message RunSyncResult { } - -message RunSyncResponse { - oneof result { - RunSyncResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message RunExpectRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string command = 2; - map variables = 3; -} - -message RunExpectResult { - int32 exit_code = 1; - string stdout = 2; - string stderr = 3; -} - -message RunExpectResponse { - oneof result { - RunExpectResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message LogRequest { - string message = 1; - enum LogLevel { - DEBUG = 0; - INFO = 1; - ERROR = 2; - } - LogLevel level = 2; -} - -message LogResult { } - -message LogResponse { - oneof result { - LogResult return_value = 1; - LibraryErrorResult error = 2; - } -} diff --git a/libs/src/main/python/dlpx/virtualization/libs/VERSION b/libs/src/main/python/dlpx/virtualization/libs/VERSION new file mode 100644 index 00000000..359a5b95 --- /dev/null +++ b/libs/src/main/python/dlpx/virtualization/libs/VERSION @@ -0,0 +1 @@ +2.0.0 \ No newline at end of file diff --git a/libs/src/main/python/dlpx/virtualization/libs/libs.py b/libs/src/main/python/dlpx/virtualization/libs/libs.py index c8f042a2..2e0fb4bd 100644 --- a/libs/src/main/python/dlpx/virtualization/libs/libs.py +++ b/libs/src/main/python/dlpx/virtualization/libs/libs.py @@ -26,7 +26,7 @@ import sys -from dlpx.virtualization import libs_pb2 +from dlpx.virtualization.api import libs_pb2 from dlpx.virtualization.libs.exceptions import (IncorrectArgumentTypeError, LibraryError, PluginScriptError) diff --git a/libs/src/test/java/NotUsed.java b/libs/src/test/java/NotUsed.java deleted file mode 100644 index 6608cff8..00000000 --- a/libs/src/test/java/NotUsed.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -/** - * Gradle will fail when running the test task if there are not classes in the - * Java test jar. This class is simply here to prevent that from happening. - * If a test is introduced in the future this file will be deleted. 
- */ -public class NotUsed { -} diff --git a/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py b/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py index 00f726fa..b1575da1 100644 --- a/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py +++ b/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py @@ -6,10 +6,10 @@ def test_import_common(): - from dlpx.virtualization import common_pb2 + from dlpx.virtualization.api import common_pb2 assert issubclass(common_pb2.Repository, message.Message) def test_import_libs(): - from dlpx.virtualization import libs_pb2 + from dlpx.virtualization.api import libs_pb2 assert issubclass(libs_pb2.RunSyncRequest, message.Message) diff --git a/libs/src/test/python/dlpx/virtualization/test_libs.py b/libs/src/test/python/dlpx/virtualization/test_libs.py index 4e5ce096..83364932 100644 --- a/libs/src/test/python/dlpx/virtualization/test_libs.py +++ b/libs/src/test/python/dlpx/virtualization/test_libs.py @@ -1,11 +1,11 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import mock import pytest -from dlpx.virtualization import libs_pb2 +from dlpx.virtualization.api import libs_pb2 from dlpx.virtualization import libs from dlpx.virtualization.libs.exceptions import ( IncorrectArgumentTypeError, LibraryError, PluginScriptError) @@ -189,11 +189,12 @@ def test_run_bash_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_bash(remote_connection, command, variables, use_login_shell) - assert err_info.value.message == ( - "The function run_bash's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_bash's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) @staticmethod def test_run_bash_bad_use_login_shell(remote_connection): @@ -213,7 +214,6 @@ def test_run_bash_bad_use_login_shell(remote_connection): class TestLibsRunSync: @staticmethod def test_run_sync(remote_connection): - expected_run_sync_response = libs_pb2.RunSyncResponse() expected_source_directory = 'sourceDirectory' @@ -269,7 +269,6 @@ def test_run_sync_with_actionable_error(remote_connection): @staticmethod def test_run_sync_with_nonactionable_error(remote_connection): - response = libs_pb2.RunSyncResponse() na_error = libs_pb2.NonActionableLibraryError() response.error.non_actionable_error.CopyFrom(na_error) @@ -593,11 +592,12 @@ def test_run_powershell_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_powershell(remote_connection, command, variables) - assert err_info.value.message == ( - "The function run_powershell's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_powershell's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == 
message.format('str', 'int')) class TestLibsRunExpect: @@ -647,12 +647,12 @@ def test_run_expect_check_true_exitcode_success(remote_connection): def mock_run_expect(actual_run_expect_request): assert actual_run_expect_request.command == expected_command assert ( - actual_run_expect_request.remote_connection.environment.name - == remote_connection.environment.name + actual_run_expect_request.remote_connection.environment.name + == remote_connection.environment.name ) assert ( - actual_run_expect_request.remote_connection.environment.reference - == remote_connection.environment.reference + actual_run_expect_request.remote_connection.environment.reference + == remote_connection.environment.reference ) return expected_run_expect_response @@ -704,7 +704,6 @@ def test_run_expect_with_actionable_error(remote_connection): @staticmethod def test_run_expect_with_nonactionable_error(remote_connection): - response = libs_pb2.RunExpectResponse() na_error = libs_pb2.NonActionableLibraryError() response.error.non_actionable_error.CopyFrom(na_error) @@ -768,8 +767,9 @@ def test_run_expect_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_expect(remote_connection, command, variables) - assert err_info.value.message == ( - "The function run_expect's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_expect's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) diff --git a/libs/src/test/python/dlpx/virtualization/test_logging.py b/libs/src/test/python/dlpx/virtualization/test_logging.py index 88e51608..022cffa8 100644 --- a/libs/src/test/python/dlpx/virtualization/test_logging.py +++ b/libs/src/test/python/dlpx/virtualization/test_logging.py @@ -7,9 +7,9 @@ import pytest from dlpx.virtualization.libs import PlatformHandler -from dlpx.virtualization.libs_pb2 import LogRequest -from dlpx.virtualization.libs_pb2 import LogResult -from dlpx.virtualization.libs_pb2 import LogResponse +from dlpx.virtualization.api.libs_pb2 import LogRequest +from dlpx.virtualization.api.libs_pb2 import LogResult +from dlpx.virtualization.api.libs_pb2 import LogResponse class TestPythonHandler: diff --git a/platform/.gitignore b/platform/.gitignore deleted file mode 100644 index 74641651..00000000 --- a/platform/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -# Ignore generated Java files -src/main/java/ diff --git a/platform/MANIFEST.in b/platform/MANIFEST.in index eb9b6989..5cbde639 100644 --- a/platform/MANIFEST.in +++ b/platform/MANIFEST.in @@ -2,4 +2,5 @@ # Copyright (c) 2019 by Delphix. All rights reserved. 
# -include LICENSE \ No newline at end of file +include LICENSE +include src/main/python/dlpx/virtualization/platform/VERSION diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock deleted file mode 100644 index 90b16593..00000000 --- a/platform/Pipfile.lock +++ /dev/null @@ -1,208 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "e567de6adc30e8abadddcd29f509b4e4862f1e120f72347a1e9ad335d43f07f9" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", - "verifySsl": true - } - ] - }, - "default": { - "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.0.tar.gz", - "version": "== 1.0.0" - }, - "enum34": { - "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "index": "delphix", - "markers": "python_version < '3.4'", - "version": "==1.1.6" - }, - "protobuf": { - "hashes": [ - "sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4", - "sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811", - "sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444", - "sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96", - "sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2", - "sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef", - "sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e", - "sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995", - "sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed", - "sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9", - "sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90", - "sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19", - "sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625", - "sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9", - "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", - "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" - ], - "index": "delphix", - "version": "==3.6.1" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - } - }, - "develop": { - "atomicwrites": { - "hashes": [ - "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", - "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" - ], - "version": "==1.3.0" - }, - "attrs": { - "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" - ], - "version": "==19.1.0" - }, - "configparser": { - "hashes": [ - "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", - "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" - ], - "markers": "python_version < '3'", - "version": "==3.7.4" - }, - "contextlib2": { - "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - 
"sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" - ], - "markers": "python_version < '3'", - "version": "==0.5.5" - }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.3'", - "version": "==1.0.2" - }, - "importlib-metadata": { - "hashes": [ - "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8", - "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3" - ], - "version": "==0.19" - }, - "mock": { - "hashes": [ - "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", - "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" - ], - "index": "delphix", - "version": "==3.0.5" - }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" - }, - "packaging": { - "hashes": [ - "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9", - "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe" - ], - "version": "==19.1" - }, - "pathlib2": { - "hashes": [ - "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" - ], - "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.4" - }, - "pluggy": { - "hashes": [ - "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", - "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c" - ], - "version": "==0.12.0" - }, - "py": { - "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" - ], - "version": "==1.8.0" - }, - "pyparsing": { - "hashes": [ - "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", - "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" - ], - "version": "==2.4.2" - }, - "pytest": { - "hashes": [ - "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae", - "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6" - ], - "index": "delphix", - "version": "==4.6.4" - }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, - "six": { - "hashes": [ - 
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "wcwidth": { - "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" - ], - "version": "==0.1.7" - }, - "zipp": { - "hashes": [ - "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a", - "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec" - ], - "version": "==0.5.2" - } - } -} diff --git a/platform/build.gradle b/platform/build.gradle deleted file mode 100644 index 988066d1..00000000 --- a/platform/build.gradle +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -plugins { - id "java" - id "com.google.protobuf" - id "delphix.python" -} - -repositories { - mavenCentral() -} - -dependencies { - // Necessary to compile generated java protocol buffer libraries. - compile project(":common") - - // Necessary to compile generated python protocol buffer libraries. - python project(path: ":common", configuration: "python") -} - -protobuf { - - protoc { - artifact = "com.google.protobuf:protoc:3.6.1" - } - - // This activates other protoc language targets. - // https://github.com/google/protobuf-gradle-plugin#default-outputs - generateProtoTasks { - all().each { task -> - task.builtins { - python { - } - } - } - } - generatedFilesBaseDir = "$projectDir/src" -} - -artifacts { - python sdist.distFile -} - -dlpxPython { - sources { - delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" - } - } - - dist { - name = "dvp-platform" - } - - packages { - protobuf { - version = "==3.6.1" - } - enum34 { - markers = "python_version < '3.4'" - } - - "dvp-common" { - version = "== $project.version" - path = file(tasks.getByPath(":common:sdist").getDistFile().toString()) - } - } - - devPackages { - mock { - version = ">=2.0" - } - } - - supportedPythons { - "python2.7" {} - } -} - -/* - * This is a temporary task. 'src/main/java' only contains compiled protobuf classes. Sometimes these get out of date - * and they need to be cleaned up manually. In the long term, everything should probably be built under 'build' so - * the 'clean' task automatically deletes them. In the short term though, this task cleans them up. - */ -task removeProtobufJava(type: Delete) { - delete "${projectDir}/src/main/java" -} - -/* - * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information - * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in - * 'build' so these would be created there, however they still could be created in the 'src' directory if someone runs - * 'setup.py' manually. This is often done during development to install the package for testing. - */ -task removeEggInfo(type: Delete) { - delete "${projectDir}/src/main/python/dvp_platform.egg-info" -} - -task wheel(type: SetupPyTask) { - setupPyCommand "bdist_wheel" - distFile String.format("%s-%s-%s-%s-%s.whl", dist.name.get().replace("-", "_"), "$project.version".replace("-", "_"), "py2", "none", "any") - - dependsOn makeSetupPy -} - -clean.dependsOn('removeProtobufJava') -clean.dependsOn('removeEggInfo') - -// sdist and the python tests depend on the generated python libraries -// from the protoc plugin. 
Must manually specify as plugins are not aware -// of each other. -project.afterEvaluate { - tasks["sdist"].dependsOn tasks["generateProto"] - tasks["test_python2.7"].dependsOn tasks["generateProto"] -} diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt deleted file mode 100644 index f4c45d02..00000000 --- a/platform/lock.dev-requirements.txt +++ /dev/null @@ -1,19 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -atomicwrites==1.3.0 -attrs==19.1.0 -configparser==3.7.4 ; python_version < '3' -contextlib2==0.5.5 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==0.19 -mock==3.0.5 -more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.1 -pathlib2==2.3.4 ; python_version == '3.4.*' or python_version < '3' -pluggy==0.12.0 -py==1.8.0 -pyparsing==2.4.2 -pytest==4.6.4 -scandir==1.10.0 ; python_version < '3.5' -six==1.12.0 -wcwidth==0.1.7 -zipp==0.5.2 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt deleted file mode 100644 index b5d44c13..00000000 --- a/platform/lock.requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.0.tar.gz -enum34==1.1.6 ; python_version < '3.4' -protobuf==3.6.1 -six==1.12.0 diff --git a/platform/requirements.txt b/platform/requirements.txt new file mode 100644 index 00000000..9875b2d2 --- /dev/null +++ b/platform/requirements.txt @@ -0,0 +1,17 @@ +./../common +bump2version==0.5.11 +contextlib2==0.6.0.post1 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.3' +importlib-metadata==1.3.0 ; python_version < '3.8' +mock==3.0.5 +more-itertools==5.0.0 ; python_version <= '2.7' +packaging==20.0 +pathlib2==2.3.5 ; python_version < '3' +pluggy==0.13.1 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 +scandir==1.10.0 ; python_version < '3.5' +six==1.13.0 +wcwidth==0.1.8 +zipp==0.6.0 diff --git a/platform/setup.py b/platform/setup.py new file mode 100644 index 00000000..e8e6eb5b --- /dev/null +++ b/platform/setup.py @@ -0,0 +1,20 @@ +import os +import setuptools + +PYTHON_SRC = 'src/main/python' + +with open(os.path.join(PYTHON_SRC, 'dlpx/virtualization/platform/VERSION')) as version_file: + version = version_file.read().strip() + +install_requires = [ + "dvp-api == 1.1.0", + "dvp-common == {}".format(version), + "enum34;python_version < '3.4'", +] + +setuptools.setup(name='dvp-platform', + version=version, + install_requires=install_requires, + package_dir={'': PYTHON_SRC}, + packages=setuptools.find_packages(PYTHON_SRC), +) diff --git a/platform/src/main/proto/dlpx/virtualization/platform.proto b/platform/src/main/proto/dlpx/virtualization/platform.proto deleted file mode 100644 index e87c640b..00000000 --- a/platform/src/main/proto/dlpx/virtualization/platform.proto +++ /dev/null @@ -1,463 +0,0 @@ -/* - * Copyright (c) 2018, 2019 by Delphix. All rights reserved. - */ - -syntax = "proto3"; - -import "dlpx/virtualization/common.proto"; - -option java_multiple_files = true; - -package com.delphix.virtualization.platform; - -/* - * NOTE: - * Currently, there are some operations that do not expect a return value - - * e.g - pre-snapshot (direct and staged), start staging, stop staging etc. - * So this file contains empty result types for all such operations as a - * placeholder. 
Defining a valid return type as a placeholder and still return - * empty message helps if something needs to be added as part of the result in - * future. - */ - -/* ERROR REPORTING */ - -/* - * This indicates that a generic problem has been detected. - * These include syntax errors and any other kind of errors. - */ -message GenericPluginError { - string message = 1; - string type = 2; - string call_stack = 3; -} - -/* - * This indicates that a plugin elected not to handle an error raised by a - * library call. - * - error_id can be used to look up the original exception from the callback - worker. - * - call_stack gives the location in Python code whence the erroring library - call was executed. - */ -message UnhandledLibraryError { - uint32 error_id = 1; - string message = 2; - string call_stack = 3; -} - -/* - * This indicates that there was a Plugin runtime error for example if the - * implemented operation returns incorrect typed object back. - */ -message PluginRuntimeError { - string message = 1; - string call_stack = 2; -} - -/* - * This indicates that one of our generated classes raised an error. For - * example, such an error will happen if the schema defines a string property, - * but the plugin code tries to set it to an integer. - */ -message GeneratedClassesError { - string message = 1; - string call_stack = 2; -} - -/* - * This indicates that there was a UserError raised by plugin author. - */ -message UserError { - string message = 1; - string action = 2; - string output = 3; -} - -/* - * More error types will get inserted here as more specific errors are created. - * When a new error is added, they should be put to the end of the list, - * incrementing the field tag by one. The number should never be modified to - * guarantee backwards compatibility. 
- */ -message PluginErrorResult { - oneof error { - GenericPluginError generic_plugin_error = 1; - UnhandledLibraryError unhandled_library_error = 2; - PluginRuntimeError plugin_runtime_error = 3; - GeneratedClassesError generated_classes_error = 4; - UserError user_error = 5; - } -} - -/* DISCOVERY */ - -message RepositoryDiscoveryRequest { - com.delphix.virtualization.common.RemoteConnection source_connection = 1; -} - -message RepositoryDiscoveryResult { - repeated com.delphix.virtualization.common.Repository repositories = 1; -} - -message RepositoryDiscoveryResponse { - oneof result { - RepositoryDiscoveryResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message SourceConfigDiscoveryRequest { - com.delphix.virtualization.common.RemoteConnection source_connection = 1; - com.delphix.virtualization.common.Repository repository = 2; -} - -message SourceConfigDiscoveryResult { - repeated com.delphix.virtualization.common.SourceConfig source_configs = 1; -} - -message SourceConfigDiscoveryResponse { - oneof result { - SourceConfigDiscoveryResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* DIRECT LINKING */ - -message DirectPreSnapshotRequest { - com.delphix.virtualization.common.DirectSource direct_source = 1; - com.delphix.virtualization.common.Repository repository = 2; - com.delphix.virtualization.common.SourceConfig source_config = 3; -} - -message DirectPreSnapshotResult { } - -message DirectPreSnapshotResponse { - oneof result { - DirectPreSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message DirectPostSnapshotRequest { - com.delphix.virtualization.common.DirectSource direct_source = 1; - com.delphix.virtualization.common.Repository repository = 2; - com.delphix.virtualization.common.SourceConfig source_config = 3; -} - -message DirectPostSnapshotResult { - com.delphix.virtualization.common.Snapshot snapshot = 1; -} - -message DirectPostSnapshotResponse { - oneof result { - DirectPostSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* STAGED LINKING */ - -message StagedPreSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; - com.delphix.virtualization.common.SnapshotParameters snapshot_parameters = 4; -} - -message StagedPreSnapshotResult { } - -message StagedPreSnapshotResponse { - oneof result { - StagedPreSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedPostSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; - com.delphix.virtualization.common.SnapshotParameters snapshot_parameters = 4; -} - -message StagedPostSnapshotResult { - com.delphix.virtualization.common.Snapshot snapshot = 1; -} - -message StagedPostSnapshotResponse { - oneof result { - StagedPostSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StartStagingRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StartStagingResult { } - -message StartStagingResponse { - oneof result { - StartStagingResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message 
StopStagingRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StopStagingResult { } - -message StopStagingResponse { - oneof result { - StopStagingResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedStatusRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StagedStatusResult { - enum Status { - ACTIVE = 0; - INACTIVE = 1; - } - Status status = 1; -} - -message StagedStatusResponse { - oneof result { - StagedStatusResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedWorkerRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StagedWorkerResult { } - -message StagedWorkerResponse { - oneof result { - StagedWorkerResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedMountSpecRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.StagedSource staged_source = 2; -} - -message StagedMountSpecResult { - com.delphix.virtualization.common.SingleEntireMount staged_mount = 1; - com.delphix.virtualization.common.OwnershipSpec ownership_spec = 2; -} - -message StagedMountSpecResponse { - oneof result { - StagedMountSpecResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* PROVISIONING */ - -message ConfigureRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.Snapshot snapshot = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message ConfigureResult { - com.delphix.virtualization.common.SourceConfig source_config = 1; -} - -message ConfigureResponse { - oneof result { - ConfigureResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message UnconfigureRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; - bool deleteFlag = 4; -} - -message UnconfigureResult { } - -message UnconfigureResponse { - oneof result { - UnconfigureResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message ReconfigureRequest { - com.delphix.virtualization.common.Snapshot snapshot = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; - com.delphix.virtualization.common.Repository repository = 4; -} - -message ReconfigureResult { - com.delphix.virtualization.common.SourceConfig source_config = 1; -} - -message ReconfigureResponse { - oneof result { - ReconfigureResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StartRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message StartResult { } - -message StartResponse { - oneof result { - StartResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StopRequest { - 
com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message StopResult { } - -message StopResponse { - oneof result { - StopResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualPreSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message VirtualPreSnapshotResult { } - -message VirtualPreSnapshotResponse { - oneof result { - VirtualPreSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualPostSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message VirtualPostSnapshotResult { - com.delphix.virtualization.common.Snapshot snapshot = 1; -} - -message VirtualPostSnapshotResponse { - oneof result { - VirtualPostSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualStatusRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message VirtualStatusResult { - enum Status { - ACTIVE = 0; - INACTIVE = 1; - } - Status status = 1; -} - -message VirtualStatusResponse { - oneof result { - VirtualStatusResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message InitializeRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message InitializeResult { } - -message InitializeResponse { - oneof result { - InitializeResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualMountSpecRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.VirtualSource virtual_source = 2; -} - -message VirtualMountSpecResult { - com.delphix.virtualization.common.OwnershipSpec ownership_spec = 1; - repeated com.delphix.virtualization.common.SingleSubsetMount mounts = 2; -} - -message VirtualMountSpecResponse { - oneof result { - VirtualMountSpecResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* UPGRADE */ - -message UpgradeLinkedSourceRequest { - com.delphix.virtualization.common.LinkedSource linked_source = 1; -} - -message UpgradeVirtualSourceRequest { - com.delphix.virtualization.common.VirtualSource virtual_source = 1; -} - -message UpgradeSourceConfigRequest { - com.delphix.virtualization.common.SourceConfig source_config = 1; -} - -message UpgradeSnapshotRequest { - com.delphix.virtualization.common.Snapshot snapshot = 1; -} diff --git a/platform/src/main/python/dlpx/virtualization/platform/VERSION b/platform/src/main/python/dlpx/virtualization/platform/VERSION new file mode 100644 index 00000000..359a5b95 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/VERSION @@ -0,0 +1 @@ +2.0.0 \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index 20a2dd90..fe3b144f 100644 
--- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -4,5 +4,15 @@ __path__ = __import__('pkgutil').extend_path(__path__, __name__) + +from dlpx.virtualization.platform.migration_id_set import * +from dlpx.virtualization.platform.validation_util import * from dlpx.virtualization.platform._plugin_classes import * +from dlpx.virtualization.platform._discovery import * +from dlpx.virtualization.platform._linked import * +from dlpx.virtualization.platform._upgrade import * +from dlpx.virtualization.platform._virtual import * from dlpx.virtualization.platform._plugin import * +from dlpx.virtualization.platform.util import * +from dlpx.virtualization.platform.import_util import * +from dlpx.virtualization.platform.import_validations import * diff --git a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py new file mode 100644 index 00000000..0bcbd0f6 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py @@ -0,0 +1,163 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +# -*- coding: utf-8 -*- + +"""DiscoveryOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['DiscoveryOperations'] + + +class DiscoveryOperations(object): + + def __init__(self): + self.repository_impl = None + self.source_config_impl = None + + def repository(self): + def repository_decorator(repository_impl): + if self.repository_impl: + raise OperationAlreadyDefinedError(Op.DISCOVERY_REPOSITORY) + + self.repository_impl = v.check_function(repository_impl, + Op.DISCOVERY_REPOSITORY) + return repository_impl + return repository_decorator + + def source_config(self): + def source_config_decorator(source_config_impl): + if self.source_config_impl: + raise OperationAlreadyDefinedError(Op.DISCOVERY_SOURCE_CONFIG) + self.source_config_impl = v.check_function( + source_config_impl, Op.DISCOVERY_SOURCE_CONFIG) + return source_config_impl + return source_config_decorator + + def _internal_repository(self, request): + """Repository discovery wrapper. + + Executed just after adding or refreshing an environment. This plugin + operation is run prior to discovering source configs. This plugin + operation returns a list of repositories installed on an environment. + + Discover the repositories on an environment given a source connection. + + Args: + request (RepositoryDiscoveryRequest): Repository + Discovery operation arguments. + + Returns: + RepositoryDiscoveryResponse: The return value of repository + discovery operation.
+ """ + from generated.definitions import RepositoryDefinition + + def to_protobuf(repository): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(repository.to_dict()) + repository_protobuf = common_pb2.Repository() + repository_protobuf.parameters.CopyFrom(parameters) + return repository_protobuf + + if not self.repository_impl: + raise OperationNotDefinedError(Op.DISCOVERY_REPOSITORY) + + repositories = self.repository_impl( + source_connection=RemoteConnection.from_proto(request.source_connection)) + + # Validate that this is a list of Repository objects + if not isinstance(repositories, list): + raise IncorrectReturnTypeError( + Op.DISCOVERY_REPOSITORY, + type(repositories), + [RepositoryDefinition]) + + if not all(isinstance(repo, RepositoryDefinition) + for repo in repositories): + raise IncorrectReturnTypeError( + Op.DISCOVERY_REPOSITORY, + [type(repo) for repo in repositories], + [RepositoryDefinition]) + + repository_discovery_response = ( + platform_pb2.RepositoryDiscoveryResponse()) + repository_protobuf_list = [to_protobuf(repo) for repo in repositories] + repository_discovery_response.return_value.repositories.extend( + repository_protobuf_list) + return repository_discovery_response + + def _internal_source_config(self, request): + """Source config discovery wrapper. + + Executed when adding or refreshing an environment. This plugin + operation is run after discovering repositories and before + persisting/updating repository and source config data in MDS. This + plugin operation returns a list of source configs from a discovered + repository. + + Discover the source configs on an environment given a discovered + repository. + + Args: + request (SourceConfigDiscoveryRequest): Source + Config Discovery arguments. + + Returns: + SourceConfigDiscoveryResponse: The return value of source config + discovery operation. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + def to_protobuf(source_config): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(source_config.to_dict()) + source_config_protobuf = common_pb2.SourceConfig() + source_config_protobuf.parameters.CopyFrom(parameters) + return source_config_protobuf + + if not self.source_config_impl: + raise OperationNotDefinedError(Op.DISCOVERY_SOURCE_CONFIG) + + repository_definition = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + source_configs = self.source_config_impl( + source_connection=RemoteConnection.from_proto(request.source_connection), + repository=repository_definition) + + # Validate that this is a list of SourceConfigDefinition objects + if not isinstance(source_configs, list): + raise IncorrectReturnTypeError( + Op.DISCOVERY_SOURCE_CONFIG, + type(source_configs), + [SourceConfigDefinition]) + + if not all(isinstance(config, SourceConfigDefinition) + for config in source_configs): + raise IncorrectReturnTypeError( + Op.DISCOVERY_SOURCE_CONFIG, + [type(config) for config in source_configs], + [SourceConfigDefinition]) + + source_config_discovery_response = ( + platform_pb2.SourceConfigDiscoveryResponse()) + source_config_protobuf_list = [to_protobuf(config) + for config in source_configs] + source_config_discovery_response.return_value.source_configs.extend( + source_config_protobuf_list) + return source_config_discovery_response diff --git a/platform/src/main/python/dlpx/virtualization/platform/_linked.py b/platform/src/main/python/dlpx/virtualization/platform/_linked.py new file mode 100644 index 00000000..e06094cc --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_linked.py @@ -0,0 +1,678 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +# -*- coding: utf-8 -*- + +"""LinkedOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.common.exceptions import PluginRuntimeError +from dlpx.virtualization.platform import Status +from dlpx.virtualization.platform import DirectSource +from dlpx.virtualization.platform import StagedSource +from dlpx.virtualization.platform import Mount +from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['LinkedOperations'] + + +class LinkedOperations(object): + + def __init__(self): + self.pre_snapshot_impl = None + self.post_snapshot_impl = None + self.start_staging_impl = None + self.stop_staging_impl = None + self.status_impl = None + self.worker_impl = None + self.mount_specification_impl = None + + def pre_snapshot(self): + def pre_snapshot_decorator(pre_snapshot_impl): + if self.pre_snapshot_impl: + raise OperationAlreadyDefinedError(Op.LINKED_PRE_SNAPSHOT) + self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, + Op.LINKED_PRE_SNAPSHOT) + return pre_snapshot_impl + return pre_snapshot_decorator + + def post_snapshot(self): + def post_snapshot_decorator(post_snapshot_impl): + if self.post_snapshot_impl: + raise OperationAlreadyDefinedError(Op.LINKED_POST_SNAPSHOT) + self.post_snapshot_impl = v.check_function(post_snapshot_impl, + Op.LINKED_POST_SNAPSHOT) + return post_snapshot_impl + return post_snapshot_decorator + + def start_staging(self): + def start_staging_decorator(start_staging_impl): + if self.start_staging_impl: + raise OperationAlreadyDefinedError(Op.LINKED_START_STAGING) + self.start_staging_impl = v.check_function(start_staging_impl, + Op.LINKED_START_STAGING) + return start_staging_impl + return start_staging_decorator + + def stop_staging(self): + def stop_staging_decorator(stop_staging_impl): + if self.stop_staging_impl: + raise OperationAlreadyDefinedError(Op.LINKED_STOP_STAGING) + self.stop_staging_impl = v.check_function(stop_staging_impl, + Op.LINKED_STOP_STAGING) + return stop_staging_impl + return stop_staging_decorator + + def status(self): + def status_decorator(status_impl): + if self.status_impl: + raise OperationAlreadyDefinedError(Op.LINKED_STATUS) + self.status_impl = v.check_function(status_impl, Op.LINKED_STATUS) + return status_impl + return status_decorator + + def worker(self): + def worker_decorator(worker_impl): + if self.worker_impl: + raise OperationAlreadyDefinedError(Op.LINKED_WORKER) + self.worker_impl = v.check_function(worker_impl, Op.LINKED_WORKER) + return worker_impl + return worker_decorator + + def mount_specification(self): + def mount_specification_decorator(mount_specification_impl): + if self.mount_specification_impl: + raise OperationAlreadyDefinedError( + Op.LINKED_MOUNT_SPEC) + self.mount_specification_impl = v.check_function( + mount_specification_impl, Op.LINKED_MOUNT_SPEC) + return mount_specification_impl + return mount_specification_decorator + + def _internal_direct_pre_snapshot(self, request): + """Pre Snapshot Wrapper for direct plugins. + + Executed before creating a snapshot. 
This plugin + operation is run prior to creating a snapshot for a direct source. + + Run pre-snapshot operation for a direct source. + + Args: + request (DirectPreSnapshotRequest): Pre Snapshot arguments. + + Returns: + DirectPreSnapshotResponse: A response containing + DirectPreSnapshotResult if successful or PluginErrorResult in case + of an error. + """ + # Reasoning for method imports is in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) + + direct_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.direct_source.linked_source.parameters.json)) + direct_source = DirectSource( + guid=request.direct_source.linked_source.guid, + connection=RemoteConnection.from_proto(request.direct_source.connection), + parameters=direct_source_definition) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.pre_snapshot_impl( + direct_source=direct_source, + repository=repository, + source_config=source_config) + + direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse() + direct_pre_snapshot_response.return_value.CopyFrom( + platform_pb2.DirectPreSnapshotResult()) + + return direct_pre_snapshot_response + + def _internal_direct_post_snapshot(self, request): + """Post Snapshot Wrapper for direct plugins. + + Executed after creating a snapshot. This plugin + operation is run after creating a snapshot for a direct source. + + Run post-snapshot operation for a direct source. + + Args: + request (DirectPostSnapshotRequest): Post Snapshot arguments. + + Returns: + DirectPostSnapshotResponse: A response containing the return value - + DirectPostSnapshotResult which has the snapshot metadata on success. + In case of errors, response object will contain PluginErrorResult. + """ + # Reasoning for method imports is in this file's docstring.
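+ #
+ # Illustrative sketch only (hypothetical plugin-author code): the
+ # registered implementation receives these keyword arguments and must
+ # return a SnapshotDefinition, which is validated below. Assuming the
+ # plugin object exposes this class as its 'linked' operation group:
+ #
+ #     @plugin.linked.post_snapshot()
+ #     def direct_post_snapshot(direct_source, repository, source_config):
+ #         return SnapshotDefinition(...)
+ #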
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) + + direct_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.direct_source.linked_source.parameters.json)) + direct_source = DirectSource( + guid=request.direct_source.linked_source.guid, + connection=RemoteConnection.from_proto(request.direct_source.connection), + parameters=direct_source_definition) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + snapshot = self.post_snapshot_impl( + direct_source=direct_source, + repository=repository, + source_config=source_config) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + direct_post_snapshot_response = ( + platform_pb2.DirectPostSnapshotResponse()) + direct_post_snapshot_response.return_value.snapshot.CopyFrom( + to_protobuf(snapshot)) + + return direct_post_snapshot_response + + def _internal_staged_pre_snapshot(self, request): + """Pre Snapshot Wrapper for staged plugins. + + Executed before creating a snapshot. This plugin + operation is run prior to creating a snapshot for a staged source. + + Run pre-snapshot operation for a staged source. + + Args: + request (StagedPreSnapshotRequest): Pre Snapshot arguments. + + Returns: + StagedPreSnapshotResponse: A response containing + StagedPreSnapshotResult if successful or PluginErrorResult + in case of an error. + """ + # Reasoning for method imports is in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotParametersDefinition + + # + # While linked.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented.
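+ #
+ # A hypothetical staged implementation would be registered as:
+ #
+ #     @plugin.linked.pre_snapshot()
+ #     def staged_pre_snapshot(staged_source, repository, source_config,
+ #                             snapshot_parameters):
+ #         pass  # e.g. quiesce the staging database before the snapshot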
+ # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) + + linked_source = request.staged_source.linked_source + staged_source_definition = (LinkedSourceDefinition.from_dict( + json.loads(linked_source.parameters.json))) + staged_mount = request.staged_source.staged_mount + mount = Mount( + remote_environment=RemoteEnvironment.from_proto(staged_mount.remote_environment), + mount_path=staged_mount.mount_path, + shared_path=staged_mount.shared_path) + staged_source = StagedSource( + guid=linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + snapshot_parameters = SnapshotParametersDefinition.from_dict( + json.loads(request.snapshot_parameters.parameters.json)) + + self.pre_snapshot_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config, + snapshot_parameters=snapshot_parameters) + + response = platform_pb2.StagedPreSnapshotResponse() + response.return_value.CopyFrom(platform_pb2.StagedPreSnapshotResult()) + + return response + + def _internal_staged_post_snapshot(self, request): + """Post Snapshot Wrapper for staged plugins. + + Executed after creating a snapshot. This plugin + operation is run after creating a snapshot for a staged source. + + Run post-snapshot operation for a staged source. + + Args: + request (StagedPostSnapshotRequest): Post Snapshot arguments. + + Returns: + StagedPostSnapshotResponse: A response containing the return value + StagedPostSnapshotResult which has the snapshot metadata on + success. In case of errors, response object will contain + PluginErrorResult. + """ + # Reasoning for method imports is in this file's docstring.
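+ #
+ # Unlike the direct variant, a hypothetical staged implementation also
+ # receives the snapshot_parameters keyword argument:
+ #
+ #     @plugin.linked.post_snapshot()
+ #     def staged_post_snapshot(staged_source, repository, source_config,
+ #                              snapshot_parameters):
+ #         return SnapshotDefinition(...)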
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SnapshotParametersDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment= + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + snapshot_parameters = SnapshotParametersDefinition.from_dict( + json.loads(request.snapshot_parameters.parameters.json)) + + snapshot = self.post_snapshot_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config, + snapshot_parameters=snapshot_parameters) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + response = platform_pb2.StagedPostSnapshotResponse() + response.return_value.snapshot.CopyFrom(to_protobuf(snapshot)) + + return response + + def _internal_start_staging(self, request): + """Start staging Wrapper for staged plugins. + + Executed when enabling the staging source. This plugin + operation is run to start the staging source as part + of the enable operation. + + Run start operation for a staged source. + + Args: + request (StartStagingRequest): Start arguments. + + Returns: + StartStagingResponse: A response containing StartStagingResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports is in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.start_staging() is not a required operation, this should + # not be called if it wasn't implemented.
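+ #
+ # A hypothetical implementation, registered with
+ # @plugin.linked.start_staging(), takes (staged_source, repository,
+ # source_config) and brings the staging source back online, mirroring
+ # linked.stop_staging(), which is run when the source is disabled.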
+ # + if not self.start_staging_impl: + raise OperationNotDefinedError(Op.LINKED_START_STAGING) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.start_staging_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + start_staging_response = platform_pb2.StartStagingResponse() + start_staging_response.return_value.CopyFrom( + platform_pb2.StartStagingResult()) + + return start_staging_response + + def _internal_stop_staging(self, request): + """Stop staging Wrapper for staged plugins. + + Executed when disabling the staging source. This plugin + operation is run to stop the staging source as part + of the disable operation. + + Run stop operation for a staged source. + + Args: + request (StopStagingRequest): Stop arguments. + + Returns: + StopStagingResponse: A response containing StopStagingResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports is in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.stop_staging() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.stop_staging_impl: + raise OperationNotDefinedError(Op.LINKED_STOP_STAGING) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.stop_staging_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + stop_staging_response = platform_pb2.StopStagingResponse() + stop_staging_response.return_value.CopyFrom( + platform_pb2.StopStagingResult()) + + return stop_staging_response + + def _internal_status(self, request): + """Staged Status Wrapper for staged plugins.
+ + Executed as part of several operations to get the status + of a staged source - active or inactive. + + Run status operation for a staged source. + + Args: + request (StagedStatusRequest): Status arguments. + + Returns: + StagedStatusResponse: A response containing the return value - + StagedStatusResult which has active or inactive status. In + case of errors, response object will contain PluginErrorResult. + """ + # Reasoning for method imports is in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.status() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.status_impl: + raise OperationNotDefinedError(Op.LINKED_STATUS) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + status = self.status_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + # Validate that this is a Status object. + if not isinstance(status, Status): + raise IncorrectReturnTypeError( + Op.LINKED_STATUS, type(status), Status) + + staged_status_response = platform_pb2.StagedStatusResponse() + staged_status_response.return_value.status = status.value + + return staged_status_response + + def _internal_worker(self, request): + """Staged Worker Wrapper for staged plugins. + + Executed as part of validated sync. This plugin + operation is run to sync the staging source as part + of the validated sync operation. + + Run worker operation for a staged source. + + Args: + request (StagedWorkerRequest): Worker arguments. + + Returns: + StagedWorkerResponse: A response containing StagedWorkerResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports is in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.worker() is not a required operation, this should + # not be called if it wasn't implemented.
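+ #
+ # A hypothetical implementation, registered with @plugin.linked.worker(),
+ # takes (staged_source, repository, source_config) and pulls new data from
+ # the source into the staged mount during validated sync.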
+ # + if not self.worker_impl: + raise OperationNotDefinedError(Op.LINKED_WORKER) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.worker_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + staged_worker_response = platform_pb2.StagedWorkerResponse() + staged_worker_response.return_value.CopyFrom( + platform_pb2.StagedWorkerResult()) + + return staged_worker_response + + def _internal_mount_specification(self, request): + """Staged Mount/Ownership Spec Wrapper for staged plugins. + + Executed before creating a snapshot during sync or before + enable/disable. This plugin operation is run before mounting datasets + on staging to set the mount path and/or ownership. + + Run mount/ownership spec operation for a staged source. + + Args: + request (StagedMountSpecRequest): Mount Spec arguments. + + Returns: + StagedMountSpecResponse: A response containing the return value - + StagedMountSpecResult which has the mount/ownership metadata on + success. In case of errors, response object will contain + PluginErrorResult. + """ + # Reasoning for method imports is in this file's docstring.
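+ #
+ # Illustrative sketch only (hypothetical plugin-author code): the
+ # registered implementation must return a MountSpecification containing
+ # exactly one Mount with no shared path, plus an optional ownership
+ # specification, as validated below. Assuming MountSpecification takes
+ # the list of mounts as its first argument:
+ #
+ #     @plugin.linked.mount_specification()
+ #     def staged_mount_spec(staged_source, repository):
+ #         # 'environment' stands for the staging host's RemoteEnvironment.
+ #         mount = Mount(remote_environment=environment,
+ #                       mount_path='/var/tmp/staged')
+ #         return MountSpecification([mount])
+ #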
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + + def to_protobuf_single_mount(single_mount): + if single_mount.shared_path: + raise PluginRuntimeError( + 'Shared path is not supported for linked sources.') + + single_mount_protobuf = common_pb2.SingleEntireMount() + single_mount_protobuf.mount_path = single_mount.mount_path + single_mount_protobuf.remote_environment.CopyFrom( + single_mount.remote_environment.to_proto()) + return single_mount_protobuf + + def to_protobuf_ownership_spec(ownership_spec): + ownership_spec_protobuf = common_pb2.OwnershipSpec() + ownership_spec_protobuf.uid = ownership_spec.uid + ownership_spec_protobuf.gid = ownership_spec.gid + return ownership_spec_protobuf + + if not self.mount_specification_impl: + raise OperationNotDefinedError(Op.LINKED_MOUNT_SPEC) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + mount_spec = self.mount_specification_impl( + staged_source=staged_source, + repository=repository) + + # Validate that this is a MountSpecification object. + if not isinstance(mount_spec, MountSpecification): + raise IncorrectReturnTypeError( + Op.LINKED_MOUNT_SPEC, + type(mount_spec), + MountSpecification) + + # Only one mount is supported for linked sources. + mount_len = len(mount_spec.mounts) + if mount_len != 1: + raise PluginRuntimeError( + 'Exactly one mount must be provided for staging sources.' + ' Found {}'.format(mount_len)) + + staged_mount = to_protobuf_single_mount(mount_spec.mounts[0]) + + staged_mount_spec_response = platform_pb2.StagedMountSpecResponse() + staged_mount_spec_response.return_value.staged_mount.CopyFrom( + staged_mount) + + # Ownership spec is optional for linked sources. + if mount_spec.ownership_specification: + ownership_spec = to_protobuf_ownership_spec( + mount_spec.ownership_specification) + staged_mount_spec_response.return_value.ownership_spec.CopyFrom( + ownership_spec) + + return staged_mount_spec_response \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py index 4e0debcd..2758f34b 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py @@ -80,1489 +80,21 @@ def my_configure_implementation(source, repository, snapshot): fail. The internal methods should only be called by the platform so it's safe to have the import in the methods as the objects will exist at runtime. 
""" -import json -from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment -from dlpx.virtualization import common_pb2 -from dlpx.virtualization import platform_pb2 -from dlpx.virtualization.common.exceptions import PluginRuntimeError -from dlpx.virtualization.platform import VirtualSource -from dlpx.virtualization.platform import DirectSource -from dlpx.virtualization.platform import StagedSource -from dlpx.virtualization.platform import Status -from dlpx.virtualization.platform import Mount -from dlpx.virtualization.platform import MountSpecification -from dlpx.virtualization.platform.operation import Operation as Op -from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationNotDefinedError, - OperationAlreadyDefinedError) +from dlpx.virtualization.platform import (DiscoveryOperations, + LinkedOperations, + VirtualOperations, + UpgradeOperations) __all__ = ['Plugin'] -class DiscoveryOperations(object): - - def __init__(self): - self.repository_impl = None - self.source_config_impl = None - - def repository(self): - def repository_decorator(repository_impl): - if self.repository_impl: - raise OperationAlreadyDefinedError(Op.DISCOVERY_REPOSITORY) - - self.repository_impl = repository_impl - return repository_impl - return repository_decorator - - def source_config(self): - def source_config_decorator(source_config_impl): - if self.source_config_impl: - raise OperationAlreadyDefinedError(Op.DISCOVERY_SOURCE_CONFIG) - self.source_config_impl = source_config_impl - return source_config_impl - return source_config_decorator - - def _internal_repository(self, request): - """Repository discovery wrapper. - - Executed just after adding or refreshing an environment. This plugin - operation is run prior to discovering source configs. This plugin - operation returns a list of repositories installed on a environment. - - Discover the repositories on an environment given a source connection. - - Args: - request (RepositoryDiscoveryRequest): Repository - Discovery operation arguments. - - Returns: - RepositoryDiscoveryResponse: The return value of repository - discovery operation. - """ - from generated.definitions import RepositoryDefinition - - def to_protobuf(repository): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(repository.to_dict()) - repository_protobuf = common_pb2.Repository() - repository_protobuf.parameters.CopyFrom(parameters) - return repository_protobuf - - if not self.repository_impl: - raise OperationNotDefinedError(Op.DISCOVERY_REPOSITORY) - - repositories = self.repository_impl( - source_connection=RemoteConnection.from_proto(request.source_connection)) - - # Validate that this is a list of Repository objects - if not isinstance(repositories, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - type(repositories), - [RepositoryDefinition]) - - if not all(isinstance(repo, RepositoryDefinition) - for repo in repositories): - raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - [type(repo) for repo in repositories], - [RepositoryDefinition]) - - repository_discovery_response = ( - platform_pb2.RepositoryDiscoveryResponse()) - repository_protobuf_list = [to_protobuf(repo) for repo in repositories] - repository_discovery_response.return_value.repositories.extend( - repository_protobuf_list) - return repository_discovery_response - - def _internal_source_config(self, request): - """Source config discovery wrapper. - - Executed when adding or refreshing an environment. 
This plugin - operation is run after discovering repositories and before - persisting/updating repository and source config data in MDS. This - plugin operation returns a list of source configs from a discovered - repository. - - Discover the source configs on an environment given a discovered - repository. - - Args: - request (SourceConfigDiscoveryRequest): Source - Config Discovery arguments. - - Returns: - SourceConfigDiscoveryResponse: The return value of source config - discovery operation. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - def to_protobuf(source_config): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(source_config.to_dict()) - source_config_protobuf = common_pb2.SourceConfig() - source_config_protobuf.parameters.CopyFrom(parameters) - return source_config_protobuf - - if not self.source_config_impl: - raise OperationNotDefinedError(Op.DISCOVERY_SOURCE_CONFIG) - - repository_definition = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - source_configs = self.source_config_impl( - source_connection=RemoteConnection.from_proto(request.source_connection), - repository=repository_definition) - - # Validate that this is a list of SourceConfigDefinition objects - if not isinstance(source_configs, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_SOURCE_CONFIG, - type(source_configs), - [SourceConfigDefinition]) - - if not all(isinstance(config, SourceConfigDefinition) - for config in source_configs): - raise IncorrectReturnTypeError( - Op.DISCOVERY_SOURCE_CONFIG, - [type(config) for config in source_configs], - [SourceConfigDefinition]) - - source_config_discovery_response = ( - platform_pb2.SourceConfigDiscoveryResponse()) - source_config_protobuf_list = [to_protobuf(config) - for config in source_configs] - source_config_discovery_response.return_value.source_configs.extend( - source_config_protobuf_list) - return source_config_discovery_response - - -class LinkedOperations(object): - - def __init__(self): - self.pre_snapshot_impl = None - self.post_snapshot_impl = None - self.start_staging_impl = None - self.stop_staging_impl = None - self.status_impl = None - self.worker_impl = None - self.mount_specification_impl = None - - def pre_snapshot(self): - def pre_snapshot_decorator(pre_snapshot_impl): - if self.pre_snapshot_impl: - raise OperationAlreadyDefinedError(Op.LINKED_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl - return pre_snapshot_impl - return pre_snapshot_decorator - - def post_snapshot(self): - def post_snapshot_decorator(post_snapshot_impl): - if self.post_snapshot_impl: - raise OperationAlreadyDefinedError(Op.LINKED_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl - return post_snapshot_impl - return post_snapshot_decorator - - def start_staging(self): - def start_staging_decorator(start_staging_impl): - if self.start_staging_impl: - raise OperationAlreadyDefinedError(Op.LINKED_START_STAGING) - self.start_staging_impl = start_staging_impl - return start_staging_impl - return start_staging_decorator - - def stop_staging(self): - def stop_staging_decorator(stop_staging_impl): - if self.stop_staging_impl: - raise OperationAlreadyDefinedError(Op.LINKED_STOP_STAGING) - self.stop_staging_impl = stop_staging_impl - return stop_staging_impl - return stop_staging_decorator - - def status(self): - def status_decorator(status_impl): - 
if self.status_impl: - raise OperationAlreadyDefinedError(Op.LINKED_STATUS) - self.status_impl = status_impl - return status_impl - return status_decorator - - def worker(self): - def worker_decorator(worker_impl): - if self.worker_impl: - raise OperationAlreadyDefinedError(Op.LINKED_WORKER) - self.worker_impl = worker_impl - return worker_impl - return worker_decorator - - def mount_specification(self): - def mount_specification_decorator(mount_specification_impl): - if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.LINKED_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl - return mount_specification_impl - return mount_specification_decorator - - def _internal_direct_pre_snapshot(self, request): - """Pre Snapshot Wrapper for direct plugins. - - Executed before creating a snapshot. This plugin - operation is run prior to creating a snapshot for a direct source. - - Run pre-snapshot operation for a direct source. - - Args: - request (DirectPreSnapshotRequest): Pre Snapshot arguments. - - Returns: - DirectPreSnapshotResponse: A response containing - DirectPreSnapshotResult if successful or PluginErrorResult in case - of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) - - direct_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.direct_source.linked_source.parameters.json)) - direct_source = DirectSource( - guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), - parameters=direct_source_definition) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.pre_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) - - direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse() - direct_pre_snapshot_response.return_value.CopyFrom( - platform_pb2.DirectPreSnapshotResult()) - - return direct_pre_snapshot_response - - def _internal_direct_post_snapshot(self, request): - """Post Snapshot Wrapper for direct plugins. - - Executed after creating a snapshot. This plugin - operation is run after creating a snapshot for a direct source. - - Run post-snapshot operation for a direct source. - - Args: - request (DirectPostSnapshotRequest): Post Snapshot arguments. - - Returns: - DirectPostSnapshotResponse: A response containing the return value - - DirectPostSnapshotResult which has the snapshot metadata on success. - In case of errors, response object will contain PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. 
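
The decorator registrations above and the `_internal_direct_pre_snapshot` wrapper fix the plugin-side contract: each operation is registered exactly once, and the wrapper invokes it with keyword arguments. A minimal sketch of a direct plugin, assuming the generated plugin module instantiates `Plugin` as `plugin` (as the SDK templates do) and that the discovery decorator follows the same registration pattern; all schema fields are hypothetical and come from the plugin's own definitions:

```python
from dlpx.virtualization.platform import Plugin

plugin = Plugin()


# Discovery must return a list of SourceConfigDefinition objects; the
# wrapper rejects any other return type with IncorrectReturnTypeError.
@plugin.discovery.source_config()
def find_source_configs(source_connection, repository):
    from generated.definitions import SourceConfigDefinition
    return [SourceConfigDefinition()]  # populate schema-defined fields here


# Direct-source pre-snapshot: keyword names match the wrapper's call above.
@plugin.linked.pre_snapshot()
def direct_pre_snapshot(direct_source, repository, source_config):
    pass  # quiesce the source before the snapshot is taken
```
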
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) - - direct_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.direct_source.linked_source.parameters.json)) - direct_source = DirectSource( - guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), - parameters=direct_source_definition) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - snapshot = self.post_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - direct_post_snapshot_response = ( - platform_pb2.DirectPostSnapshotResponse()) - direct_post_snapshot_response.return_value.snapshot.CopyFrom( - to_protobuf(snapshot)) - - return direct_post_snapshot_response - - def _internal_staged_pre_snapshot(self, request): - """Pre Snapshot Wrapper for staged plugins. - - Executed before creating a snapshot. This plugin - operation is run prior to creating a snapshot for a staged source. - - Run pre-snapshot operation for a staged source. - - Args: - request (StagedPreSnapshotRequest): Pre Snapshot arguments. - - Returns: - StagedPreSnapshotResponse: A response containing - StagedPreSnapshotResult if successful or PluginErrorResult - in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotParametersDefinition - - # - # While linked.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) - - linked_source = request.staged_source.linked_source - staged_source_definition = (LinkedSourceDefinition.from_dict( - json.loads(linked_source.parameters.json))) - staged_mount = request.staged_source.staged_mount - mount = Mount( - remote_environment=RemoteEnvironment.from_proto(staged_mount.remote_environment), - mount_path=staged_mount.mount_path, - shared_path=staged_mount.shared_path) - staged_source = StagedSource( - guid=linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - snapshot_parameters = SnapshotParametersDefinition.from_dict( - json.loads(request.snapshot_parameters.parameters.json)) - - self.pre_snapshot_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config, - snapshot_parameters=snapshot_parameters) - - response = platform_pb2.StagedPreSnapshotResponse() - response.return_value.CopyFrom(platform_pb2.StagedPreSnapshotResult()) - - return response - - def _internal_staged_post_snapshot(self, request): - """Post Snapshot Wrapper for staged plugins. - - Executed after creating a snapshot. This plugin - operation is run after creating a snapshot for a staged source. - - Run post-snapshot operation for a staged source. - - Args: - request (StagedPostSnapshotRequest): Post Snapshot arguments. - - Returns: - StagedPostSnapshotResponse: A response containing the return value - StagedPostSnapshotResult which has the snapshot metadata on - success. In case of errors, response object will contain - PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. 
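
For staged sources the wrappers above deserialize each JSON-encoded definition before the call, and post-snapshot must hand back a `SnapshotDefinition`. A sketch for a staged plugin, reusing the `plugin` object from the earlier sketch (a given plugin registers each operation only once, with either the direct or the staged signature):

```python
@plugin.linked.pre_snapshot()
def staged_pre_snapshot(staged_source, repository, source_config,
                        snapshot_parameters):
    pass  # e.g. flush buffers on the staging host before the snapshot


@plugin.linked.post_snapshot()
def staged_post_snapshot(staged_source, repository, source_config,
                         snapshot_parameters):
    from generated.definitions import SnapshotDefinition
    # The wrapper serializes this object back to JSON for the engine.
    return SnapshotDefinition()  # populate schema-defined fields here
```
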
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SnapshotParametersDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment= - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - snapshot_parameters = SnapshotParametersDefinition.from_dict( - json.loads(request.snapshot_parameters.parameters.json)) - - snapshot = self.post_snapshot_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config, - snapshot_parameters=snapshot_parameters) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - response = platform_pb2.StagedPostSnapshotResponse() - response.return_value.snapshot.CopyFrom(to_protobuf(snapshot)) - - return response - - def _internal_start_staging(self, request): - """Start staging Wrapper for staged plugins. - - Executed when enabling the staging source. This plugin - operation is run to start the staging source as part - of the enable operation. - - Run start operation for a staged source. - - Args: - request (StartStagingRequest): Start arguments. - - Returns: - StartStagingResponse: A response containing StartStagingResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.start_staging() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.start_staging_impl: - raise OperationNotDefinedError(Op.LINKED_START_STAGING) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.start_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - start_staging_response = platform_pb2.StartStagingResponse() - start_staging_response.return_value.CopyFrom( - platform_pb2.StartStagingResult()) - - return start_staging_response - - def _internal_stop_staging(self, request): - """Stop staging Wrapper for staged plugins. - - Executed when disabling the staging source. This plugin - operation is run to stop the staging source as part - of the disable operation. - - Run stop operation for a staged source. - - Args: - request (StopStagingRequest): Stop arguments. - - Returns: - StopStagingResponse: A response containing StopStagingResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.stop_staging() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.stop_staging_impl: - raise OperationNotDefinedError(Op.LINKED_STOP_STAGING) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.stop_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - stop_staging_response = platform_pb2.StopStagingResponse() - stop_staging_response.return_value.CopyFrom( - platform_pb2.StopStagingResult()) - - return stop_staging_response - - def _internal_status(self, request): - """Staged Status Wrapper for staged plugins. 
- - Executed as part of several operations to get the status - of a staged source - active or inactive. - - Run status operation for a staged source. - - Args: - request (StagedStatusRequest): Post Snapshot arguments. - - Returns: - StagedStatusResponse: A response containing the return value - - StagedStatusResult which has active or inactive status. In - case of errors, response object will contain PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.status() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.status_impl: - raise OperationNotDefinedError(Op.LINKED_STATUS) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - status = self.status_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - # Validate that this is a Status object. - if not isinstance(status, Status): - raise IncorrectReturnTypeError( - Op.LINKED_STATUS, type(status), Status) - - staged_status_response = platform_pb2.StagedStatusResponse() - staged_status_response.return_value.status = status.value - - return staged_status_response - - def _internal_worker(self, request): - """Staged Worker Wrapper for staged plugins. - - Executed as part of validated sync. This plugin - operation is run to sync staging source as part - of the validated sync operation. - - Run worker operation for a staged source. - - Args: - request (StagedWorkerRequest): Worker arguments. - - Returns: - StagedWorkerResponse: A response containing StagedWorkerResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.worker() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.worker_impl: - raise OperationNotDefinedError(Op.LINKED_WORKER) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.worker_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - staged_worker_response = platform_pb2.StagedWorkerResponse() - staged_worker_response.return_value.CopyFrom( - platform_pb2.StagedWorkerResult()) - - return staged_worker_response - - def _internal_mount_specification(self, request): - """Staged Mount/Ownership Spec Wrapper for staged plugins. - - Executed before creating a snapshot during sync or before - enable/disable. This plugin operation is run before mounting datasets - on staging to set the mount path and/or ownership. - - Run mount/ownership spec operation for a staged source. - - Args: - request (StagedMountSpecRequest): Mount Spec arguments. - - Returns: - StagedMountSpecResponse: A response containing the return value - - StagedMountSpecResult which has the mount/ownership metadata on - success. In case of errors, response object will contain - PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. 
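
The staged mount-specification wrapper that follows requires a `MountSpecification` holding exactly one mount with no shared path; the ownership spec stays optional. A sketch reusing `plugin`; the environment reference and paths are hypothetical, the reference-string form of `Mount` relies on the `_plugin_classes.py` change later in this diff, and `OwnershipSpecification` is assumed to be exported alongside `Mount` and `MountSpecification`:

```python
from dlpx.virtualization.platform import Mount, MountSpecification, \
    OwnershipSpecification


@plugin.linked.mount_specification()
def staged_mount_spec(staged_source, repository):
    # Exactly one mount; a shared path raises PluginRuntimeError for
    # staged sources.
    mount = Mount('UNIX_HOST_ENVIRONMENT-7', '/mnt/staging/mydb')
    # The ownership specification is optional for staged sources.
    return MountSpecification([mount], OwnershipSpecification(1001, 1001))
```
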
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - - def to_protobuf_single_mount(single_mount): - if single_mount.shared_path: - raise PluginRuntimeError( - 'Shared path is not supported for linked sources.') - - single_mount_protobuf = common_pb2.SingleEntireMount() - single_mount_protobuf.mount_path = single_mount.mount_path - single_mount_protobuf.remote_environment.CopyFrom( - single_mount.remote_environment.to_proto()) - return single_mount_protobuf - - def to_protobuf_ownership_spec(ownership_spec): - ownership_spec_protobuf = common_pb2.OwnershipSpec() - ownership_spec_protobuf.uid = ownership_spec.uid - ownership_spec_protobuf.gid = ownership_spec.gid - return ownership_spec_protobuf - - if not self.mount_specification_impl: - raise OperationNotDefinedError(Op.LINKED_MOUNT_SPEC) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - mount_spec = self.mount_specification_impl( - staged_source=staged_source, - repository=repository) - - # Validate that this is a MountSpecification object. - if not isinstance(mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.LINKED_MOUNT_SPEC, - type(mount_spec), - MountSpecification) - - # Only one mount is supported for linked sources. - mount_len = len(mount_spec.mounts) - if mount_len != 1: - raise PluginRuntimeError( - 'Exactly one mount must be provided for staging sources.' - ' Found {}'.format(mount_len)) - - staged_mount = to_protobuf_single_mount(mount_spec.mounts[0]) - - staged_mount_spec_response = platform_pb2.StagedMountSpecResponse() - staged_mount_spec_response.return_value.staged_mount.CopyFrom( - staged_mount) - - # Ownership spec is optional for linked sources. 
- if mount_spec.ownership_specification: - ownership_spec = to_protobuf_ownership_spec( - mount_spec.ownership_specification) - staged_mount_spec_response.return_value.ownership_spec.CopyFrom( - ownership_spec) - - return staged_mount_spec_response - - -class VirtualOperations(object): - - def __init__(self): - self.configure_impl = None - self.unconfigure_impl = None - self.reconfigure_impl = None - self.start_impl = None - self.stop_impl = None - self.pre_snapshot_impl = None - self.post_snapshot_impl = None - self.status_impl = None - self.initialize_impl = None - self.mount_specification_impl = None - - def configure(self): - def configure_decorator(configure_impl): - if self.configure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_CONFIGURE) - self.configure_impl = configure_impl - return configure_impl - return configure_decorator - - def unconfigure(self): - def unconfigure_decorator(unconfigure_impl): - if self.unconfigure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_UNCONFIGURE) - self.unconfigure_impl = unconfigure_impl - return unconfigure_impl - return unconfigure_decorator - - def reconfigure(self): - def reconfigure_decorator(reconfigure_impl): - if self.reconfigure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_RECONFIGURE) - self.reconfigure_impl = reconfigure_impl - return reconfigure_impl - return reconfigure_decorator - - def start(self): - def start_decorator(start_impl): - if self.start_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_START) - self.start_impl = start_impl - return start_impl - return start_decorator - - def stop(self): - def stop_decorator(stop_impl): - if self.stop_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) - self.stop_impl = stop_impl - return stop_impl - return stop_decorator - - def pre_snapshot(self): - def pre_snapshot_decorator(pre_snapshot_impl): - if self.pre_snapshot_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl - return pre_snapshot_impl - return pre_snapshot_decorator - - def post_snapshot(self): - def post_snapshot_decorator(post_snapshot_impl): - if self.post_snapshot_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl - return post_snapshot_impl - return post_snapshot_decorator - - def status(self): - def status_decorator(status_impl): - if self.status_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) - self.status_impl = status_impl - return status_impl - return status_decorator - - def initialize(self): - def initialize_decorator(initialize_impl): - if self.initialize_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_INITIALIZE) - self.initialize_impl = initialize_impl - return initialize_impl - return initialize_decorator - - def mount_specification(self): - def mount_specification_decorator(mount_specification_impl): - if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.VIRTUAL_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl - return mount_specification_impl - return mount_specification_decorator - - @staticmethod - def _from_protobuf_single_subset_mount(single_subset_mount): - return Mount( - remote_environment=RemoteEnvironment.from_proto(single_subset_mount.remote_environment), - mount_path=single_subset_mount.mount_path, - shared_path=single_subset_mount.shared_path) - - def _internal_configure(self, request): - """Configure operation wrapper. 
- - Executed just after cloning the captured data and mounting it to a - target environment. Specifically, this plugin operation is run during - provision and refresh, prior to taking the initial snapshot of the - clone. This plugin operation is run before the user-customizable - Configure Clone and Before Refresh operations are run. It must return - a sourceConfig object that represents the new dataset. - - Configure the data to be usable on the target environment. For database - data files, this may mean recovering from a crash consistent format or - backup. For application files, this may mean reconfiguring XML files or - rewriting hostnames and symlinks. - - Args: - request (ConfigureRequest): Configure operation arguments. - - Returns: - ConfigureResponse: A response containing the return value of the - configure operation, as a ConfigureResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - - if not self.configure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_CONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - snapshot = SnapshotDefinition.from_dict( - json.loads(request.snapshot.parameters.json)) - - config = self.configure_impl( - virtual_source=virtual_source, - repository=repository, - snapshot=snapshot) - - # Validate that this is a SourceConfigDefinition object. - if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_CONFIGURE, type(config), SourceConfigDefinition) - - configure_response = platform_pb2.ConfigureResponse() - configure_response.return_value.source_config.parameters.json = ( - json.dumps(config.to_dict())) - return configure_response - - def _internal_unconfigure(self, request): - """Unconfigure operation wrapper. - - Executed when disabling or deleting an existing virtual source which - has already been mounted to a target environment. This plugin operation - is run before unmounting the virtual source from the target - environment. - - Args: - request (UnconfigureRequest): Unconfigure operation arguments. - - Returns: - UnconfigureResponse: A response containing UnconfigureResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.unconfigure() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.unconfigure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_UNCONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.unconfigure_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - unconfigure_response = platform_pb2.UnconfigureResponse() - unconfigure_response.return_value.CopyFrom( - platform_pb2.UnconfigureResult()) - return unconfigure_response - - def _internal_reconfigure(self, request): - """Reconfigure operation wrapper. - - Executed while attaching a VDB during a virtual source enable job and - returns a virtual source config. - - Args: - request (ReconfigureRequest): Reconfigure operation arguments. - - Returns: - ReconfigureResponse: A response containing the return value of the - reconfigure operation, as a ReconfigureResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import RepositoryDefinition - - if not self.reconfigure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_RECONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - snapshot = SnapshotDefinition.from_dict( - json.loads(request.snapshot.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - config = self.reconfigure_impl( - snapshot=snapshot, - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a SourceConfigDefinition object. - if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_RECONFIGURE, type(config), SourceConfigDefinition) - - reconfigure_response = platform_pb2.ReconfigureResponse() - reconfigure_response.return_value.source_config.parameters.json = ( - json.dumps(config.to_dict())) - return reconfigure_response - - def _internal_start(self, request): - """Start operation wrapper. - - Executed after attaching a VDB during a virtual source enable job to - start the database. - - Args: - request (StartRequest): Start operation arguments. - - Returns: - StartResponse: A response containing StartResult if successful or - PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. 
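
Reconfigure is the one enable-path operation with a required return value: a `SourceConfigDefinition` that the wrapper above serializes into the response. A sketch reusing `plugin`, with schema fields again left to the plugin's own definitions:

```python
@plugin.virtual.reconfigure()
def reconfigure(snapshot, repository, source_config, virtual_source):
    from generated.definitions import SourceConfigDefinition
    # Must describe the re-enabled VDB; any other return type raises
    # IncorrectReturnTypeError in the wrapper.
    return SourceConfigDefinition()  # populate schema-defined fields here
```
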
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.start() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.start_impl: - raise OperationNotDefinedError(Op.VIRTUAL_START) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.start_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - start_response = platform_pb2.StartResponse() - start_response.return_value.CopyFrom(platform_pb2.StartResult()) - return start_response - - def _internal_stop(self, request): - """Stop operation wrapper. - - Executed before unmounting a VDB during a virtual source stop job. - - Args: - request (StopRequest): Stop operation arguments. - - Returns: - StopResponse: A response containing StopResult if successful or - PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.stop() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.stop_impl: - raise OperationNotDefinedError(Op.VIRTUAL_STOP) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.stop_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - stop_response = platform_pb2.StopResponse() - stop_response.return_value.CopyFrom(platform_pb2.StopResult()) - return stop_response - - def _internal_pre_snapshot(self, request): - """Virtual pre snapshot operation wrapper. - - Executed before creating a ZFS snapshot. This plugin operation is run - prior to creating a snapshot for a virtual source. - - Run pre-snapshot operation for a virtual source. - - Args: - virtual_pre_snapshot_request (VirtualPreSnapshotRequest): - Virtual pre snapshot operation arguments. - - Returns: - VirtualPreSnapshotResponse: A response containing - VirtualPreSnapshotResult if successful or PluginErrorResult in case - of an error. - """ - # Reasoning for method imports are in this file's docstring. 
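
Start and stop are optional, and their return values are discarded, so the wrappers only report success or a `PluginErrorResult`. A sketch reusing `plugin`; the actual commands each body would run are left out:

```python
@plugin.virtual.start()
def start_vdb(repository, source_config, virtual_source):
    pass  # e.g. run a database start command over virtual_source.connection


@plugin.virtual.stop()
def stop_vdb(repository, source_config, virtual_source):
    pass  # e.g. shut the database down before the engine unmounts it
```
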
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.pre_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - virtual_pre_snapshot_response = ( - platform_pb2.VirtualPreSnapshotResponse()) - virtual_pre_snapshot_response.return_value.CopyFrom( - platform_pb2.VirtualPreSnapshotResult()) - return virtual_pre_snapshot_response - - def _internal_post_snapshot(self, request): - """Virtual post snapshot operation wrapper. - - Executed after creating a ZFS snapshot. This plugin operation is run - after creating a snapshot for a virtual source. - - Run post-snapshot operation for a virtual source. - - Args: - request (VirtualPostSnapshotRequest): Virtual post snapshot operation - arguments. - - Returns: - VirtualPostSnapshotResponse: A response containing the return value - of the virtual post snapshot operation, as a - VirtualPostSnapshotResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.VIRTUAL_POST_SNAPSHOT) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - snapshot = self.post_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - virtual_post_snapshot_response = ( - platform_pb2.VirtualPostSnapshotResponse()) - virtual_post_snapshot_response.return_value.snapshot.CopyFrom( - to_protobuf(snapshot)) - return virtual_post_snapshot_response - - def _internal_status(self, request): - """Virtual status operation wrapper. - - Executed to get the status of a virtual source - active or inactive. - - Run status operation for a virtual source. - - Args: - request (VirtualStatusRequest): - Virtual status operation arguments. - - Returns: - VirtualStatusResponse: A response containing VirtualStatusResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.status() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.status_impl: - raise OperationNotDefinedError(Op.VIRTUAL_STATUS) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - virtual_status = self.status_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a Status object. 
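
A status implementation must return a member of the `Status` enum, or the wrapper raises `IncorrectReturnTypeError`, as the check below shows. A sketch reusing `plugin`, with the liveness probe stubbed out:

```python
from dlpx.virtualization.platform import Status


@plugin.virtual.status()
def vdb_status(repository, source_config, virtual_source):
    running = True  # placeholder for a real liveness check
    return Status.ACTIVE if running else Status.INACTIVE
```
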
- if not isinstance(virtual_status, Status): - raise IncorrectReturnTypeError( - Op.VIRTUAL_STATUS, type(virtual_status), Status) - - virtual_status_response = platform_pb2.VirtualStatusResponse() - virtual_status_response.return_value.status = virtual_status.value - return virtual_status_response - - def _internal_initialize(self, request): - """Initialize operation wrapper. - - Executed during VDB creation after mounting onto the target - environment. - - Run initialize operation for an empty virtual source. - - Args: - request (InitializeRequest): Initialize operation arguments. - - Returns: - InitializeResponse: A response containing InitializeResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - if not self.initialize_impl: - raise OperationNotDefinedError(Op.VIRTUAL_INITIALIZE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.initialize_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - initialize_response = platform_pb2.InitializeResponse() - initialize_response.return_value.CopyFrom( - platform_pb2.InitializeResult()) - return initialize_response - - def _internal_mount_specification(self, request): - """Virtual mount spec operation wrapper. - - Executed to fetch the ownership spec before mounting onto a target - environment. - - Run mount spec operation for a virtual source. - - Args: - virtual_mount_spec_request (VirtualMountSpecRequest): - Virtual mount spec operation arguments. - - Returns: - VirtualMountSpecResponse: A response containing the return value of - the virtual mount spec operation, as a VirtualMountSpecResult. - """ - # Reasoning for method imports are in this file's docstring. 
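
Unlike staged sources, a virtual mount specification may carry several mounts, each with an optional shared path, and the ownership spec is again optional. A sketch reusing `plugin` and the hypothetical reference string and classes from the staged example:

```python
@plugin.virtual.mount_specification()
def virtual_mount_spec(repository, virtual_source):
    # Several mounts are allowed here; the wrapper packs each one into a
    # SingleSubsetMount protobuf message.
    mounts = [Mount('UNIX_HOST_ENVIRONMENT-7', '/mnt/provision/mydb')]
    return MountSpecification(mounts)
```
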
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - - def to_protobuf_single_mount(single_mount): - single_mount_protobuf = common_pb2.SingleSubsetMount() - - environment_protobuf = single_mount.remote_environment.to_proto() - - single_mount_protobuf.remote_environment.CopyFrom( - environment_protobuf) - single_mount_protobuf.mount_path = single_mount.mount_path - - if single_mount.shared_path: - single_mount_protobuf.shared_path = single_mount.shared_path - - return single_mount_protobuf - - def to_protobuf_ownership_spec(ownership_spec): - ownership_spec_protobuf = common_pb2.OwnershipSpec() - ownership_spec_protobuf.uid = ownership_spec.uid - ownership_spec_protobuf.gid = ownership_spec.gid - return ownership_spec_protobuf - - if not self.mount_specification_impl: - raise OperationNotDefinedError(Op.VIRTUAL_MOUNT_SPEC) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - virtual_mount_spec = self.mount_specification_impl( - repository=repository, - virtual_source=virtual_source) - - # Validate that this is a MountSpecification object - if not isinstance(virtual_mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.VIRTUAL_MOUNT_SPEC, - type(virtual_mount_spec), - MountSpecification) - - virtual_mount_spec_response = platform_pb2.VirtualMountSpecResponse() - - if virtual_mount_spec.ownership_specification: - ownership_spec = to_protobuf_ownership_spec( - virtual_mount_spec.ownership_specification) - virtual_mount_spec_response.return_value.ownership_spec.CopyFrom( - ownership_spec) - - mounts_list = [to_protobuf_single_mount(m) - for m in virtual_mount_spec.mounts] - virtual_mount_spec_response.return_value.mounts.extend(mounts_list) - return virtual_mount_spec_response - - class Plugin(object): def __init__(self): self.__discovery = DiscoveryOperations() self.__linked = LinkedOperations() self.__virtual = VirtualOperations() + self.__upgrade = UpgradeOperations() @property def discovery(self): @@ -1575,3 +107,7 @@ def linked(self): @property def virtual(self): return self.__virtual + + @property + def upgrade(self): + return self.__upgrade diff --git a/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py b/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py index e7cdc5b4..5768acde 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py @@ -1,10 +1,15 @@ # # Copyright (c) 2019 by Delphix. All rights reserved. 
 #
+import re
 import enum
-from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment
+import six
+from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment, \
+    RemoteHost
 from dlpx.virtualization.common.exceptions import IncorrectTypeError
+from dlpx.virtualization.platform.exceptions import \
+    IncorrectReferenceFormatError
 
 """Classes used for Plugin Operations
 
@@ -157,23 +162,60 @@ class Status(enum.Enum):
 
 
 class Mount(object):
+
     def __init__(self, remote_environment, mount_path, shared_path=None):
-        if not isinstance(remote_environment, RemoteEnvironment):
+        """Accept either a fully populated RemoteEnvironment object or a
+        host environment reference string such as 'UNIX_HOST_ENVIRONMENT-7'.
+
+        A Mount is built from nested Python objects (RemoteEnvironment,
+        RemoteHost) whose parameters (name, binary_path, scratch_path)
+        plugin writers cannot know and engine code never reads, apart from
+        RemoteHost's reference. When only a reference string is supplied,
+        the remaining parameters are filled with dummy values, saving the
+        plugin writer from inventing values they have no access to."""
+        def __is_correct_reference_format(reference):
+            unix_format = re.compile(r"^UNIX_HOST_ENVIRONMENT-\d+$")
+            win_format = re.compile(r"^WINDOWS_HOST_ENVIRONMENT-\d+$")
+            return bool(unix_format.match(reference)) or \
+                bool(win_format.match(reference))
+
+        def __make_remote_environment_from_reference(reference):
+            # Callers validate the reference format before this is invoked,
+            # so the reference is used as-is; every other field is a
+            # placeholder the engine never reads.
+            dummy_host = RemoteHost("dummy host", "dummy reference",
+                                    "dummy binary path", "dummy scratch path")
+            return RemoteEnvironment("dummy name", reference, dummy_host)
+
+        # if remote_environment is neither a RemoteEnvironment nor a string
+        if not isinstance(remote_environment, RemoteEnvironment) and not \
+                isinstance(remote_environment, six.string_types):
             raise IncorrectTypeError(
                 Mount,
                 'remote_environment',
                 type(remote_environment),
-                RemoteEnvironment)
-        self._remote_environment = remote_environment
-
-        if not isinstance(mount_path, basestring):
+                [RemoteEnvironment, six.string_types[0]])
+        # if remote_environment is a string, but incorrectly formatted
+        if isinstance(remote_environment, six.string_types) and not \
+                __is_correct_reference_format(remote_environment):
+            raise IncorrectReferenceFormatError(remote_environment)
+
+        # If the plugin provided a valid reference string, convert it to a
+        # real RemoteEnvironment object.
+        if isinstance(remote_environment, six.string_types):
+            self._remote_environment = \
+                __make_remote_environment_from_reference(remote_environment)
+        else:
+            self._remote_environment = remote_environment
+        if not isinstance(mount_path, six.string_types):
             raise IncorrectTypeError(
-                Mount, 'mount_path', type(mount_path), basestring)
+                Mount, 'mount_path', type(mount_path), six.string_types[0])
         self._mount_path = mount_path
 
-        if shared_path and not isinstance(shared_path, basestring):
+        if shared_path and not isinstance(shared_path, six.string_types):
             raise IncorrectTypeError(
-                Mount, 'shared_path', type(shared_path), basestring, False)
+                Mount, 'shared_path', type(shared_path), six.string_types[0], False)
         self._shared_path = shared_path
 
     @property
diff --git 
a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py new file mode 100644 index 00000000..db13d731 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -0,0 +1,179 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +# -*- coding: utf-8 -*- + +"""UpgradeOperations for the Virtualization Platform + +There are 5 different objects that we can upgrade. All migration ids must be +unique. To upgrade a specific schema, the plugin author would use that specific +decorator specifying the migration id. We save the implementations of each of +the upgrade functions in a dict for the specific schema. For each new upgrade +operation of the same schema, the key will be the migration id, and the value +will be the function that was implemented. +""" +import json +import logging +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform import MigrationIdSet +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectUpgradeObjectTypeError) + +logger = logging.getLogger(__name__) + +__all__ = ['UpgradeOperations'] + + +class UpgradeOperations(object): + + def __init__(self): + self.__migration_id_set = MigrationIdSet() + + self.repository_id_to_impl = {} + self.source_config_id_to_impl = {} + self.linked_source_id_to_impl = {} + self.virtual_source_id_to_impl = {} + self.snapshot_id_to_impl = {} + + def repository(self, migration_id): + def repository_decorator(repository_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, repository_impl.__name__) + self.repository_id_to_impl[std_mig_id] = v.check_function( + repository_impl, Op.UPGRADE_REPOSITORY) + return repository_impl + return repository_decorator + + def source_config(self, migration_id): + def source_config_decorator(source_config_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, source_config_impl.__name__) + self.source_config_id_to_impl[std_mig_id] = v.check_function( + source_config_impl, Op.UPGRADE_SOURCE_CONFIG) + return source_config_impl + return source_config_decorator + + def linked_source(self, migration_id): + def linked_source_decorator(linked_source_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, linked_source_impl.__name__) + self.linked_source_id_to_impl[std_mig_id] = v.check_function( + linked_source_impl, Op.UPGRADE_LINKED_SOURCE) + return linked_source_impl + return linked_source_decorator + + def virtual_source(self, migration_id): + def virtual_source_decorator(virtual_source_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, virtual_source_impl.__name__) + self.virtual_source_id_to_impl[std_mig_id] = v.check_function( + virtual_source_impl, Op.UPGRADE_VIRTUAL_SOURCE) + return virtual_source_impl + return virtual_source_decorator + + def snapshot(self, migration_id): + def snapshot_decorator(snapshot_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, snapshot_impl.__name__) + self.snapshot_id_to_impl[std_mig_id] = v.check_function( + snapshot_impl, Op.UPGRADE_SNAPSHOT) + return snapshot_impl + return snapshot_decorator + + @property + def migration_id_list(self): + return self.__migration_id_set.get_sorted_ids() + + @staticmethod + def _success_upgrade_response(upgraded_dict): + upgrade_result = platform_pb2.UpgradeResult( + 
post_upgrade_parameters=upgraded_dict)
+        upgrade_response = platform_pb2.UpgradeResponse(
+            return_value=upgrade_result)
+        return upgrade_response
+
+    def __process_upgrade_request(self, request, id_to_impl):
+        """Iterate through all objects in the pre_upgrade_parameters map,
+        invoke all available migrations on each object and its metadata,
+        and return a map containing the updated metadata for each object.
+        """
+        post_upgrade_parameters = {}
+        for (object_ref, metadata) in request.pre_upgrade_parameters.items():
+            # Load the object metadata into a dictionary
+            current_metadata = json.loads(metadata)
+            #
+            # Loop through all migrations that were passed into the upgrade
+            # request. Protobuf will preserve the ordering of repeated
+            # elements, so we can rely on the backend to sort the migration
+            # ids before packing them into the request.
+            #
+            for migration_id in request.migration_ids:
+                # Only try to execute the function if the id exists in the map.
+                if migration_id in id_to_impl:
+                    current_metadata = id_to_impl[migration_id](current_metadata)
+            post_upgrade_parameters[object_ref] = json.dumps(current_metadata)
+
+        return self._success_upgrade_response(post_upgrade_parameters)
+
+    def _internal_repository(self, request):
+        """Upgrade repositories for plugins.
+        """
+        if request.type != platform_pb2.UpgradeRequest.REPOSITORY:
+            raise IncorrectUpgradeObjectTypeError(
+                request.type, platform_pb2.UpgradeRequest.REPOSITORY)
+
+        logger.debug('Upgrade repositories [{}]'.format(
+            ', '.join(sorted(request.pre_upgrade_parameters.keys()))))
+
+        return self.__process_upgrade_request(request, self.repository_id_to_impl)
+
+    def _internal_source_config(self, request):
+        """Upgrade source configs for plugins.
+        """
+        if request.type != platform_pb2.UpgradeRequest.SOURCECONFIG:
+            raise IncorrectUpgradeObjectTypeError(
+                request.type, platform_pb2.UpgradeRequest.SOURCECONFIG)
+
+        logger.debug('Upgrade source configs [{}]'.format(
+            ', '.join(sorted(request.pre_upgrade_parameters.keys()))))
+
+        return self.__process_upgrade_request(request, self.source_config_id_to_impl)
+
+    def _internal_linked_source(self, request):
+        """Upgrade linked sources for plugins.
+        """
+        if request.type != platform_pb2.UpgradeRequest.LINKEDSOURCE:
+            raise IncorrectUpgradeObjectTypeError(
+                request.type, platform_pb2.UpgradeRequest.LINKEDSOURCE)
+
+        logger.debug('Upgrade linked sources [{}]'.format(
+            ', '.join(sorted(request.pre_upgrade_parameters.keys()))))
+
+        return self.__process_upgrade_request(request, self.linked_source_id_to_impl)
+
+    def _internal_virtual_source(self, request):
+        """Upgrade virtual sources for plugins.
+        """
+        if request.type != platform_pb2.UpgradeRequest.VIRTUALSOURCE:
+            raise IncorrectUpgradeObjectTypeError(
+                request.type, platform_pb2.UpgradeRequest.VIRTUALSOURCE)
+
+        logger.debug('Upgrade virtual sources [{}]'.format(
+            ', '.join(sorted(request.pre_upgrade_parameters.keys()))))
+
+        return self.__process_upgrade_request(request, self.virtual_source_id_to_impl)
+
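Each decorator above keys a single migration function by its migration id, and `__process_upgrade_request` then replays every applicable migration over each object's metadata dict in the backend-sorted order of ids. A sketch of plugin-side migrations, reusing the `plugin` object from the earlier sketches; the ids, field names and default values are hypothetical:

```python
@plugin.upgrade.repository('2019.11.20')
def add_port_to_repository(old_repository):
    # Migrations receive the old metadata as a plain dict (the wrapper
    # json.loads it) and must return the upgraded dict.
    upgraded = dict(old_repository)
    upgraded['port'] = 5432  # hypothetical field added by the new schema
    return upgraded


@plugin.upgrade.snapshot('2019.11.21')
def rename_snapshot_field(old_snapshot):
    upgraded = dict(old_snapshot)
    upgraded['transaction_id'] = upgraded.pop('txn_id', None)
    return upgraded
```

+    def _internal_snapshot(self, request):
+        """Upgrade snapshots for plugins.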
+ """ + if request.type != platform_pb2.UpgradeRequest.SNAPSHOT: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.SNAPSHOT) + + logger.debug('Upgrade snapshots [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self.__process_upgrade_request(request, self.snapshot_id_to_impl) diff --git a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py new file mode 100644 index 00000000..76976c2a --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py @@ -0,0 +1,704 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +# -*- coding: utf-8 -*- + +"""VirtualOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform import VirtualSource +from dlpx.virtualization.platform import Status +from dlpx.virtualization.platform import Mount +from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['VirtualOperations'] + + +class VirtualOperations(object): + + def __init__(self): + self.configure_impl = None + self.unconfigure_impl = None + self.reconfigure_impl = None + self.start_impl = None + self.stop_impl = None + self.pre_snapshot_impl = None + self.post_snapshot_impl = None + self.status_impl = None + self.initialize_impl = None + self.mount_specification_impl = None + + def configure(self): + def configure_decorator(configure_impl): + if self.configure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_CONFIGURE) + self.configure_impl = v.check_function(configure_impl, + Op.VIRTUAL_CONFIGURE) + return configure_impl + return configure_decorator + + def unconfigure(self): + def unconfigure_decorator(unconfigure_impl): + if self.unconfigure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_UNCONFIGURE) + self.unconfigure_impl = v.check_function(unconfigure_impl, + Op.VIRTUAL_UNCONFIGURE) + return unconfigure_impl + return unconfigure_decorator + + def reconfigure(self): + def reconfigure_decorator(reconfigure_impl): + if self.reconfigure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_RECONFIGURE) + self.reconfigure_impl = v.check_function(reconfigure_impl, + Op.VIRTUAL_RECONFIGURE) + return reconfigure_impl + return reconfigure_decorator + + def start(self): + def start_decorator(start_impl): + if self.start_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_START) + self.start_impl = v.check_function(start_impl, Op.VIRTUAL_START) + return start_impl + return start_decorator + + def stop(self): + def stop_decorator(stop_impl): + if self.stop_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) + self.stop_impl = v.check_function(stop_impl, Op.VIRTUAL_STOP) + return stop_impl + return stop_decorator + + def pre_snapshot(self): + def pre_snapshot_decorator(pre_snapshot_impl): + if self.pre_snapshot_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) + self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, + Op.VIRTUAL_PRE_SNAPSHOT) + return pre_snapshot_impl + return 
pre_snapshot_decorator + + def post_snapshot(self): + def post_snapshot_decorator(post_snapshot_impl): + if self.post_snapshot_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_POST_SNAPSHOT) + self.post_snapshot_impl = v.check_function( + post_snapshot_impl, Op.VIRTUAL_POST_SNAPSHOT) + return post_snapshot_impl + return post_snapshot_decorator + + def status(self): + def status_decorator(status_impl): + if self.status_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) + self.status_impl = v.check_function(status_impl, Op.VIRTUAL_STATUS) + return status_impl + return status_decorator + + def initialize(self): + def initialize_decorator(initialize_impl): + if self.initialize_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_INITIALIZE) + self.initialize_impl = v.check_function(initialize_impl, + Op.VIRTUAL_INITIALIZE) + return initialize_impl + return initialize_decorator + + def mount_specification(self): + def mount_specification_decorator(mount_specification_impl): + if self.mount_specification_impl: + raise OperationAlreadyDefinedError( + Op.VIRTUAL_MOUNT_SPEC) + self.mount_specification_impl = v.check_function( + mount_specification_impl, Op.VIRTUAL_MOUNT_SPEC) + return mount_specification_impl + return mount_specification_decorator + + @staticmethod + def _from_protobuf_single_subset_mount(single_subset_mount): + return Mount( + remote_environment=RemoteEnvironment.from_proto(single_subset_mount.remote_environment), + mount_path=single_subset_mount.mount_path, + shared_path=single_subset_mount.shared_path) + + def _internal_configure(self, request): + """Configure operation wrapper. + + Executed just after cloning the captured data and mounting it to a + target environment. Specifically, this plugin operation is run during + provision and refresh, prior to taking the initial snapshot of the + clone. This plugin operation is run before the user-customizable + Configure Clone and Before Refresh operations are run. It must return + a sourceConfig object that represents the new dataset. + + Configure the data to be usable on the target environment. For database + data files, this may mean recovering from a crash consistent format or + backup. For application files, this may mean reconfiguring XML files or + rewriting hostnames and symlinks. + + Args: + request (ConfigureRequest): Configure operation arguments. + + Returns: + ConfigureResponse: A response containing the return value of the + configure operation, as a ConfigureResult. + """ + # Reasoning for method imports are in this file's docstring. 
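+        #
+        # Editor-added sketch (hypothetical plugin code): the wrapped impl is
+        # called with keyword arguments and must return a
+        # SourceConfigDefinition, e.g.:
+        #
+        #     @plugin.virtual.configure()
+        #     def my_configure(virtual_source, repository, snapshot):
+        #         return SourceConfigDefinition(...)  # schema-generated type
+        #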
+ from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + + if not self.configure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_CONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + snapshot = SnapshotDefinition.from_dict( + json.loads(request.snapshot.parameters.json)) + + config = self.configure_impl( + virtual_source=virtual_source, + repository=repository, + snapshot=snapshot) + + # Validate that this is a SourceConfigDefinition object. + if not isinstance(config, SourceConfigDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_CONFIGURE, type(config), SourceConfigDefinition) + + configure_response = platform_pb2.ConfigureResponse() + configure_response.return_value.source_config.parameters.json = ( + json.dumps(config.to_dict())) + return configure_response + + def _internal_unconfigure(self, request): + """Unconfigure operation wrapper. + + Executed when disabling or deleting an existing virtual source which + has already been mounted to a target environment. This plugin operation + is run before unmounting the virtual source from the target + environment. + + Args: + request (UnconfigureRequest): Unconfigure operation arguments. + + Returns: + UnconfigureResponse: A response containing UnconfigureResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.unconfigure() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.unconfigure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_UNCONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.unconfigure_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + unconfigure_response = platform_pb2.UnconfigureResponse() + unconfigure_response.return_value.CopyFrom( + platform_pb2.UnconfigureResult()) + return unconfigure_response + + def _internal_reconfigure(self, request): + """Reconfigure operation wrapper. + + Executed while attaching a VDB during a virtual source enable job and + returns a virtual source config. 
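+
+        Illustrative sketch (hypothetical plugin code, editor-added):
+
+            @plugin.virtual.reconfigure()
+            def my_reconfigure(snapshot, repository, source_config,
+                               virtual_source):
+                # Must return a SourceConfigDefinition, as configure() does.
+                return source_config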
+ + Args: + request (ReconfigureRequest): Reconfigure operation arguments. + + Returns: + ReconfigureResponse: A response containing the return value of the + reconfigure operation, as a ReconfigureResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import RepositoryDefinition + + if not self.reconfigure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_RECONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + snapshot = SnapshotDefinition.from_dict( + json.loads(request.snapshot.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + config = self.reconfigure_impl( + snapshot=snapshot, + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a SourceConfigDefinition object. + if not isinstance(config, SourceConfigDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_RECONFIGURE, type(config), SourceConfigDefinition) + + reconfigure_response = platform_pb2.ReconfigureResponse() + reconfigure_response.return_value.source_config.parameters.json = ( + json.dumps(config.to_dict())) + return reconfigure_response + + def _internal_start(self, request): + """Start operation wrapper. + + Executed after attaching a VDB during a virtual source enable job to + start the database. + + Args: + request (StartRequest): Start operation arguments. + + Returns: + StartResponse: A response containing StartResult if successful or + PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.start() is not a required operation, this should + # not be called if it wasn't implemented. 
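+        #
+        # Editor-added sketch (hypothetical plugin code): when defined, the
+        # wrapped impl receives exactly these keyword arguments:
+        #
+        #     @plugin.virtual.start()
+        #     def my_start(repository, source_config, virtual_source):
+        #         pass  # e.g. issue a start command on the target host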
+ # + if not self.start_impl: + raise OperationNotDefinedError(Op.VIRTUAL_START) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.start_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + start_response = platform_pb2.StartResponse() + start_response.return_value.CopyFrom(platform_pb2.StartResult()) + return start_response + + def _internal_stop(self, request): + """Stop operation wrapper. + + Executed before unmounting a VDB during a virtual source stop job. + + Args: + request (StopRequest): Stop operation arguments. + + Returns: + StopResponse: A response containing StopResult if successful or + PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.stop() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.stop_impl: + raise OperationNotDefinedError(Op.VIRTUAL_STOP) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.stop_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + stop_response = platform_pb2.StopResponse() + stop_response.return_value.CopyFrom(platform_pb2.StopResult()) + return stop_response + + def _internal_pre_snapshot(self, request): + """Virtual pre snapshot operation wrapper. + + Executed before creating a ZFS snapshot. This plugin operation is run + prior to creating a snapshot for a virtual source. + + Run pre-snapshot operation for a virtual source. + + Args: + virtual_pre_snapshot_request (VirtualPreSnapshotRequest): + Virtual pre snapshot operation arguments. + + Returns: + VirtualPreSnapshotResponse: A response containing + VirtualPreSnapshotResult if successful or PluginErrorResult in case + of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented. 
+ # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.pre_snapshot_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + virtual_pre_snapshot_response = ( + platform_pb2.VirtualPreSnapshotResponse()) + virtual_pre_snapshot_response.return_value.CopyFrom( + platform_pb2.VirtualPreSnapshotResult()) + return virtual_pre_snapshot_response + + def _internal_post_snapshot(self, request): + """Virtual post snapshot operation wrapper. + + Executed after creating a ZFS snapshot. This plugin operation is run + after creating a snapshot for a virtual source. + + Run post-snapshot operation for a virtual source. + + Args: + request (VirtualPostSnapshotRequest): Virtual post snapshot operation + arguments. + + Returns: + VirtualPostSnapshotResponse: A response containing the return value + of the virtual post snapshot operation, as a + VirtualPostSnapshotResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.VIRTUAL_POST_SNAPSHOT) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + snapshot = self.post_snapshot_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + virtual_post_snapshot_response = ( + platform_pb2.VirtualPostSnapshotResponse()) + virtual_post_snapshot_response.return_value.snapshot.CopyFrom( + to_protobuf(snapshot)) + return virtual_post_snapshot_response + + def _internal_status(self, request): + """Virtual status operation wrapper. 
+ + Executed to get the status of a virtual source - active or inactive. + + Run status operation for a virtual source. + + Args: + request (VirtualStatusRequest): + Virtual status operation arguments. + + Returns: + VirtualStatusResponse: A response containing VirtualStatusResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.status() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.status_impl: + raise OperationNotDefinedError(Op.VIRTUAL_STATUS) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + virtual_status = self.status_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a Status object. + if not isinstance(virtual_status, Status): + raise IncorrectReturnTypeError( + Op.VIRTUAL_STATUS, type(virtual_status), Status) + + virtual_status_response = platform_pb2.VirtualStatusResponse() + virtual_status_response.return_value.status = virtual_status.value + return virtual_status_response + + def _internal_initialize(self, request): + """Initialize operation wrapper. + + Executed during VDB creation after mounting onto the target + environment. + + Run initialize operation for an empty virtual source. + + Args: + request (InitializeRequest): Initialize operation arguments. + + Returns: + InitializeResponse: A response containing InitializeResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. 
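+        #
+        # Editor-added sketch (hypothetical plugin code): initialize runs
+        # only when creating an empty VDB, e.g.:
+        #
+        #     @plugin.virtual.initialize()
+        #     def my_initialize(repository, source_config, virtual_source):
+        #         pass  # seed the new dataset under the mounted paths
+        #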
+ from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + if not self.initialize_impl: + raise OperationNotDefinedError(Op.VIRTUAL_INITIALIZE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.initialize_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + initialize_response = platform_pb2.InitializeResponse() + initialize_response.return_value.CopyFrom( + platform_pb2.InitializeResult()) + return initialize_response + + def _internal_mount_specification(self, request): + """Virtual mount spec operation wrapper. + + Executed to fetch the ownership spec before mounting onto a target + environment. + + Run mount spec operation for a virtual source. + + Args: + virtual_mount_spec_request (VirtualMountSpecRequest): + Virtual mount spec operation arguments. + + Returns: + VirtualMountSpecResponse: A response containing the return value of + the virtual mount spec operation, as a VirtualMountSpecResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + + def to_protobuf_single_mount(single_mount): + single_mount_protobuf = common_pb2.SingleSubsetMount() + + environment_protobuf = single_mount.remote_environment.to_proto() + + single_mount_protobuf.remote_environment.CopyFrom( + environment_protobuf) + single_mount_protobuf.mount_path = single_mount.mount_path + + if single_mount.shared_path: + single_mount_protobuf.shared_path = single_mount.shared_path + + return single_mount_protobuf + + def to_protobuf_ownership_spec(ownership_spec): + ownership_spec_protobuf = common_pb2.OwnershipSpec() + ownership_spec_protobuf.uid = ownership_spec.uid + ownership_spec_protobuf.gid = ownership_spec.gid + return ownership_spec_protobuf + + if not self.mount_specification_impl: + raise OperationNotDefinedError(Op.VIRTUAL_MOUNT_SPEC) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + virtual_mount_spec = self.mount_specification_impl( + repository=repository, + virtual_source=virtual_source) + + # Validate that this is a MountSpecification object + if not isinstance(virtual_mount_spec, MountSpecification): + raise IncorrectReturnTypeError( + Op.VIRTUAL_MOUNT_SPEC, + type(virtual_mount_spec), + MountSpecification) + + virtual_mount_spec_response = 
platform_pb2.VirtualMountSpecResponse()
+
+        if virtual_mount_spec.ownership_specification:
+            ownership_spec = to_protobuf_ownership_spec(
+                virtual_mount_spec.ownership_specification)
+            virtual_mount_spec_response.return_value.ownership_spec.CopyFrom(
+                ownership_spec)
+
+        mounts_list = [to_protobuf_single_mount(m)
+                       for m in virtual_mount_spec.mounts]
+        virtual_mount_spec_response.return_value.mounts.extend(mounts_list)
+        return virtual_mount_spec_response
\ No newline at end of file
diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py
index a4e27f5a..d800120f 100644
--- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py
+++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py
@@ -57,6 +57,27 @@ def __init__(self, operation, actual_type, expected_type):
         super(IncorrectReturnTypeError, self).__init__(message)
 
 
+class IncorrectUpgradeObjectTypeError(PluginRuntimeError):
+    """IncorrectUpgradeObjectTypeError gets thrown when an upgrade workflow
+    was called with the incorrect object type to upgrade.
+
+    Args:
+        actual_type (platform_pb2.UpgradeRequest.Type): type that was passed in
+        expected_type (platform_pb2.UpgradeRequest.Type): expected type
+
+    Attributes:
+        message (str): A localized user-readable message describing the
+            mismatched upgrade object type.
+
+    """
+
+    def __init__(self, actual_type, expected_type):
+        message = (
+            'The upgrade operation received objects with {} type but should'
+            ' have had type {}.'.format(actual_type, expected_type))
+        super(IncorrectUpgradeObjectTypeError, self).__init__(message)
+
+
 class OperationAlreadyDefinedError(PlatformError):
     """OperationAlreadyDefinedError gets thrown when the plugin writer tries
     to define an operation more than ones.
@@ -89,3 +110,124 @@ def __init__(self, operation):
         message = ('An implementation for the {} operation has not been'
                    ' defined.'.format(operation.value))
         super(OperationNotDefinedError, self).__init__(message)
+
+
+class MigrationIdIncorrectTypeError(PlatformError):
+    """MigrationIdIncorrectTypeError gets thrown when the provided migration
+    id is not a string.
+
+    Args:
+        migration_id (str): The migration id assigned for this operation.
+        function_name (str): The name of the function that used the
+            decorator with the same migration id.
+
+    Attributes:
+        message (str): A localized user-readable message describing the
+            invalid migration id.
+    """
+    def __init__(self, migration_id, function_name):
+        message = ("The migration id '{}' used in the function '{}' should"
+                   " be a string.".format(migration_id, function_name))
+        super(MigrationIdIncorrectTypeError, self).__init__(message)
+
+
+class MigrationIdIncorrectFormatError(PlatformError):
+    """MigrationIdIncorrectFormatError gets thrown when the migration id
+    given is not in the correct format. It should be one or more positive
+    integers separated by periods.
+
+    Args:
+        migration_id (str): The migration id assigned for this operation.
+        function_name (str): The name of the function that used the
+            decorator with the same migration id.
+        format (str): The format expected of the migration_id.
+
+    Attributes:
+        message (str): A localized user-readable message describing the
+            invalid migration id.
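+
+    Example (illustrative; values taken from the unit tests in this change):
+
+        MigrationIdSet().add('50.0.0000.1', 'f')  # ok, canonical '50.0.0.1'
+        MigrationIdSet().add('1000.', 'f')        # raises this error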
+ """ + def __init__(self, message): + super(MigrationIdIncorrectFormatError, self).__init__(message) + + @classmethod + def from_fields(cls, migration_id, function_name, format): + message = ("The migration id '{}' used in the function '{}' does not" + " follow the correct format '{}'.".format(migration_id, + function_name, + format)) + return cls(message) + + +class MigrationIdAlreadyUsedError(PlatformError): + """MigrationIdAlreadyUsedError gets thrown when the same migration id is + used for the same upgrade operation + + Args: + migration_id (str): The migration id assigned for this operation + function_name (str): The name of the function that used the + decorator with the same migration id. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, migration_id, std_migration_id, function_name): + message = ("The migration id '{}' used in the function '{}' has the" + " same canonical form '{}' as another migration.".format( + migration_id, function_name, std_migration_id)) + super(MigrationIdAlreadyUsedError, self).__init__(message) + +class DecoratorNotFunctionError(PlatformError): + """DecoratorNotFunctionError gets thrown when the decorated variable is + not a function when it should be. + + Args: + object_name (str): The name of the variable that should have been a + decorator_name (str): The decorator that is being incorrectly used. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, object_name, decorator_name): + message = ("The object '{}' decorated by '{}' is" + " not a function.".format(object_name, decorator_name)) + super(DecoratorNotFunctionError, self).__init__(message) + + +class IncorrectReferenceFormatError(PluginRuntimeError): + """There are 2 possible errors that can be thrown with an incorrect + reference. The reference passed in can be a non-string, throwing an + IncorrectTypeError. The second error that can be thrown is + IncorrectReferenceFormatError, which gets thrown when the reference is not + of the format "UNIX_HOST_ENVIRONMENT-#" nor of + "WINDOWS_HOST_ENVIRONMENT-#". + + Args: + reference (str): The incorrectly formatted reference + + Attributes: + message (str): A user-readable message describing the exception. + """ + def __init__(self, reference): + message = ("Reference '{}' is not a correctly formatted host" + " environment reference.".format(reference)) + super(IncorrectReferenceFormatError, self).__init__(message) + +class IncorrectPluginCodeError(PluginRuntimeError): + """ + This gets thrown if the import validations come across invalid plugin + code that causes import to fail, or if the expected plugin entry point is + not found in the plugin code. + Args: + message (str): A user-readable message describing the exception. + + Attributes: + message (str): A user-readable message describing the exception. + """ + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(IncorrectPluginCodeError, self).__init__(message) \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_util.py b/platform/src/main/python/dlpx/virtualization/platform/import_util.py new file mode 100644 index 00000000..a8e8f807 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_util.py @@ -0,0 +1,138 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. 
+# +import inspect + +from dlpx.virtualization.platform import exceptions + + +_IMPORT_CHECKS = {} +_POST_IMPORT_CHECKS = {} + + +class PluginModule: + """ + Import helper class for the plugin. An instance of this class helps to pass + state of imported module and relevant info to all the validation methods. + """ + def __init__(self, + src_dir, + module, + entry_point, + plugin_type, + module_content, + v_maps, + validate_args=False): + self.__src_dir = src_dir + self.__module = module + self.__entry_point = entry_point + self.__type = plugin_type + self.__module_content = module_content + self.__expected_direct_args_by_op =\ + v_maps['EXPECTED_DIRECT_ARGS_BY_OP'] + self.__expected_staged_args_by_op =\ + v_maps['EXPECTED_STAGED_ARGS_BY_OP'] + self.__expected_upgrade_args = v_maps['EXPECTED_UPGRADE_ARGS'] + self.__validate_args = validate_args + + @property + def src_dir(self): + return self.__src_dir + + @property + def module(self): + return self.__module + + @property + def entry_point(self): + return self.__entry_point + + @property + def plugin_type(self): + return self.__type + + @property + def module_content(self): + return self.__module_content + + @property + def expected_direct_args_by_op(self): + return self.__expected_direct_args_by_op + + @property + def expected_staged_args_by_op(self): + return self.__expected_staged_args_by_op + + @property + def expected_upgrade_args(self): + return self.__expected_upgrade_args + + @property + def validate_args(self): + return self.__validate_args + + +def import_check(ordinal): + """ + This is the import check decorator. Ordinal here signifies the order in + which the checks are executed. + """ + def import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _IMPORT_CHECKS + + _IMPORT_CHECKS[ordinal] = f + + return f + + return import_check_decorator + + +def post_import_check(ordinal): + """ + This is the post import check decorator. Ordinal here signifies the order + in which the checks are executed. + """ + def post_import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _POST_IMPORT_CHECKS + + _POST_IMPORT_CHECKS[ordinal] = f + + return f + + return post_import_check_decorator + + +def validate_import(plugin_module): + """ + Runs validations on the module imported and checks if import was fine + and imported content is valid or not. + NOTE: Dependency checks are not handled well. A failure in one validation + should not impact the next one if each validation defines its dependencies + well. For now, any exception from one is considered failure of all + validations. This can be enhanced to define dependencies well. + """ + for key in sorted(_IMPORT_CHECKS.keys()): + try: + _IMPORT_CHECKS[key](plugin_module) + except exceptions.IncorrectPluginCodeError as plugin_err: + return [plugin_err.message] + except exceptions.UserError as user_err: + return [user_err.message] + return [] + + +def validate_post_import(plugin_module): + """ + Runs post import validations on the module content. + """ + warnings = [] + + # + # warnings.extend is used below since each import check returns a list of + # warnings. 
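+    #
+    # Editor-added sketch (hypothetical): a post-import check registers with
+    # an ordinal and returns a list of warning strings, e.g.:
+    #
+    #     @post_import_check(ordinal=3)
+    #     def my_check(plugin_module):
+    #         return []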
+ # + for key in sorted(_POST_IMPORT_CHECKS.keys()): + warnings.extend(_POST_IMPORT_CHECKS[key](plugin_module)) + return warnings \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_validations.py b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py new file mode 100644 index 00000000..1a7ef3da --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py @@ -0,0 +1,191 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# +import inspect + +from dlpx.virtualization.platform.import_util import (import_check, + post_import_check, + PluginModule) +from dlpx.virtualization.platform import exceptions + + +@import_check(ordinal=1) +def validate_module_content(plugin_module): + # This should never happen and if it does, flag an error. + if plugin_module.module_content is None: + raise exceptions.IncorrectPluginCodeError( + 'Plugin module content is None.') + + +@import_check(ordinal=2) +def validate_entry_point(plugin_module): + # + # Schema validation on plugin config file would have ensured entry is a + # string and should never be none - so raise an error if it does. + # + if plugin_module.entry_point is None: + raise exceptions.IncorrectPluginCodeError( + 'Plugin entry point object is None.') + + if not hasattr(plugin_module.module_content, plugin_module.entry_point): + raise exceptions.UserError( + 'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol' + ' in module \'{}\'.'.format(plugin_module.module, + plugin_module.entry_point, + plugin_module.entry_point, + plugin_module.module)) + + +@import_check(ordinal=3) +def validate_plugin_object(plugin_module): + plugin_object = getattr(plugin_module.module_content, + plugin_module.entry_point, + None) + + if plugin_object is None: + raise exceptions.UserError('Plugin object retrieved from the entry' + ' point {} is None'.format + (plugin_module.entry_point)) + + +@post_import_check(ordinal=1) +def validate_named_args(plugin_module): + """ + Does named argument validation based on the plugin type. + """ + warnings = [] + + if plugin_module.validate_args: + + # + # Validated methods args against expected args and return any + # resulting warnings to the caller to process. + # These warnings should be treated as an exception to make + # sure build fails. + # + + plugin_object = getattr(plugin_module.module_content, + plugin_module.entry_point) + + # Iterate over attributes objects of the Plugin object + for plugin_attrib in plugin_object.__dict__.values(): + # + # For each plugin attribute object, its __dict__.keys will give + # us the name of the plugin implemntation method name. That name + # is useful in looking up named arguments expected and what is + # actually in the plugin code. And plugin_op_type can be, for e.g. + # LinkedOperations, DiscoveryOperations, VirtualOperations + # + plugin_op_type = plugin_attrib.__class__.__name__ + + # UpgradeOperations are validated differently, so ignore. 
+            if plugin_op_type == 'UpgradeOperations':
+                continue
+
+            for op_name_key, op_name in plugin_attrib.__dict__.items():
+                if op_name is None:
+                    continue
+                actual_args = inspect.getargspec(op_name)
+                warnings.extend(
+                    _check_args(method_name=op_name.__name__,
+                                expected_args=_lookup_expected_args(
+                                    plugin_module, plugin_op_type,
+                                    op_name_key),
+                                actual_args=actual_args.args))
+
+    return warnings
+
+
+@post_import_check(ordinal=2)
+def check_upgrade_operations(plugin_module):
+    """
+    Does named argument validation on UpgradeOperations.
+    """
+    warnings = []
+
+    if plugin_module.validate_args:
+
+        #
+        # Validate method args against expected args and return any
+        # resulting warnings to the caller to process.
+        # These warnings should be treated as an exception to make
+        # sure the build fails.
+        #
+
+        plugin_object = getattr(plugin_module.module_content,
+                                plugin_module.entry_point)
+
+        # Iterate over attribute objects of the Plugin object.
+        for plugin_attrib in plugin_object.__dict__.values():
+            #
+            # For each plugin attribute object, its __dict__.keys will give
+            # us the name of the plugin implementation method. That name is
+            # useful in looking up the named arguments expected and what is
+            # actually in the plugin code. And plugin_op_type can be, e.g.,
+            # LinkedOperations, DiscoveryOperations, VirtualOperations.
+            #
+            plugin_op_type = plugin_attrib.__class__.__name__
+
+            if plugin_op_type != 'UpgradeOperations':
+                continue
+
+            warnings.extend(_check_upgrade_args(
+                plugin_attrib, plugin_module.expected_upgrade_args))
+
+    return warnings
+
+
+def _check_upgrade_args(upgrade_operations, expected_upgrade_args):
+    """
+    Does named argument validation of all functions in the dictionaries by
+    looping first through all the attributes in the UpgradeOperations for
+    this plugin. Any attributes that are not dictionaries mapping
+    migration_id -> upgrade_function are skipped. We then loop through every
+    key/value pair of each of the dictionaries and validate that the
+    arguments of the defined function have the expected names.
+    """
+    warnings = []
+
+    for attribute_name, attribute in vars(upgrade_operations).items():
+        if attribute_name not in expected_upgrade_args.keys():
+            # Skip if not in one of the operation dicts we store functions in.
+            continue
+        #
+        # If the attribute_name was in the expected upgrade dicts then we know
+        # it is a dict containing migration id -> upgrade function that we can
+        # iterate on.
+        #
+        for migration_id, migration_func in attribute.items():
+            actual = inspect.getargspec(migration_func).args
+            expected = expected_upgrade_args[attribute_name]
+            warnings.extend(
+                _check_args(method_name=migration_func.__name__,
+                            expected_args=expected,
+                            actual_args=actual))
+
+    return warnings
+
+
+def _check_args(method_name, expected_args, actual_args):
+    warnings = []
+
+    if len(expected_args) != len(actual_args):
+        warnings.append('Number of arguments does not match in method {}.'
+                        ' Expected: {}, Found: {}.'.format(
+                            method_name, list(expected_args), actual_args))
+
+    if not all(arg in expected_args for arg in actual_args):
+        warnings.append('Named argument mismatch in method {}.'
+                        ' Expected: {}, Found: {}.'.format(
+                            method_name, list(expected_args), actual_args))
+
+    return warnings
+
+
+def _lookup_expected_args(plugin_module, plugin_op_type, plugin_op_name):
+    if plugin_module.plugin_type == 'DIRECT':
+        return plugin_module.expected_direct_args_by_op[plugin_op_type][
+            plugin_op_name]
+    else:
+        return plugin_module.expected_staged_args_by_op[plugin_op_type][
+            plugin_op_name]
\ No newline at end of file
diff --git a/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py b/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py
new file mode 100644
index 00000000..9dc6c142
--- /dev/null
+++ b/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py
@@ -0,0 +1,102 @@
+#
+# Copyright (c) 2019 by Delphix. All rights reserved.
+#
+
+import logging
+import re
+
+from dlpx.virtualization.platform.exceptions import (
+    MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError,
+    MigrationIdIncorrectFormatError)
+
+MIGRATION_ID_REGEX = re.compile(r'^\d+(\.\d+)*$')
+logger = logging.getLogger(__name__)
+
+
+class MigrationIdSet:
+    """
+    Keeps track of all migrations and validates/standardizes them as they are
+    added / parsed.
+
+    Exceptions can be thrown when trying to add a new migration id.
+    Otherwise, once all migration functions have been read in, their ids can
+    be retrieved in the correct order.
+    """
+    def __init__(self):
+        """
+        The list of migration ids will store migrations as an array of ids,
+        where each id is represented by its standardized array of positive
+        integers. For example, given the ids 1.0.0, 1.2.03, and 2.0.1.0,
+        __migration_ids would be [[1], [1, 2, 3], [2, 0, 1]].
+        """
+        self.__migration_ids = []
+
+    def add(self, migration_id, impl_name):
+        """
+        Validates that the migration id is the correct type/format and then
+        returns the canonical format of the id. Adds the id, as an array of
+        integers, to the list of migration ids.
+        """
+        # First validate that the migration_id is the correct type/format.
+        self.validate_migration_id(migration_id, impl_name)
+
+        # Then we must standardize the migration_id.
+        std_migration_id = self.standardize_migration_id_to_array(
+            migration_id, impl_name)
+        std_string = '.'.join(str(i) for i in std_migration_id)
+
+        # Then we should check if this migration_id has already been used.
+        if std_migration_id in self.__migration_ids:
+            raise MigrationIdAlreadyUsedError(migration_id,
+                                              std_string,
+                                              impl_name)
+
+        # Lastly we should add this new array into the internal migration
+        # list.
+        self.__migration_ids.append(std_migration_id)
+
+        # Return back the standardized format of the migration id.
+        return std_string
+
+    @staticmethod
+    def validate_migration_id(migration_id, impl_name):
+        # First validate that the id is a string.
+        if not isinstance(migration_id, basestring):
+            raise MigrationIdIncorrectTypeError(migration_id, impl_name)
+
+        # Next check if the id is the right format.
+        if not MIGRATION_ID_REGEX.match(migration_id):
+            raise MigrationIdIncorrectFormatError.from_fields(
+                migration_id, impl_name, MIGRATION_ID_REGEX.pattern)
+
+    @staticmethod
+    def standardize_migration_id_to_array(migration_id, impl_name):
+        # Split on the period and convert to integer.
+        array = [int(i) for i in migration_id.split('.')]
+
+        #
+        # We cannot allow a migration id of essentially '0' because otherwise
+        # there would be no way to add a migration that goes before this.
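+        #
+        # Editor-added illustration: '0.000.00' standardizes to an all-zero
+        # array, which the check below rejects, while '50.0.0000.1' becomes
+        # [50, 0, 0, 1] and is accepted.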
+ # + if not any(array): + raise MigrationIdIncorrectFormatError( + "The migration id '{}' used in the function '{}' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id, impl_name)) + + # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 + while array: + if not array[-1]: + # Remove the last element which is a zero from array + array.pop() + else: + break + + return array + + def get_sorted_ids(self): + # First sort the migration ids + self.__migration_ids.sort() + + # Then convert all these arrays to the usual string format. + return ['.'.join(str(i) for i in migration_id) + for migration_id in self.__migration_ids] diff --git a/platform/src/main/python/dlpx/virtualization/platform/operation.py b/platform/src/main/python/dlpx/virtualization/platform/operation.py index 67b5bdd3..b938c270 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/operation.py +++ b/platform/src/main/python/dlpx/virtualization/platform/operation.py @@ -27,3 +27,9 @@ class Operation(Enum): VIRTUAL_STATUS = 'virtual.status()' VIRTUAL_INITIALIZE = 'virtual.initialize()' VIRTUAL_MOUNT_SPEC = 'virtual.mount_specification()' + + UPGRADE_REPOSITORY = 'upgrade.repository()' + UPGRADE_SOURCE_CONFIG = 'upgrade.source_config()' + UPGRADE_LINKED_SOURCE = 'upgrade.linked_source()' + UPGRADE_VIRTUAL_SOURCE = 'upgrade.virtual_source()' + UPGRADE_SNAPSHOT = 'upgrade.snapshot()' diff --git a/platform/src/main/python/dlpx/virtualization/platform/util.py b/platform/src/main/python/dlpx/virtualization/platform/util.py new file mode 100644 index 00000000..5e0a15c3 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/util.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +import dlpx.virtualization.api + +def get_virtualization_api_version(): + """Returns the Virutalization API version string. + + :return: version string + """ + return dlpx.virtualization.api.__version__ \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/validation_util.py b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py new file mode 100644 index 00000000..bc39d098 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +import inspect +from dlpx.virtualization.platform.exceptions import DecoratorNotFunctionError + + +def check_function(impl, operation): + if not inspect.isfunction(impl) and not inspect.ismethod(impl): + raise DecoratorNotFunctionError(impl.__name__, operation.value) + return impl diff --git a/platform/src/test/java/NotUsed.java b/platform/src/test/java/NotUsed.java deleted file mode 100644 index 6608cff8..00000000 --- a/platform/src/test/java/NotUsed.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -/** - * Gradle will fail when running the test task if there are not classes in the - * Java test jar. This class is simply here to prevent that from happening. - * If a test is introduced in the future this file will be deleted. 
- */ -public class NotUsed { -} diff --git a/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py b/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py index b7264066..dae4325a 100644 --- a/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py +++ b/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py @@ -6,5 +6,5 @@ def test_import_common(): - from dlpx.virtualization import common_pb2 + from dlpx.virtualization.api import common_pb2 assert issubclass(common_pb2.Repository, message.Message) diff --git a/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py b/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py new file mode 100644 index 00000000..42db7f92 --- /dev/null +++ b/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py @@ -0,0 +1,107 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +import pytest +from dlpx.virtualization.platform.exceptions import ( + MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError, + MigrationIdIncorrectFormatError) +from dlpx.virtualization.platform import migration_id_set as m + + +class TestMigrationIdSet: + @staticmethod + @pytest.fixture + def migration_set(): + yield m.MigrationIdSet() + + @staticmethod + @pytest.mark.parametrize('migration_id,expected_std_id', [ + ('5.3.2.1', '5.3.2.1'), + ('1000', '1000'), + ('50.0.0', '50'), + ('50.0.0000.1', '50.0.0.1'), + ('2019.10.04', '2019.10.4')]) + def test_basic_add(migration_set, migration_id, expected_std_id): + actual_std_id = migration_set.add(migration_id, 'function') + + assert actual_std_id == expected_std_id + + @staticmethod + @pytest.mark.parametrize('id_one,id_two', [ + ('5.3.2.1', '5.3.2.1'), + ('1000', '1000.0.0'), + ('50.0.0', '50'), + ('50.0.0000.1', '50.0.0.1.0000'), + ('2019.0010.0004', '2019.10.4')]) + def test_same_migration_id_used(migration_set, id_one, id_two): + std_id = migration_set.add(id_one, 'function') + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info: + migration_set.add(id_two, 'function2') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'function2' has the" + " same canonical form '{}' as another migration.".format(id_two, + std_id)) + + @staticmethod + @pytest.mark.parametrize('migration_id', [True, + 1000, + {'random set'}, + ['random', 'list']]) + def test_migration_incorrect_type(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectTypeError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' should" + " be a string.".format(migration_id)) + + @staticmethod + @pytest.mark.parametrize('migration_id', ['Not integers', + '1000.', + '2019 10 20']) + def test_migration_incorrect_format(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' does not" + " follow the correct format '{}'.".format( + migration_id, m.MIGRATION_ID_REGEX.pattern)) + + @staticmethod + @pytest.mark.parametrize('migration_id', ['0.0', + '0', + '0.000.000.00.0']) + def test_migration_id_is_zero(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + migration_set.add(migration_id, 
'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id)) + + @staticmethod + def test_get_sorted_ids(migration_set): + migration_set.add('2019.04.01', 'one') + migration_set.add('4.10.04', 'two') + migration_set.add('20190.10.006', 'three') + migration_set.add('1.2.3.4', 'four') + migration_set.add('5.4.3.2.1.0', 'five') + migration_set.add('1', 'six') + migration_set.add('10.01.10.00.1.0.0', 'seven') + + assert migration_set.get_sorted_ids() == ['1', + '1.2.3.4', + '4.10.4', + '5.4.3.2.1', + '10.1.10.0.1', + '2019.4.1', + '20190.10.6'] diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin.py b/platform/src/test/python/dlpx/virtualization/test_plugin.py index 827f574c..90f97e7a 100755 --- a/platform/src/test/python/dlpx/virtualization/test_plugin.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin.py @@ -4,14 +4,10 @@ import json import pytest -import sys -from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.api import (platform_pb2, common_pb2) from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) -from dlpx.virtualization import common_pb2 -from dlpx.virtualization.platform import _plugin from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationAlreadyDefinedError, - PlatformError, PluginRuntimeError) + IncorrectReturnTypeError, IncorrectUpgradeObjectTypeError, OperationAlreadyDefinedError, PluginRuntimeError) from mock import MagicMock, patch import fake_generated_definitions @@ -51,6 +47,17 @@ TEST_STAGED_SOURCE_JSON = SIMPLE_JSON.format(TEST_STAGED_SOURCE) TEST_VIRTUAL_SOURCE_JSON = SIMPLE_JSON.format(TEST_VIRTUAL_SOURCE) TEST_SNAPSHOT_PARAMS_JSON = '{"resync": false}' +TEST_PRE_UPGRADE_PARAMS = {'obj': json.dumps({'name': 'upgrade'})} +TEST_POST_MIGRATION_METADATA_1 = ( + json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade'}})) +TEST_POST_MIGRATION_METADATA_2 = ( + json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade', + 'metadata': 'metadata'}})) +TEST_POST_UPGRADE_PARAMS = ( + {u'obj': '"{\\"obj\\": {\\"prettyName\\": \\"prettyUpgrade\\", ' + '\\"name\\": \\"upgrade\\", \\"metadata\\": \\"metadata\\"}}"'} +) +MIGRATION_IDS = ('2020.1.1', '2020.2.2') class TestPlugin: @@ -284,7 +291,7 @@ def virtual_source(connection, mount): virtual_source.guid = TEST_GUID virtual_source.connection.CopyFrom(connection) virtual_source.parameters.json = TEST_VIRTUAL_SOURCE_JSON - virtual_source.mounts.MergeFrom([mount]) + virtual_source.mounts.extend([mount]) return virtual_source @staticmethod @@ -1090,3 +1097,207 @@ def staged_mount_spec_impl(staged_source, repository): message = err_info.value.message assert message == 'Shared path is not supported for linked sources.' 
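+
+    # Editor-added sketch (hypothetical plugin code exercised by the upgrade
+    # tests below): a plugin registers migrations with decorators such as
+    #
+    #     @my_plugin.upgrade.repository('2020.1.1')
+    #     def upgrade_repository(old_repository):
+    #         old_repository['prettyName'] = 'pretty'
+    #         return old_repository
+    #
+    # and _internal_repository() replays them in migration id order.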
+ + @staticmethod + def test_upgrade_repository_success(my_plugin): + + @my_plugin.upgrade.repository('2020.1.1') + def upgrade_repository(old_repository): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.repository('2020.2.2') + def upgrade_repository(old_repository): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.REPOSITORY + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_repository(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters\ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_source_config_success(my_plugin): + + @my_plugin.upgrade.source_config('2020.1.1') + def upgrade_source_config(old_source_config): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.source_config('2020.2.2') + def upgrade_source_config(old_source_config): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SOURCECONFIG + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_source_config(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_linked_source_success(my_plugin): + + @my_plugin.upgrade.linked_source('2020.1.1') + def upgrade_linked_source(old_linked_source): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.linked_source('2020.2.2') + def upgrade_linked_source(old_linked_source): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.LINKEDSOURCE + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_linked_source(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_virtual_source_success(my_plugin): + + @my_plugin.upgrade.virtual_source('2020.1.1') + def upgrade_virtual_source(old_virtual_source): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.virtual_source('2020.2.2') + def upgrade_virtual_source(old_virtual_source): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.VIRTUALSOURCE + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_virtual_source(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_snapshot_success(my_plugin): + + @my_plugin.upgrade.snapshot('2020.1.1') + 
def upgrade_snapshot(old_snapshot): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.snapshot('2020.2.2') + def upgrade_snapshot(old_snapshot): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SNAPSHOT + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_snapshot(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_repository_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_repository(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 1.") + + @staticmethod + def test_upgrade_source_config_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_source_config(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 0.") + + @staticmethod + def test_upgrade_linked_source_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_linked_source(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 2.") + + @staticmethod + def test_upgrade_virtual_source_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_virtual_source(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 3.") + + @staticmethod + def test_upgrade_snapshot_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SOURCECONFIG + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_snapshot(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 0 type" + " but should have had type 4.") + + @staticmethod + def test_upgrade_snapshot_fail_with_runtime_error(my_plugin): + + @my_plugin.upgrade.snapshot('2020.1.1') + def upgrade_snapshot(old_snapshot): + raise RuntimeError('RuntimeError in snapshot migration') + + @my_plugin.upgrade.snapshot('2020.2.2') + def upgrade_snapshot(old_snapshot): + raise RuntimeError('RuntimeError in snapshot migration') + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = 
upgrade_request.SNAPSHOT + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + with pytest.raises(RuntimeError): + my_plugin.upgrade._internal_snapshot(upgrade_request) diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py b/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py index 8da03350..4c36927e 100644 --- a/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py @@ -8,6 +8,8 @@ from dlpx.virtualization.platform import Mount from dlpx.virtualization.platform import OwnershipSpecification from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform.exceptions import \ + IncorrectReferenceFormatError @pytest.fixture @@ -29,12 +31,11 @@ def test_init_mount_success(remote_environment): @staticmethod def test_init_mount_bad_remote_env(): - with pytest.raises(IncorrectTypeError) as err_info: + with pytest.raises(IncorrectReferenceFormatError) as err_info: Mount('bad string', 'mount_path', 'shared_path') assert err_info.value.message == ( - "Mount's parameter 'remote_environment' was type 'str' but" - " should be of" - " class 'dlpx.virtualization.common._common_classes.RemoteEnvironment'.") + "Reference 'bad string' is not a correctly formatted host environment " + "reference.") @staticmethod def test_init_mount_bad_mount_path(remote_environment): @@ -77,6 +78,33 @@ def test_init_mount_spec(remote_environment): mount = Mount(remote_environment, 'mount_path', 'shared_path') MountSpecification([mount], OwnershipSpecification(10, 10)) + # Test for passing in a reference string instead of a remote_environment object, + # which a plugin author would want to do when creating an additional mount + @staticmethod + @pytest.mark.parametrize("reference_string", ['UNIX_HOST_ENVIRONMENT-10', 'WINDOWS_HOST_ENVIRONMENT-24']) + def test_init_mount_reference_string_success(reference_string): + mount = Mount(reference_string, 'mount_path', 'shared_path') + assert mount.remote_environment.reference == reference_string and mount.remote_environment.host.reference == 'dummy reference' + + @staticmethod + @pytest.mark.parametrize("reference_string", ['UNIX_HOST-ENVIRONMENT-15', 'UNIX-10', 'USER-9', 'ALERT-17', 'HOST-24', 'random string']) + def test_init_mount_incorrect_format_reference_string(reference_string): + with pytest.raises(IncorrectReferenceFormatError) as err_info: + Mount(reference_string, 'mount_path', 'shared_path') + assert err_info.value.message == ( + "Reference '{}' is not a correctly formatted host environment reference.".format(reference_string) + ) + + @staticmethod + @pytest.mark.parametrize("reference", [False, None, 1010]) + def test_init_mount_invalid_reference_type(reference): + with pytest.raises(IncorrectTypeError) as err_info: + Mount(reference, 'mount_path', 'shared_path') + assert err_info.value.message == ( + "Mount's parameter 'remote_environment' was type '{}' but " + "should be of any one of the following types: '['dlpx.virtualization.common._common_classes.RemoteEnvironment', 'basestring']'.".format(type(reference).__name__) + ) + @staticmethod def test_init_mount_spec_mounts_not_list(): with pytest.raises(IncorrectTypeError) as err_info: diff --git a/platform/src/test/python/dlpx/virtualization/test_upgrade.py b/platform/src/test/python/dlpx/virtualization/test_upgrade.py new file mode 100755 index 00000000..1ede1e5e --- /dev/null +++ b/platform/src/test/python/dlpx/virtualization/test_upgrade.py @@ -0,0 +1,307 
@@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +import pytest +import logging +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform.exceptions import ( + DecoratorNotFunctionError, MigrationIdAlreadyUsedError) +from dlpx.virtualization.platform.operation import Operation as Op + + +class TestUpgrade: + @staticmethod + @pytest.fixture + def my_plugin(): + from dlpx.virtualization.platform import Plugin + yield Plugin() + + @staticmethod + def basic_upgrade_helper(decorator, id_to_impl, upgrade_operation): + @decorator('2019.10.01') + def repo_upgrade_one(input_dict): + output_dict = {'in': input_dict['in'], 'out': 'first'} + return output_dict + + @decorator('2019.10.02') + def repo_upgrade_two(input_dict): + output_dict = {'in': input_dict['in'], 'out': 'second'} + return output_dict + + migration_one = id_to_impl['2019.10.1'] + migration_two = id_to_impl['2019.10.2'] + + assert migration_one == repo_upgrade_one + assert migration_two == repo_upgrade_two + assert migration_one({'in':'in_one'}) == {'in': 'in_one', + 'out': 'first'} + assert migration_two({'in':'in_two'}) == {'in': 'in_two', + 'out': 'second'} + + assert upgrade_operation.migration_id_list == ['2019.10.1', + '2019.10.2'] + + @staticmethod + def decorator_not_function_helper(decorator, op): + + with pytest.raises(DecoratorNotFunctionError) as err_info: + @decorator('2019.10.03') + class RandomClass(object): + pass + + message = err_info.value.message + assert message == ("The object '{}' decorated by '{}' is" + " not a function.".format('RandomClass', + op.value)) + + @staticmethod + def test_upgrade_repository(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.repository, + my_plugin.upgrade.repository_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.repository, Op.UPGRADE_REPOSITORY) + + @staticmethod + def test_upgrade_source_config(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.source_config, + my_plugin.upgrade.source_config_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.source_config, Op.UPGRADE_SOURCE_CONFIG) + + @staticmethod + def test_upgrade_linked_source(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.linked_source, + my_plugin.upgrade.linked_source_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.linked_source, Op.UPGRADE_LINKED_SOURCE) + + @staticmethod + def test_upgrade_virtual_source(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.virtual_source, + my_plugin.upgrade.virtual_source_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.virtual_source, Op.UPGRADE_VIRTUAL_SOURCE) + + @staticmethod + def test_upgrade_snapshot(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.snapshot, + my_plugin.upgrade.snapshot_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.snapshot, Op.UPGRADE_SNAPSHOT) + + @staticmethod + def test_upgrade_same_migration_id_used(my_plugin): + @my_plugin.upgrade.repository('2019.10.01') + def repo_upgrade_one(): + return 'repo_one' + + @my_plugin.upgrade.repository('2019.10.04') + def repo_upgrade_two(): + return 'repo_two' + + @my_plugin.upgrade.repository('2019.10.006') + def repo_upgrade_three(): + return 'repo_three' + + @my_plugin.upgrade.source_config('2019.10.02') + def sc_upgrade_one(): + return 
'sc_one' + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info_one: + @my_plugin.upgrade.source_config('2019.10.0004') + def sc_upgrade_two(): + return 'sc_two' + + @my_plugin.upgrade.linked_source('2019.10.3.000.0') + def ls_upgrade_one(): + return 'ls_one' + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info_two: + @my_plugin.upgrade.virtual_source('2019.10.03') + def vs_upgrade_one(): + return 'vs_one' + + @my_plugin.upgrade.virtual_source('2019.10.05') + def vs_upgrade_two(): + return 'vs_two' + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info_three: + @my_plugin.upgrade.snapshot('2019.010.001') + def snap_upgrade_one(): + return 'snap_one' + + @my_plugin.upgrade.snapshot('2019.10.12') + def snap_upgrade_two(): + return 'snap_two' + + assert my_plugin.upgrade.migration_id_list == ['2019.10.1', + '2019.10.2', + '2019.10.3', + '2019.10.4', + '2019.10.5', + '2019.10.6', + '2019.10.12'] + + repo_one = my_plugin.upgrade.repository_id_to_impl['2019.10.1'] + repo_two = my_plugin.upgrade.repository_id_to_impl['2019.10.4'] + repo_three = my_plugin.upgrade.repository_id_to_impl['2019.10.6'] + assert repo_one == repo_upgrade_one + assert repo_two == repo_upgrade_two + assert repo_three == repo_upgrade_three + + sc_one = my_plugin.upgrade.source_config_id_to_impl['2019.10.2'] + assert sc_one == sc_upgrade_one + + ls_one = my_plugin.upgrade.linked_source_id_to_impl['2019.10.3'] + assert ls_one == ls_upgrade_one + + vs_two = my_plugin.upgrade.virtual_source_id_to_impl['2019.10.5'] + assert vs_two == vs_upgrade_two + + snap_two = my_plugin.upgrade.snapshot_id_to_impl['2019.10.12'] + assert snap_two == snap_upgrade_two + + assert err_info_one.value.message == ( + "The migration id '2019.10.0004' used in the function" + " 'sc_upgrade_two' has the same canonical form '2019.10.4'" + " as another migration.") + + assert err_info_two.value.message == ( + "The migration id '2019.10.03' used in the function" + " 'vs_upgrade_one' has the same canonical form '2019.10.3'" + " as another migration.") + + assert err_info_three.value.message == ( + "The migration id '2019.010.001' used in the function" + " 'snap_upgrade_one' has the same canonical form '2019.10.1'" + " as another migration.") + + @staticmethod + @pytest.fixture + def caplog(caplog): + caplog.set_level(logging.DEBUG) + return caplog + + @staticmethod + @pytest.fixture + def upgrade_request(fake_map_param, upgrade_type): + return platform_pb2.UpgradeRequest( + pre_upgrade_parameters=fake_map_param, + type=upgrade_type, + migration_ids=[] + ) + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_REPOSITORY-1': '{}', + 'APPDATA_REPOSITORY-2': '{}', + 'APPDATA_REPOSITORY-3': '{}' + }, platform_pb2.UpgradeRequest.REPOSITORY, + )]) + def test_repository(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_repository( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade repositories [APPDATA_REPOSITORY-1,' + ' APPDATA_REPOSITORY-2, APPDATA_REPOSITORY-3]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_SOURCE_CONFIG-1': '{}', + 'APPDATA_SOURCE_CONFIG-2': '{}', + 'APPDATA_SOURCE_CONFIG-3': '{}', + 'APPDATA_SOURCE_CONFIG-4': '{}' + }, 
platform_pb2.UpgradeRequest.SOURCECONFIG, + )]) + def test_source_config(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_source_config( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade source configs [APPDATA_SOURCE_CONFIG-1,' + ' APPDATA_SOURCE_CONFIG-2, APPDATA_SOURCE_CONFIG-3,' + ' APPDATA_SOURCE_CONFIG-4]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_STAGED_SOURCE-1': '{}', + 'APPDATA_STAGED_SOURCE-2': '{}', + 'APPDATA_STAGED_SOURCE-3': '{}' + }, platform_pb2.UpgradeRequest.LINKEDSOURCE, + )]) + def test_linked_source(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_linked_source( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade linked sources [APPDATA_STAGED_SOURCE-1,' + ' APPDATA_STAGED_SOURCE-2, APPDATA_STAGED_SOURCE-3]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_VIRTUAL_SOURCE-1': '{}', + 'APPDATA_VIRTUAL_SOURCE-2': '{}' + }, platform_pb2.UpgradeRequest.VIRTUALSOURCE, + )]) + def test_virtual_source( + my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_virtual_source( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade virtual sources [APPDATA_VIRTUAL_SOURCE-1,' + ' APPDATA_VIRTUAL_SOURCE-2]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_SNAPSHOT-1': '{}' + }, platform_pb2.UpgradeRequest.SNAPSHOT, + )]) + def test_snapshot(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_snapshot( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade snapshots [APPDATA_SNAPSHOT-1]') diff --git a/settings.gradle b/settings.gradle deleted file mode 100644 index d3d945fc..00000000 --- a/settings.gradle +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -pluginManagement { - repositories { - gradlePluginPortal() - mavenCentral() - mavenLocal() // Allows use of locally build Python plugins. 
- maven { - name "delphix" - url "http://artifactory.delphix.com/artifactory/delphix-gradle-plugins" - } - } -} - -include ":common" -include ":libs" -include ":platform" -include ":tools" -include ":dvp" - -rootProject.name = 'virtualization-sdk' - -if (file("../delphix-gradle-python").isDirectory()) { - includeBuild "../delphix-gradle-python" -} diff --git a/tools/MANIFEST.in b/tools/MANIFEST.in index 3e446699..a9138fbd 100644 --- a/tools/MANIFEST.in +++ b/tools/MANIFEST.in @@ -11,3 +11,4 @@ recursive-include src/main/python/dlpx/virtualization/_internal/codegen/template recursive-include src/main/python/dlpx/virtualization/_internal/commands/plugin_template * recursive-include src/main/python/dlpx/virtualization/_internal/validation_schemas * recursive-include src/main/python *.cfg +include src/main/python/dlpx/virtualization/_internal/VERSION diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock deleted file mode 100644 index fdb6bc53..00000000 --- a/tools/Pipfile.lock +++ /dev/null @@ -1,561 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "4fae75364b5aae007c1c8c90c765e2fcac5b420f039af4bd19110572dc496139" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", - "verifySsl": true - } - ] - }, - "default": { - "attrs": { - "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" - ], - "version": "==19.1.0" - }, - "certifi": { - "hashes": [ - "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939", - "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695" - ], - "version": "==2019.6.16" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "click": { - "hashes": [ - "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", - "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" - ], - "index": "delphix", - "version": "==7.0" - }, - "click-configfile": { - "hashes": [ - "sha256:95beec13bee950e98f43c81dcdabef4f644091559ea66298f9dadf59351d90d1", - "sha256:af2ae7123af57d850cd18edd915893e655b6b1bc30d1302fd040b1059bec073d" - ], - "index": "delphix", - "version": "==0.2.3" - }, - "configparser": { - "hashes": [ - "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", - "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" - ], - "markers": "python_version < '3.2'", - "version": "==3.7.4" - }, - "entrypoints": { - "hashes": [ - "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", - "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" - ], - "version": "==0.3" - }, - "enum34": { - "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "index": "delphix", - "markers": null, - "version": "==1.1.6" - }, - "flake8": { - "hashes": [ - "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548", - "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696" - ], - 
"index": "delphix", - "version": "==3.7.8" - }, - "functools32": { - "hashes": [ - "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", - "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" - ], - "markers": "python_version < '3.2'", - "version": "==3.2.3.post2" - }, - "idna": { - "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" - ], - "version": "==2.8" - }, - "jinja2": { - "hashes": [ - "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", - "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" - ], - "index": "delphix", - "version": "==2.10.1" - }, - "jsonschema": { - "hashes": [ - "sha256:0c0a81564f181de3212efa2d17de1910f8732fa1b71c42266d983cd74304e20d", - "sha256:a5f6559964a3851f59040d3b961de5e68e70971afb88ba519d27e6a039efff1a" - ], - "index": "delphix", - "version": "==3.0.1" - }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" - ], - "version": "==1.1.1" - }, - "mccabe": { - "hashes": [ - "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", - "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" - ], - "version": "==0.6.1" - }, - "protobuf": { - "hashes": [ - "sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4", - 
"sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811", - "sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444", - "sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96", - "sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2", - "sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef", - "sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e", - "sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995", - "sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed", - "sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9", - "sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90", - "sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19", - "sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625", - "sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9", - "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", - "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" - ], - "index": "delphix", - "version": "==3.6.1" - }, - "pycodestyle": { - "hashes": [ - "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", - "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" - ], - "version": "==2.5.0" - }, - "pyflakes": { - "hashes": [ - "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", - "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2" - ], - "version": "==2.1.1" - }, - "pyrsistent": { - "hashes": [ - "sha256:34b47fa169d6006b32e99d4b3c4031f155e6e68ebcc107d6454852e8e0ee6533" - ], - "version": "==0.15.4" - }, - "pyyaml": { - "hashes": [ - "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", - "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", - "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", - "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", - "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", - "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", - "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", - "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", - "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", - "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", - "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", - "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", - "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" - ], - "index": "delphix", - "version": "==5.1.2" - }, - "requests": { - "hashes": [ - "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", - "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" - ], - "index": "delphix", - "version": "==2.22.0" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "typing": { - "hashes": [ - "sha256:38566c558a0a94d6531012c8e917b1b8518a41e418f7f15f00e129cc80162ad3", - "sha256:53765ec4f83a2b720214727e319607879fec4acde22c4fbb54fa2604e79e44ce", - 
"sha256:84698954b4e6719e912ef9a42a2431407fe3755590831699debda6fba92aac55" - ], - "index": "delphix", - "markers": "python_version < '3.5'", - "version": "==3.7.4" - }, - "urllib3": { - "hashes": [ - "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1", - "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232" - ], - "version": "==1.25.3" - } - }, - "develop": { - "atomicwrites": { - "hashes": [ - "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", - "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" - ], - "version": "==1.3.0" - }, - "attrs": { - "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" - ], - "version": "==19.1.0" - }, - "backports.functools-lru-cache": { - "hashes": [ - "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a", - "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd" - ], - "markers": "python_version < '3.2'", - "version": "==1.5" - }, - "configparser": { - "hashes": [ - "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32", - "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75" - ], - "markers": "python_version < '3.2'", - "version": "==3.7.4" - }, - "contextlib2": { - "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" - ], - "markers": "python_version < '3'", - "version": "==0.5.5" - }, - "coverage": { - "hashes": [ - "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", - "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", - "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", - "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", - "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", - "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", - "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", - "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", - "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", - "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", - "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", - "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", - "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", - "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", - "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", - "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", - "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", - "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", - "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", - "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", - "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", - "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", - "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", - "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", - 
"sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", - "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", - "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", - "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", - "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", - "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", - "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", - "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025" - ], - "index": "delphix", - "version": "==4.5.4" - }, - "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.0.tar.gz", - "version": "== 1.0.0" - }, - "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.0.0.tar.gz", - "version": "== 1.0.0" - }, - "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.0.0.tar.gz", - "version": "== 1.0.0" - }, - "entrypoints": { - "hashes": [ - "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", - "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" - ], - "version": "==0.3" - }, - "enum34": { - "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" - ], - "index": "delphix", - "markers": null, - "version": "==1.1.6" - }, - "flake8": { - "hashes": [ - "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548", - "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696" - ], - "index": "delphix", - "version": "==3.7.8" - }, - "funcsigs": { - "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" - ], - "markers": "python_version < '3.3'", - "version": "==1.0.2" - }, - "functools32": { - "hashes": [ - "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", - "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" - ], - "markers": "python_version < '3.2'", - "version": "==3.2.3.post2" - }, - "futures": { - "hashes": [ - "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16", - "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794" - ], - "markers": "python_version < '3.2'", - "version": "==3.3.0" - }, - "httpretty": { - "hashes": [ - "sha256:01b52d45077e702eda491f4fe75328d3468fd886aed5dcc530003e7b2b5939dc" - ], - "index": "delphix", - "version": "==0.9.6" - }, - "importlib-metadata": { - "hashes": [ - "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8", - "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3" - ], - "version": "==0.19" - }, - "isort": { - "hashes": [ - "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", - "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" - ], - "index": "delphix", - "version": "==4.3.21" - }, - "mccabe": { - "hashes": [ - "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", - "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" - ], - "version": "==0.6.1" - }, - "mock": { - "hashes": [ - 
"sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", - "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" - ], - "index": "delphix", - "version": "==3.0.5" - }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "markers": "python_version <= '2.7'", - "version": "==5.0.0" - }, - "packaging": { - "hashes": [ - "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9", - "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe" - ], - "version": "==19.1" - }, - "pathlib2": { - "hashes": [ - "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" - ], - "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.4" - }, - "pluggy": { - "hashes": [ - "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", - "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c" - ], - "version": "==0.12.0" - }, - "py": { - "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" - ], - "version": "==1.8.0" - }, - "pycodestyle": { - "hashes": [ - "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", - "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" - ], - "version": "==2.5.0" - }, - "pyflakes": { - "hashes": [ - "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", - "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2" - ], - "version": "==2.1.1" - }, - "pyparsing": { - "hashes": [ - "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", - "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" - ], - "version": "==2.4.2" - }, - "pytest": { - "hashes": [ - "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae", - "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6" - ], - "index": "delphix", - "version": "==4.6.4" - }, - "pytest-cov": { - "hashes": [ - "sha256:2b097cde81a302e1047331b48cadacf23577e431b61e9c6f49a1170bbe3d3da6", - "sha256:e00ea4fdde970725482f1f35630d12f074e121a23801aabf2ae154ec6bdd343a" - ], - "index": "delphix", - "version": "==2.7.1" - }, - "scandir": { - "hashes": [ - "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", - "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", - "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", - "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", - "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", - "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", - "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", - "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", - "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", - "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", - "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" - ], - "markers": "python_version < '3.5'", - "version": "==1.10.0" - }, - "six": { 
- "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "typing": { - "hashes": [ - "sha256:38566c558a0a94d6531012c8e917b1b8518a41e418f7f15f00e129cc80162ad3", - "sha256:53765ec4f83a2b720214727e319607879fec4acde22c4fbb54fa2604e79e44ce", - "sha256:84698954b4e6719e912ef9a42a2431407fe3755590831699debda6fba92aac55" - ], - "index": "delphix", - "markers": "python_version < '3.5'", - "version": "==3.7.4" - }, - "wcwidth": { - "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" - ], - "version": "==0.1.7" - }, - "yapf": { - "hashes": [ - "sha256:02ace10a00fa2e36c7ebd1df2ead91dbfbd7989686dc4ccbdc549e95d19f5780", - "sha256:6f94b6a176a7c114cfa6bad86d40f259bbe0f10cf2fa7f2f4b3596fc5802a41b" - ], - "index": "delphix", - "version": "==0.28.0" - }, - "zipp": { - "hashes": [ - "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a", - "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec" - ], - "version": "==0.5.2" - } - } -} diff --git a/tools/README-dev.md b/tools/README-dev.md index af138447..740a1e1d 100644 --- a/tools/README-dev.md +++ b/tools/README-dev.md @@ -1,89 +1,74 @@ # Delphix Virtualization SDK Tools ## Purpose -These tools will be one piece of the Delphix Virtualization SDK. Plugin developers will install these tools locally and use them to develop, build, and ship virtualization plugins. - -## NOTE -This guide assumes that the current working directory is the root of the tools directory where this README is. - -## The absolute basics -- `../gradlew build` to get started developing -- `../gradlew check` must pass for code reviews. Copy and paste the output into reviews +The `tools` package represents a CLI of the Delphix Virtualization SDK. Plugin developers will install the CLI to build +and upload virtualization plugins. ## Getting Started ### Development Environment -To setup a development environment: - -`../gradlew build` - -This task will take few minutes to run and does few things like setup the required python binaries, virtualenv, format source code and sort the imports. - -Once the task has completed, check the output to setup recommended environment variables (e.g. export PATH=/Users//src/v-sdk/tools/build/pipsi/bin:$PATH). Next, setup virtualenv by running the following commands: - -1. ../gradlew makeSetupPy (this command will regenerate the setup.py file that pipenv uses to create the local virtual environment) -2. `pipenv install -e .` - -NOTE: If build reports any issues with 'virtualenv' not found and/or errors finding virtualenv in the PATH, check and make sure virtualenv from Python 2.7 is being used and not from 3.x. Few places to check, for e.g. on Mac - /usr/local/bin/virtualenv, ~/Library/Python/2.7/virtualenv. - +To setup the development environment, follow the instructions in [README-dev.md](https://github.com/delphix/virtualization-sdk/blob/develop/README-dev.md) -You’re ready to start developing. +For quick iterations, install the `tools` package in editable mode (`pip install -e .`). This means that changes to the +code will automatically be reflected in your environment. You will not need to reinstall the tools module each time +a change is made. -The development environment will already have all development and production dependencies installed in it. 
The tools module will be installed in ‘editable’ mode. This means that changes to the code will automatically be reflected in your environment. You will not need to reinstall the tools module each time a change is made. +Changes to the code can be tested using `dvp <command>`. `dvp` is the CLI tool built from the source here that +helps build and upload a plugin to DE. -Changes to the code can be tested using "pipenv run dvp <command>". 'dvp' is the CLI tool that is built from the source here that helps build and upload a plugin to DE. Alternatively, dvp can be run by doing the following - +### Adding a command +The CLI uses [Click](https://click.palletsprojects.com/en/7.x/). This is a decorator-driven library that builds a full +CLI. -1. Make sure 'pipenv install -e .' completes successfully. -2. Run 'pipenv shell' to activate the virtual environment created above in step 1. -3. Install dvp-common, dvp-libs, dv-platform packages (either from Artifactory or the respective local build/pyhton-dist directory) using the command `pip install <package>`. -3. Now, run dvp just like any other command, e.g. `dvp -h` -4. To exit out of the virtual environment shell, type `exit`. +The starting point for the CLI is `virtualization._internal.cli:delphix_sdk`. This is annotated with the `@click.group` +annotation indicating there will be subcommands. -NOTE: If you run into: `ERROR: The Python zlib extension was not compiled. Missing the zlib?` this is because you're missing a piece of XCode. Install the requirements listed [here](https://github.com/pyenv/pyenv/wiki/Common-build-problems). +All that is needed to add a command is to define a new method in `virtualization._internal.cli`. **The method's name +will be the name of the command.** Annotate the method with `@delphix_sdk.command()`. It will automatically be added +as a subcommand. Look at the [Click documentation](https://click.palletsprojects.com/en/7.x/) to see other annotations +available to configure the command. -#### virtualenv care -For the most part, the virtual environment should be managed through gradle. However, it is just a virtual environment at the end of the day. Anything can be installed into it. Be careful with this. If a new dependency is added it may work on one development environment but not another unless it’s been added to the dependencies specified in the Gradle build file. +Click should be contained to `virtualization._internal.cli`. The implementation of the method should call immediately +into a method in another module. `virtualization._internal.cli` is the single source of truth for our CLI and should not +contain any business logic. -`../gradlew clear` will recreate your virtualenv without needing to download the Python source code and compile it. - -#### PyCharm -Feel free to use whichever IDE you want. PyCharm makes iterating and executing tests particularly easy. PyCharm needs to be setup with the Python binary from the virtualenv otherwise dependencies will not be configured correctly. - -TODO: commands here - -## Adding a command -The CLI uses [Click](https://click.palletsprojects.com/en/7.x/). This is a decorator driven library builds a full CLI. - -The starting point for the CLI is `virtualization._internal.cli:delphix_sdk`. This is annotated with the `@click.group` annotation indicating there will be subcommands. -All that is needed to add a command is to define a new method in `virtualization._internal.cli`. **The method's name will be the name of the command.** Annotate the method with `@delphix_sdk.command()`.
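To make the pattern concrete, here is a minimal, self-contained sketch. The `hello` command, its option, and the standalone group are hypothetical; only the `delphix_sdk` group name and the `@delphix_sdk.command()` decorator pattern come from this README:

```python
import click


# Stand-in for the real group defined in virtualization._internal.cli.
@click.group()
def delphix_sdk():
    """Top-level CLI group; each decorated function below becomes a subcommand."""


# The function's name becomes the command name, so this defines a `hello` command.
@delphix_sdk.command()
@click.option('-n', '--name', default='world', show_default=True,
              help='Name to greet.')
def hello(name):
    """Print a greeting; real commands should delegate to another module."""
    click.echo('Hello, {}!'.format(name))


if __name__ == '__main__':
    delphix_sdk()
```

Running `python example.py hello --name dev` would print `Hello, dev!`; in the installed CLI, the same function would be reachable as `dvp hello`.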
It will automatically be added as a subcommand. Look at the [Click documentation](https://click.palletsprojects.com/en/7.x/) to see other annotations available to configure the command. +### Testing -Click should be contained to `virtualization._internal.cli`. The implementation of the method should call immediately into a method in another module. `virtualization._internal.cli` is the single source of truth for our CLI and should not contain any business logic. +#### Manual testing +It's easy to manually test the CLI as you can invoke different SDK CLI commands from the command line. -## Committing Code -`../gradlew check` and `../gradlew sdist` must pass for code to be committed. `check` does three things at a high level: lint, format, and test. This will actually format the whitespace in both the src and test directories and organize imports. All tests will be executed and coverage will be printed. +To test the build command, run `dvp build --dev`. -The output of `../gradlew check` command should be copied and pasted into reviews. +When you run `dvp build`, the wrappers are built with the plugin. This builds `common`, `libs`, and `platform` +locally and bundles them with the plugin. If you don't pass the `--dev` flag, `dvp build` will search for wrappers on PyPI. +To enable building wrappers from source instead, a special configuration entry is needed in your dvp config file which +is located at `~/.dvp/config`: -## Distribution -Make sure `../gradlew build` reports success and then run `./gradlew sdist` from the root directory after making the SDK version change in build.gradle. This will make sure all dvp packages are created with the right version. Upload the SDK and dvp packages to Artifactory as described in the README.md file located at the root of the source repository. + +``` +[dev] +vsdk_root = /path/to/vsdk_repo_root +``` + +#### Unit and functional (blackbox) testing +Refer to [../README-dev.md](https://github.com/delphix/virtualization-sdk/blob/develop/README-dev.md). ## Decisions ### CLI -We chose [Click](https://click.palletsprojects.com/en/7.x/) to build our CLI. It is simple, powerful, and dramatically reduces the amount of code we would have to write if we used argparse. Click is more limiting in ways. It is opinionated and not completely flexible. This is a limitation we're okay with. The benefits of having a feature full library out way the risks. Our CLI is also relatively simple which reduces the impact of using an opinionated library. +We chose [Click](https://click.palletsprojects.com/en/7.x/) to build our CLI. It is simple, powerful, and dramatically +reduces the amount of code we would have to write if we used argparse. Click is more limiting in some ways. It is opinionated +and not completely flexible. This is a limitation we're okay with. The benefits of having a feature-full library outweigh +the risks. Our CLI is also relatively simple, which reduces the impact of using an opinionated library. -We looked at cement. This was heavier weight and only supported Python 3. We also looked at argparse, but chose Click due to the reasons above. +We looked at cement. This was heavier-weight and only supported Python 3. We also looked at argparse, but chose Click +due to the reasons above. ### Testing -We chose [PyTest](https://docs.pytest.org/en/latest/) in combination with [coverage](https://pytest-cov.readthedocs.io/en/latest/). The builtin unittest is the other popular option. PyTest seems to be more popular and flexible. It was also recommended internally by people at Delphix.
This is a decision we can revisit if we see that PyTest is insufficient. +We chose [PyTest](https://docs.pytest.org/en/latest/) in combination with [coverage](https://pytest-cov.readthedocs.io/en/latest/). +The builtin unittest is the other popular option. PyTest seems to be more popular and flexible. It was also recommended +internally by people at Delphix. This is a decision we can revisit if we see that PyTest is insufficient. ### Formatting -We chose [yapf](https://github.com/google/yapf) as our formatter. It's easy and does what we want right out of the box. We would like to use [black](https://github.com/ambv/black) as this is what the QA gate uses but it only supports Python 3. - -### virtualenv managmenet -We chose JetBrain's [Gradle Python Envs](https://github.com/JetBrains/gradle-python-envs) plugin. This has some major limitations. It downloads Python source code and compiles it. This leads to long setup times. It also only does virtualenv management. Functionality to execute python inside of the virtual environment is left to us to build. For example, in order to run tests in both Python 2 and Python 3 we would need to write logic to active the Python 2 venv, run the tests, deactivate the Python 2 venv, active the Python 3 venv, run the tests, and finally deactivate the Python 3 venv all the while keeping track of where each binary is for each venv. This quickly leads to complex build files. - -However, there aren't many other solutions to create reproducible development environments. Pipfile is great when deploying an application, but does not work very well when you're shipping a library. pipsi is designed to isolate applications. It's something a _user_ of the CLI might opt into, but doesn't work as well for development. +We chose [yapf](https://github.com/google/yapf) as our formatter. It's easy and does what we want right out of the box. +We would like to use [black](https://github.com/ambv/black) as this is what the QA gate uses but it only supports Python 3. -Additionally, the QA and devops gates use this plugin. We would all like to move off of it, but figured it was better to move off one technology than multiple. diff --git a/tools/build.gradle b/tools/build.gradle deleted file mode 100644 index 551b1b42..00000000 --- a/tools/build.gradle +++ /dev/null @@ -1,271 +0,0 @@ -/** - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -plugins { - id "delphix.python" -} - -def devVenvDir = project.layout.buildDirectory.file("test-venvs/python2.7") -def bootstrapVEnvBinary = project.layout.buildDirectory.file("pipsi/bin/virtualenv").get().asFile - -/* - * DEVELOPMENT TASKS - */ - -def devPythonBinary = "${buildDir}/test-venvs/python2.7/bin/python" - -def htmlReportDir = "${buildDir}/html-coverage" -def htmlReportFiles = fileTree(dir: htmlReportDir, - includes: [ - "**/*.html", - "**/*.js", - "**/*.png", - "**/*.json", - "**/*.css", - ]) - - -// The "check" task should also lint and format. -check { - dependsOn 'lint' - dependsOn 'format' - dependsOn 'coverage' -} - -// A convenience task that formats both source and test code. 
-task format() { - dependsOn 'formatSrc' - dependsOn 'formatTest' - dependsOn 'sortSrcImports' - dependsOn 'sortTestImports' -} - -task coverage(type: Exec) { - commandLine devPythonBinary, '-m', 'pytest', '--cov=src/main/python', "${projectDir}/src/test/python" - - environment([ - PYTHONPATH: "${projectDir}/src/main/python:${projectDir}/src/test/python", - ]) - - dependsOn "sync_python2.7" - - outputs.file("$buildDir/.coverage") - outputs.upToDateWhen {false} -} - -task runHtmlReport(type: Exec, dependsOn: 'coverage') { - commandLine devPythonBinary, '-m', 'coverage', 'html', '--dir', "${buildDir}/html-coverage", - '--include', "*/${project.name}/src/main/python/*" - - outputs.files(htmlReportFiles) -} - -task lint() { - dependsOn 'lintSrc' - dependsOn 'lintTest' -} - -task clearVEnv(type: Exec) { - commandLine bootstrapVEnvBinary.path, devVenvDir.get().asFile.path, '--clear' - - dependsOn "sync_python2.7" -} - -/* - * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information - * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in - * 'build' so these would be created there, however they still could be created in the 'src' directory if someone runs - * 'setup.py' manually. This is often done during development to install the package for testing. - */ -task removeEggInfo(type: Delete) { - delete "${projectDir}/src/main/python/dvp_tools.egg-info" -} - -clean.dependsOn('removeEggInfo') - -/* -* BUILD TASKS -*/ - -/* - * HELPER TASKS - * - * This are just wrappers around Python commands and need to be moved into - * a plugin. - */ - -def pySrc = "${projectDir}/src/main/python" -def pyTestSrc = "${projectDir}/src/test/python" - -task formatSrc(type: Exec) { - commandLine devPythonBinary, '-m', 'yapf', '--recursive', '--in-place', pySrc - - dependsOn "sync_python2.7" -} - -task formatTest(type: Exec) { - commandLine devPythonBinary, '-m', 'yapf', '--recursive', '--in-place', pyTestSrc - - dependsOn "sync_python2.7" -} - -task sortSrcImports(type: Exec) { - commandLine devPythonBinary, '-m', 'isort', '-rc', pySrc - - dependsOn "sync_python2.7" -} - -task sortTestImports(type: Exec) { - commandLine devPythonBinary, '-m', 'isort', '-rc', pyTestSrc - - dependsOn "sync_python2.7" -} - -task lintSrc(type: Exec) { - commandLine devPythonBinary, '-m', 'flake8', pySrc - - dependsOn "sync_python2.7" -} - -task lintTest(type: Exec) { - commandLine devPythonBinary, '-m', 'flake8', pyTestSrc - - dependsOn "sync_python2.7" -} - -artifacts { - python sdist.distFile -} - -dlpxPython { - sources { - delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" - } - } - - dist { - name = "dvp-tools" - } - - packages { - click { - version = ">= 7.0" - } - - "click-configfile" { - version = "== 0.2.3" - } - - flake8 { - version = ">= 3.6" - } - - protobuf { - version = "== 3.6.1" - } - - pyyaml { - version = ">= 3" - } - - jsonschema { - version = ">= 3" - } - - requests { - version = ">= 2.21.0" - } - - typing { - markers = "python_version < '3.5'" - } - - jinja2 { - version = ">= 2.10" - } - - enum34 { - version = ">= 1.1.6" - } - - } - - devPackages { - coverage { - version = ">= 4.5" - } - - flake8 { - version = ">= 3.6" - } - - httpretty { - version = ">= 0.9.6" - } - - isort { - version = ">= 4.3" - } - - mock { - version = ">=2.0" - } - - pytestCov { - dist = "pytest-cov" - version = ">= 2.6" - } - - yapf { - version = ">= 0.25" - } - - /* - * 
Following dvp-* packages are just meant for dev/test envs so that - * dependent packages are available in path when running unit tests - * under the tools package. These packages should never be production - * dependencies as they need to be shipped independently. - */ - "dvp-common" { - version = "== $project.version" - path = file(tasks.getByPath(":common:sdist").getDistFile().toString()) - } - - "dvp-platform" { - version = "== $project.version" - path = file(tasks.getByPath(":platform:sdist").getDistFile().toString()) - } - - "dvp-libs" { - version = "== $project.version" - path = file(tasks.getByPath(":libs:sdist").getDistFile().toString()) - } - } - - supportedPythons { - "python2.7" {} - } -} - -task "testDelphixBin_python2.7"(type: Exec) { - executable "${buildDir}/install-test-venvs/python2.7/bin/dvp" - args "--help" - - dependsOn "testInstallation_python2.7" -} - -task wheel(type: SetupPyTask) { - setupPyCommand "bdist_wheel" - distFile String.format("%s-%s-%s-%s-%s.whl", dist.name.get().replace("-", "_"), "$project.version".replace("-", "_"), "py2", "none", "any") - - dependsOn makeSetupPy -} - -// sdist and the python tests depend on the generated python libraries -// from the protoc plugin. Must manually specify as plugins are not aware -// of each other. -project.afterEvaluate { - tasks["test"].dependsOn tasks["testDelphixBin_python2.7"] -} diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt deleted file mode 100644 index 7171f660..00000000 --- a/tools/lock.dev-requirements.txt +++ /dev/null @@ -1,37 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.0.tar.gz -./../libs/build/python-dist/dvp-libs-1.0.0.tar.gz -./../platform/build/python-dist/dvp-platform-1.0.0.tar.gz -atomicwrites==1.3.0 -attrs==19.1.0 -backports.functools-lru-cache==1.5 ; python_version < '3.2' -configparser==3.7.4 ; python_version < '3.2' -contextlib2==0.5.5 ; python_version < '3' -coverage==4.5.4 -entrypoints==0.3 -enum34==1.1.6 -flake8==3.7.8 -funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3.2' -futures==3.3.0 ; python_version < '3.2' -httpretty==0.9.6 -importlib-metadata==0.19 -isort==4.3.21 -mccabe==0.6.1 -mock==3.0.5 -more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.1 -pathlib2==2.3.4 ; python_version == '3.4.*' or python_version < '3' -pluggy==0.12.0 -py==1.8.0 -pycodestyle==2.5.0 -pyflakes==2.1.1 -pyparsing==2.4.2 -pytest-cov==2.7.1 -pytest==4.6.4 -scandir==1.10.0 ; python_version < '3.5' -six==1.12.0 -typing==3.7.4 ; python_version < '3.5' -wcwidth==0.1.7 -yapf==0.28.0 -zipp==0.5.2 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt deleted file mode 100644 index 8bd06e4e..00000000 --- a/tools/lock.requirements.txt +++ /dev/null @@ -1,25 +0,0 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -attrs==19.1.0 -certifi==2019.6.16 -chardet==3.0.4 -click-configfile==0.2.3 -click==7.0 -configparser==3.7.4 ; python_version < '3.2' -entrypoints==0.3 -enum34==1.1.6 -flake8==3.7.8 -functools32==3.2.3.post2 ; python_version < '3.2' -idna==2.8 -jinja2==2.10.1 -jsonschema==3.0.1 -markupsafe==1.1.1 -mccabe==0.6.1 -protobuf==3.6.1 -pycodestyle==2.5.0 -pyflakes==2.1.1 -pyrsistent==0.15.4 -pyyaml==5.1.2 -requests==2.22.0 -six==1.12.0 -typing==3.7.4 ; python_version < '3.5' -urllib3==1.25.3 diff --git a/tools/requirements.txt b/tools/requirements.txt new file mode 100644 index 
00000000..bc7f0831 --- /dev/null +++ b/tools/requirements.txt @@ -0,0 +1,34 @@ +./../common +./../libs +./../platform +backports.functools-lru-cache==1.6.1 ; python_version < '3.2' +bump2version==0.5.11 +contextlib2==0.6.0.post1 ; python_version < '3' +coverage==5.0.2 +entrypoints==0.3 +enum34==1.1.6 +flake8==3.7.9 +funcsigs==1.0.2 ; python_version < '3.3' +functools32==3.2.3.post2 ; python_version < '3' +futures==3.3.0 ; python_version < '3.2' +httpretty==0.9.7 +importlib-metadata==1.3.0 ; python_version < '3.8' +isort==4.3.21 +mccabe==0.6.1 +mock==3.0.5 +more-itertools==5.0.0 +packaging==20.0 +pathlib2==2.3.5 ; python_version < '3' +pluggy==0.13.1 +py==1.8.1 +pycodestyle==2.5.0 +pyflakes==2.1.1 +pyparsing==2.4.6 +pytest-cov==2.8.1 +pytest==4.6.9 +scandir==1.10.0 ; python_version < '3.5' +six==1.13.0 +typing==3.7.4.1 ; python_version < '3.5' +wcwidth==0.1.8 +yapf==0.28 +zipp==0.6.0 diff --git a/tools/setup.py b/tools/setup.py new file mode 100644 index 00000000..b5e0edc1 --- /dev/null +++ b/tools/setup.py @@ -0,0 +1,26 @@ +import os +import setuptools + +PYTHON_SRC = 'src/main/python' + +with open(os.path.join(PYTHON_SRC, 'dlpx/virtualization/_internal/VERSION')) as version_file: + version = version_file.read().strip() + +install_requires = [ + "click >= 7.1", + "click-configfile == 0.2.3", + "dvp-platform == {}".format(version), + "enum34 >= 1.1.6", + "flake8 >= 3.6", + "jinja2 >= 2.10", + "jsonschema >= 3", + "pyyaml >= 3", + "requests >= 2.21.0", +] + +setuptools.setup(name='dvp-tools', + version=version, + install_requires=install_requires, + package_dir={'': PYTHON_SRC}, + packages=setuptools.find_packages(PYTHON_SRC), +) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/VERSION b/tools/src/main/python/dlpx/virtualization/_internal/VERSION new file mode 100644 index 00000000..359a5b95 --- /dev/null +++ b/tools/src/main/python/dlpx/virtualization/_internal/VERSION @@ -0,0 +1 @@ +2.0.0 \ No newline at end of file diff --git a/tools/src/main/python/dlpx/virtualization/_internal/cli.py b/tools/src/main/python/dlpx/virtualization/_internal/cli.py index 2c37afc0..227c3ff3 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/cli.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/cli.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -9,9 +9,8 @@ from contextlib import contextmanager import click -from dlpx.virtualization._internal import (click_util, exceptions, - logging_util, package_util, - util_classes) +from dlpx.virtualization._internal import (click_util, const, exceptions, + logging_util, package_util) from dlpx.virtualization._internal.commands import build as build_internal from dlpx.virtualization._internal.commands import \ download_logs as download_logs_internal @@ -32,6 +31,22 @@ # This is needed to add -h as an option for the help menu. CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'], obj=click_util.ConfigFileProcessor.read_config()) +# +# This setting is added to work around the bug in click 7.1 on Windows +# when case_sensitive=False is used on click.Options. Line 187 of click +# code at https://github.com/pallets/click/blob/7.x/src/click/types.py +# fails when the lower() method is called on normed_value, as a unicode type is +# received on Windows instead of a string type. Removing case_sensitive=False +# is not a good workaround as the behaviour of the command changes.
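+# (As a hypothetical illustration of the failure mode: an option value typed +# on a Windows console can arrive as the unicode token u'DIRECT' rather than +# the byte string 'DIRECT', and the Choice conversion described above then +# fails on it; the lambda below simply encodes every token to ascii first.)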
+ +# This workaround uses token_normalize_func to convert normed_value +# into an ascii string so that when lower() is called on it, it wont fail. +# Also, chose to separate out this into a different settings instead of +# adding it to CONTEXT_SETTINGS to avoid any side-effects on other commands. +# +CONTEXT_SETTINGS_INIT = dict(help_option_names=['-h', '--help'], + obj=click_util.ConfigFileProcessor.read_config(), + token_normalize_func=lambda x: x.encode("ascii")) DVP_CONFIG_MAP = CONTEXT_SETTINGS['obj'] @@ -44,6 +59,10 @@ def command_error_handler(): logger.error(err.message) logger.debug(traceback.format_exc()) exit(1) + except Exception as err: + logger.debug(err) + logger.error('Internal error, please contact Delphix.') + exit(2) @click.group(context_settings=CONTEXT_SETTINGS) @@ -84,7 +103,7 @@ def delphix_sdk(verbose, quiet): 'Supported version is 2.7.x, found {}'.format(sys.version_info)) -@delphix_sdk.command() +@delphix_sdk.command(context_settings=CONTEXT_SETTINGS_INIT) @click.option('-r', '--root-dir', 'root', @@ -105,20 +124,27 @@ def delphix_sdk(verbose, quiet): @click.option( '-s', '--ingestion-strategy', - default=util_classes.DIRECT_TYPE, + default=const.DIRECT_TYPE, show_default=True, - type=click.Choice([util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE], + type=click.Choice([const.DIRECT_TYPE, const.STAGED_TYPE], case_sensitive=False), help=('Set the ingestion strategy of the plugin. A "direct" plugin ' 'ingests without a staging server while a "staged" plugin ' 'requires a staging server.')) -def init(root, ingestion_strategy, name): +@click.option('-t', + '--host-type', + default=const.UNIX_HOST_TYPE, + show_default=True, + type=click.Choice( + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]), + help='Set the host platform supported by the plugin.') +def init(root, ingestion_strategy, name, host_type): """ Create a plugin in the root directory. The plugin will be valid but have no functionality. """ with command_error_handler(): - init_internal.init(root, ingestion_strategy, name) + init_internal.init(root, ingestion_strategy, name, host_type) @delphix_sdk.command() @@ -167,7 +193,13 @@ def init(root, ingestion_strategy, name): hidden=True, help=('An internal flag that does not enforce the format ' 'of the id. Use of this flag is unsupported.')) -def build(plugin_config, upload_artifact, generate_only, skip_id_validation): +@click.option('--dev', + is_flag=True, + hidden=True, + help=('An internal flag that installs dev builds of the ' + 'wrappers. This should only be used by SDK developers.')) +def build(plugin_config, upload_artifact, generate_only, skip_id_validation, + dev): """ Build the plugin code and generate upload artifact file using the configuration provided in the plugin config file. @@ -175,9 +207,26 @@ def build(plugin_config, upload_artifact, generate_only, skip_id_validation): # Set upload artifact to None if -g is true. if generate_only: upload_artifact = None + + local_vsdk_root = None + with command_error_handler(): - build_internal.build(plugin_config, upload_artifact, generate_only, - skip_id_validation) + if dev: + if not DVP_CONFIG_MAP.get('dev') or not DVP_CONFIG_MAP.get( + 'dev').get('vsdk_root'): + raise RuntimeError("The dev flag was specified but there is " + "not a vsdk_root entry in the dvp config " + "file. 
Please look in the SDK's README for " + "details on configuring the vsdk_root " + "property.") + + local_vsdk_root = DVP_CONFIG_MAP.get('dev').get('vsdk_root') + + build_internal.build(plugin_config, + upload_artifact, + generate_only, + skip_id_validation, + local_vsdk_root=local_vsdk_root) @delphix_sdk.command() @@ -207,11 +256,14 @@ def build(plugin_config, upload_artifact, generate_only, skip_id_validation): resolve_path=True), callback=click_util.validate_option_exists, help='Path to the upload artifact that was generated through build.') +@click.option('--wait', + is_flag=True, + help='Wait for the upload job to complete before returning.') @click.password_option(cls=click_util.PasswordPromptIf, default=DVP_CONFIG_MAP.get('password'), confirmation_prompt=False, help='Authenticate using the provided password.') -def upload(engine, user, upload_artifact, password): +def upload(engine, user, upload_artifact, password, wait): """ Upload the generated upload artifact (the plugin JSON file) that was built to a target Delphix Engine. @@ -219,7 +271,7 @@ def upload(engine, user, upload_artifact, password): the build command and will fail if it's not readable or valid. """ with command_error_handler(): - upload_internal.upload(engine, user, upload_artifact, password) + upload_internal.upload(engine, user, upload_artifact, password, wait) @delphix_sdk.command() diff --git a/tools/src/main/python/dlpx/virtualization/_internal/click_util.py b/tools/src/main/python/dlpx/virtualization/_internal/click_util.py index 4f288dfb..b8b0941c 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/click_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/click_util.py @@ -24,6 +24,10 @@ class DvpProperties(SectionSchema): user = Param(type=str) password = Param(type=str) + @matches_section('dev') + class DevProperties(SectionSchema): + vsdk_root = Param(type=str) + class ConfigFileProcessor(ConfigFileReader): """ @@ -34,7 +38,9 @@ class ConfigFileProcessor(ConfigFileReader): os.path.expanduser(os.path.join('~', CONFIG_DIR_NAME, CONFIG_FILE_NAME)) ] - config_section_schemas = [ConfigSectionSchema.DvpProperties] + config_section_schemas = [ + ConfigSectionSchema.DvpProperties, ConfigSectionSchema.DevProperties + ] def validate_option_exists(ctx, param, value): diff --git a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py index 14b748f6..9eda39b7 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import copy @@ -10,7 +10,7 @@ import shutil import subprocess -from dlpx.virtualization._internal import exceptions, file_util, util_classes +from dlpx.virtualization._internal import const, exceptions, file_util logger = logging.getLogger(__name__) UNKNOWN_ERR = 'UNKNOWN_ERR' @@ -77,15 +77,15 @@ def generate_python(name, source_dir, plugin_config_dir, schema_content): # relevant to the plugin writer. We want to always force this to be # recreated. 
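For reference, the entry that the --dev flag above looks for lives in the dvp config file read by click_util.ConfigFileProcessor; the section and property names come from the DevProperties schema in this change, while the path value is illustrative:

    [dev]
    vsdk_root = ~/code/virtualization-sdk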
# - output_dir = os.path.join(plugin_config_dir, util_classes.OUTPUT_DIR_NAME) - logger.info('Creating new output directory: {!r}'.format(output_dir)) + output_dir = os.path.join(plugin_config_dir, const.OUTPUT_DIR_NAME) + logger.info('Creating new output directory: {}'.format(output_dir)) file_util.make_dir(output_dir, True) # # Create the json with the correct Swagger JSON specification required to # generate the objects. Write it to the output dir that we created above. # - logger.info('Writing the swagger file in {!r}'.format(output_dir)) + logger.info('Writing the swagger file in {}'.format(output_dir)) swagger_file = _write_swagger_file(name, schema_content, output_dir) # @@ -94,7 +94,7 @@ def generate_python(name, source_dir, plugin_config_dir, schema_content): # output_dir again. # logger.info('Executing swagger codegen generate with' - ' swagger file {!r}'.format(swagger_file)) + ' swagger file {}'.format(swagger_file)) _execute_swagger_codegen(swagger_file, output_dir) # @@ -104,7 +104,7 @@ def generate_python(name, source_dir, plugin_config_dir, schema_content): # classes were generated properly. # logger.info('Copying generated python files to' - ' source directory {!r}'.format(source_dir)) + ' source directory {}'.format(source_dir)) _copy_generated_to_dir(output_dir, source_dir) @@ -116,7 +116,7 @@ def _write_swagger_file(name, schema_dict, output_dir): swagger_json['definitions'].update(SNAPSHOT_PARAMS_JSON) swagger_file = os.path.join(output_dir, SWAGGER_FILE_NAME) - logger.info('Writing swagger file to {!r}'.format(swagger_file)) + logger.info('Writing swagger file to {}'.format(swagger_file)) # # Dump JSON into swagger json file. This should work since we just created # the dir `output_dir`. If this fails just let the full failure go through @@ -222,7 +222,7 @@ def _copy_generated_to_dir(src_location, dst_location): destination_dir = os.path.join(dst_location, CODEGEN_PACKAGE) file_util.make_dir(destination_dir, True) - logger.info('Copying generated files {} from {!r} to {!r}.'.format( + logger.info('Copying generated files {} from {} to {}.'.format( CODEGEN_COPY_FILES, source_dir, destination_dir)) for name in CODEGEN_COPY_FILES: @@ -233,7 +233,7 @@ def _copy_generated_to_dir(src_location, dst_location): # must include the name of of the dir for it to be copied there. 
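A side note on the {!r} -> {} changes running through this file: !r formats the value with repr(), which adds quotes and doubles backslashes, so plain {} is presumably easier to read for paths in log output, particularly on Windows. For example:

    >>> path = 'C:\\tmp\\out'
    >>> print('Creating new output directory: {!r}'.format(path))
    Creating new output directory: 'C:\\tmp\\out'
    >>> print('Creating new output directory: {}'.format(path))
    Creating new output directory: C:\tmp\out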
# shutil.copytree(src, os.path.join(destination_dir, name)) - logger.info('Successfully copied directory {!r}.'.format(name)) + logger.info('Successfully copied directory {}.'.format(name)) except OSError as err: if err.errno == errno.ENOTDIR or err.errno == errno.EINVAL: # @@ -245,7 +245,7 @@ def _copy_generated_to_dir(src_location, dst_location): # errno.EINVAL is received on windows # shutil.copy2(src, destination_dir) - logger.info('Successfully copied file {!r}.'.format(name)) + logger.info('Successfully copied file {}.'.format(name)) else: # # Since we're not expecting any other errors raise anything diff --git a/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache b/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache index 73972217..80f49d0b 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache +++ b/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache @@ -39,6 +39,28 @@ class Model(object): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) + if value is None: + # Plugins use the JSON schema specification to define their + # datatypes. JSON schemas, and therefore plugin data + # definitions, distinguish between these two independent + # situations: + # - The property X exists, and has the value `null` + # - The property X does not exist + # + # Unfortunately, Swagger's generated code conflates these two + # cases together. In either case, we'll receive `None` here. + # + # We don't know of a way that we can 100% reliably know which of + # these two cases is what the plugin code intended. However, + # we expect that real-world plugin code will almost always + # intend the "does not exist" case. + # + # So, for now, we'll simply omit these properties from the dict. + # If we want to be more sophisticated in future, we could start + # analyzing the property's subschema, or we could perhaps + # customize Swagger's generated code so it can distinguish + # these two cases. + continue attr = self.attribute_map[attr] if isinstance(value, list): result[attr] = list(map( diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py index 2e067b1e..79c353a5 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py @@ -12,8 +12,8 @@ import zipfile from dlpx.virtualization._internal import (codegen, exceptions, file_util, - package_util, plugin_util, - util_classes) + package_util, + plugin_dependency_util, plugin_util) logger = logging.getLogger(__name__) @@ -24,8 +24,14 @@ STAGED_LINKED_SOURCE_TYPE = 'PluginLinkedStagedSourceDefinition' DIRECT_LINKED_SOURCE_TYPE = 'PluginLinkedDirectSourceDefinition' +BUILD_DIR_NAME = 'build' -def build(plugin_config, upload_artifact, generate_only, skip_id_validation): + +def build(plugin_config, + upload_artifact, + generate_only, + skip_id_validation, + local_vsdk_root=None): """This builds the plugin using the configurations provided in config yaml file provided as input. It reads schemas and source code from the files given in yaml file, generates an encoded string of zip of source code, @@ -36,16 +42,21 @@ def build(plugin_config, upload_artifact, generate_only, skip_id_validation): upload_artifact: The file to which output of build is written to. 
generate_only: Only generate python classes from schema definitions. skip_id_validation: Skip validation of the plugin id. + local_vsdk_root: The local path to the root of the Virtualization SDK + repository. """ logger.debug( 'Build parameters include plugin_config: %s, upload_artifact: %s,' ' generate_only: %s', plugin_config, upload_artifact, generate_only) + if local_vsdk_root: + local_vsdk_root = os.path.expanduser(local_vsdk_root) + # Read content of the plugin config file provided and perform validations - logger.info('Reading and validating plugin config file %s', plugin_config) + logger.info('Validating plugin config file %s', plugin_config) try: - result = plugin_util.read_and_validate_plugin_config_file( - plugin_config, not generate_only, False, skip_id_validation) + result = plugin_util.validate_plugin_config_file( + plugin_config, not generate_only, skip_id_validation) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -57,11 +68,11 @@ def build(plugin_config, upload_artifact, generate_only, skip_id_validation): plugin_config, plugin_config_content['schemaFile']) # Read schemas from the file provided in the config and validate them - logger.info('Reading and validating schemas from %s', schema_file) + logger.info('Validating schemas from %s', schema_file) try: - result = plugin_util.read_and_validate_schema_file( - schema_file, not generate_only) + result = plugin_util.validate_schema_file(schema_file, + not generate_only) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -101,22 +112,34 @@ def build(plugin_config, upload_artifact, generate_only, skip_id_validation): plugin_config_content, not generate_only, skip_id_validation) - except exceptions.UserError as err: + except (exceptions.UserError, exceptions.SDKToolingError) as err: raise exceptions.BuildFailedError(err) plugin_manifest = {} if result: plugin_manifest = result.plugin_manifest - if result.warnings: - warning_msg = util_classes.MessageUtils.warning_msg( - result.warnings) - logger.warn('{}\n{} Warning(s). {} Error(s).'.format( - warning_msg, len(result.warnings['warning']), 0)) + + # + # Setup a build directory for the plugin in its root. Dependencies are + # packaged with the plugin and should not be installed into the original + # source directory. + # + root = os.path.dirname(plugin_config) + build_dir = os.path.join(root, BUILD_DIR_NAME) + build_src_dir = os.path.join(build_dir, os.path.basename(src_dir)) + + # Copy everything from the source directory into the build directory. + file_util.clean_copy(src_dir, build_src_dir) + + # Install dependencies in the plugin's source root in the build directory. + plugin_dependency_util.install_deps(build_src_dir, + local_vsdk_root=local_vsdk_root) # Prepare the output artifact. try: - plugin_output = prepare_upload_artifact(plugin_config_content, src_dir, - schemas, plugin_manifest) + plugin_output = prepare_upload_artifact(plugin_config_content, + build_src_dir, schemas, + plugin_manifest) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -136,7 +159,7 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): # This is the output dictionary that will be written # to the upload_artifact. # - return { + artifact = { # Hard code the type to a set default. 
'type': TYPE, @@ -151,8 +174,6 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): plugin_config_content['id'].lower(), 'prettyName': plugin_config_content['name'], - 'version': - plugin_config_content['version'], # set default value of locale to en-us 'defaultLocale': plugin_config_content.get('defaultLocale', LOCALE_DEFAULT), @@ -163,6 +184,9 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): plugin_config_content['hostTypes'], 'entryPoint': plugin_config_content['entryPoint'], + 'buildNumber': + plugin_util.get_standardized_build_number( + plugin_config_content['buildNumber']), 'buildApi': package_util.get_build_api_version(), 'engineApi': @@ -187,6 +211,11 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): manifest } + if plugin_config_content.get('externalVersion'): + artifact['externalVersion'] = plugin_config_content['externalVersion'] + + return artifact + def get_linked_source_definition_type(plugin_config_content): if 'STAGED' == plugin_config_content['pluginType'].upper(): diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py index 86a3f640..7d6d04b5 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -10,8 +10,8 @@ import jinja2 import yaml -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - plugin_util, util_classes) +from dlpx.virtualization._internal import (codegen, const, exceptions, + file_util, plugin_util) logger = logging.getLogger(__name__) @@ -23,6 +23,7 @@ DEFAULT_ENTRY_POINT_SYMBOL = 'plugin' DEFAULT_ENTRY_POINT = '{}:{}'.format(DEFAULT_ENTRY_POINT_FILE[:-3], DEFAULT_ENTRY_POINT_SYMBOL) +DEFAULT_BUILD_NUMBER = '0.1.0' # Internal constants for the template directory. ENTRY_POINT_TEMPLATE_NAME = 'entry_point.py.template' @@ -34,7 +35,7 @@ 'schema_template.json') -def init(root, ingestion_strategy, name): +def init(root, ingestion_strategy, name, host_type): """ Creates a valid plugin in a given directory. The plugin created will be able to be built and uploaded immediately. @@ -48,13 +49,16 @@ def init(root, ingestion_strategy, name): root (str): The path of the plugin's root directory ingestion_strategy (str): The plugin type. Either DIRECT or STAGED name (str): The name of the plugin to display. + host_type (list of str): The host type supported by the plugin """ logger.info('Initializing directory: %s', root) - logger.debug('init parameters: %s', { - 'Root': root, - 'Ingestion Strategy': ingestion_strategy, - 'Name': name - }) + logger.debug( + 'init parameters: %s', { + 'Root': root, + 'Ingestion Strategy': ingestion_strategy, + 'Name': name, + 'Host Types': host_type + }) # Files paths based on 'root' to be used throughout src_dir_path = os.path.join(root, DEFAULT_SRC_DIRECTORY) @@ -69,7 +73,7 @@ def init(root, ingestion_strategy, name): # Make an UUID for the plugin plugin_id = str(uuid.uuid4()) - logger.debug("Using % r as the plugin id.", plugin_id) + logger.debug("Using %s as the plugin id.", plugin_id) # if name is not provided the name will be equal to plugin_id. 
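Pulling the config changes together, a plugin config consumed by the new build would look roughly like the sketch below (all values are illustrative; init writes everything except externalVersion, which is optional and added by hand):

    id: 6bb9b725-3db8-4cd5-a411-0dea86078b88
    name: my-plugin
    language: PYTHON27
    pluginType: STAGED
    entryPoint: plugin_runner:plugin
    srcDir: src
    schemaFile: schema.json
    hostTypes:
    - UNIX
    buildNumber: 0.1.0
    externalVersion: 1.0-beta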
if not name: @@ -83,7 +87,7 @@ def init(root, ingestion_strategy, name): OrderedDict, lambda dumper, data: dumper.represent_mapping( 'tag:yaml.org,2002:map', data.items())) - logger.debug("Using %r as the plugin's entry point.", DEFAULT_ENTRY_POINT) + logger.debug("Using %s as the plugin's entry point.", DEFAULT_ENTRY_POINT) try: # # Create the source directory. We've already validated that this @@ -97,12 +101,11 @@ def init(root, ingestion_strategy, name): # file is static and doesn't depend on any input so it can just be # copied. By copying we can also avoid dealing with ordering issues. # - logger.info('Writing schema file at %r.', schema_file_path) + logger.info('Writing schema file at %s.', schema_file_path) shutil.copyfile(SCHEMA_TEMPLATE_PATH, schema_file_path) - # Read and valida the schema file - result = plugin_util.read_and_validate_schema_file( - schema_file_path, False) + # Validate the schema file. + result = plugin_util.validate_schema_file(schema_file_path, False) # Generate the definitions based on the schema file codegen.generate_python(name, src_dir_path, @@ -115,12 +118,13 @@ def init(root, ingestion_strategy, name): # must be done only after both the schema file and src dir have been # created since the paths need to exist. # - logger.info('Writing config file at %r.', config_file_path) + logger.info('Writing config file at %s.', config_file_path) with open(config_file_path, 'w+') as f: config = _get_default_plugin_config(plugin_id, ingestion_strategy, name, DEFAULT_ENTRY_POINT, DEFAULT_SRC_DIRECTORY, - DEFAULT_SCHEMA_FILE) + DEFAULT_SCHEMA_FILE, host_type, + DEFAULT_BUILD_NUMBER) yaml.dump(config, f, default_flow_style=False) # @@ -128,10 +132,10 @@ def init(root, ingestion_strategy, name): # point file is static and doesn't depend on any input so it can just # be copied. # - logger.info('Writing entry file at %r.', entry_point_file_path) + logger.info('Writing entry file at %s.', entry_point_file_path) with open(entry_point_file_path, 'w+') as f: entry_point_content = _get_entry_point_contents( - plugin_id, ingestion_strategy) + plugin_id, ingestion_strategy, host_type) f.write(entry_point_content) except Exception as e: @@ -139,10 +143,10 @@ def init(root, ingestion_strategy, name): file_util.delete_paths(config_file_path, schema_file_path, src_dir_path) raise exceptions.UserError( - 'Failed to initialize plugin directory {!r}: {}.'.format(root, e)) + 'Failed to initialize plugin directory {}: {}.'.format(root, e)) -def _get_entry_point_contents(plugin_name, ingestion_strategy): +def _get_entry_point_contents(plugin_name, ingestion_strategy, host_type): """ Creates a valid, complete entry point file from the template with the given parameters that is escaped correctly and ready to be written. @@ -150,6 +154,9 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy): Args: plugin_name (str): The name of the plugin to use for the entry point. This should not be escaped. + ingestion_strategy (str): The ingestion strategy that the plugin is + using. + host_type (str): The host type supported by the plugin. Returns: str: The contents of a valid entry point file. 
""" @@ -158,23 +165,31 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy): template = env.get_template(ENTRY_POINT_TEMPLATE_NAME) - if ingestion_strategy == util_classes.DIRECT_TYPE: + if host_type == const.WINDOWS_HOST_TYPE: + default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" + elif host_type == const.UNIX_HOST_TYPE: + default_mount_path = "/tmp/dlpx_staged_mounts/{}" + + if ingestion_strategy == const.DIRECT_TYPE: linked_operations = env.get_template( DIRECT_OPERATIONS_TEMPLATE_NAME).render() - elif ingestion_strategy == util_classes.STAGED_TYPE: + elif ingestion_strategy == const.STAGED_TYPE: linked_operations = env.get_template( - STAGED_OPERATIONS_TEMPLATE_NAME).render() + STAGED_OPERATIONS_TEMPLATE_NAME).render( + default_mount_path=default_mount_path) else: - raise RuntimeError('Got unrecognized ingestion strategy: {!r}'.format( + raise RuntimeError('Got unrecognized ingestion strategy: {}'.format( ingestion_strategy)) # Call 'repr' to put the string in quotes and escape quotes. return template.render(name=repr(plugin_name), - linked_operations=linked_operations) + linked_operations=linked_operations, + default_mount_path=default_mount_path) def _get_default_plugin_config(plugin_id, ingestion_strategy, name, - entry_point, src_dir_path, schema_file_path): + entry_point, src_dir_path, schema_file_path, + host_type, default_build_number): """ Returns a valid plugin configuration as an OrderedDict. @@ -187,6 +202,7 @@ def _get_default_plugin_config(plugin_id, ingestion_strategy, name, the module and symbol. src_dir_path (str): The path to the source directory of the plugin. schema_file_path (str): The path to the schema file of the plugin. + host_type (str): The host type supported by the plugin. Returns: OrderedDict: A valid plugin configuration roughly ordered from most interesting to a new plugin author to least interesting. @@ -194,11 +210,14 @@ def _get_default_plugin_config(plugin_id, ingestion_strategy, name, # Ensure values are type 'str'. If they are type unicode yaml prints # them with '!!python/unicode' prepended to the value. 
config = OrderedDict([('id', plugin_id.encode('utf-8')), - ('name', name.encode('utf-8')), ('version', '0.1.0'), - ('language', 'PYTHON27'), ('hostTypes', ['UNIX']), + ('name', name.encode('utf-8')), + ('language', 'PYTHON27'), ('pluginType', ingestion_strategy.encode('utf-8')), ('entryPoint', entry_point.encode('utf-8')), ('srcDir', src_dir_path.encode('utf-8')), - ('schemaFile', schema_file_path.encode('utf-8'))]) + ('schemaFile', schema_file_path.encode('utf-8')), + ('hostTypes', [host_type.encode('utf-8')]), + ('buildNumber', default_build_number.encode('utf-8')) + ]) return config diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/entry_point.py.template b/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/entry_point.py.template index 0e210380..154f8684 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/entry_point.py.template +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/entry_point.py.template @@ -57,7 +57,7 @@ def virtual_post_snapshot(virtual_source, repository, source_config): @plugin.virtual.mount_specification() def virtual_mount_specification(virtual_source, repository): - mount_path = "/tmp/dlpx_virtual_mounts/{}".format(virtual_source.guid) + mount_path = "{{ default_mount_path }}".format(virtual_source.guid) mounts = [Mount(virtual_source.connection.environment, mount_path)] return MountSpecification(mounts) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/staged_operations.py.template b/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/staged_operations.py.template index 8d3f88d8..f23cb76f 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/staged_operations.py.template +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/plugin_template/staged_operations.py.template @@ -8,7 +8,7 @@ def linked_post_snapshot(staged_source, @plugin.linked.mount_specification() def linked_mount_specification(staged_source, repository): - mount_path = "/tmp/dlpx_staged_mounts/{}".format(staged_source.guid) + mount_path = "{{ default_mount_path }}".format(staged_source.guid) environment = staged_source.staged_connection.environment mounts = [Mount(environment, mount_path)] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/upload.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/upload.py index 98ca5034..1395f4ae 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/upload.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/upload.py @@ -13,7 +13,7 @@ UNKNOWN_ERR = 'UNKNOWN_ERR' -def upload(engine, user, upload_artifact, password): +def upload(engine, user, upload_artifact, password, wait): """ Takes in the engine hostname/ip address, logs on and uploads the artifact passed in.
The upload artifact should have been generated via the build @@ -26,12 +26,15 @@ def upload(engine, user, upload_artifact, password): InvalidArtifactError HttpError UnexpectedError + PluginUploadJobFailed + PluginUploadWaitTimedOut """ logger.debug('Upload parameters include' ' engine: {},' ' user: {},' - ' upload_artifact: {}'.format(engine, user, upload_artifact)) - logger.info('Uploading plugin artifact {!r} ...'.format(upload_artifact)) + ' upload_artifact: {},' + ' wait: {}'.format(engine, user, upload_artifact, wait)) + logger.info('Uploading plugin artifact {} ...'.format(upload_artifact)) # Read content of upload artifact try: @@ -45,7 +48,7 @@ def upload(engine, user, upload_artifact, password): ' build command.') except IOError as err: raise exceptions.UserError( - 'Unable to read upload artifact file {!r}' + 'Unable to read upload artifact file \'{}\'' '\nError code: {}. Error message: {}'.format( upload_artifact, err.errno, errno.errorcode.get(err.errno, UNKNOWN_ERR))) @@ -54,4 +57,4 @@ def upload(engine, user, upload_artifact, password): client = delphix_client.DelphixClient(engine) engine_api = client.get_engine_api(content) client.login(engine_api, user, password) - client.upload_plugin(os.path.basename(upload_artifact), content) + client.upload_plugin(os.path.basename(upload_artifact), content, wait) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/const.py b/tools/src/main/python/dlpx/virtualization/_internal/const.py new file mode 100644 index 00000000..2022af51 --- /dev/null +++ b/tools/src/main/python/dlpx/virtualization/_internal/const.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# + +import os + +UNIX_HOST_TYPE = 'UNIX' +WINDOWS_HOST_TYPE = 'WINDOWS' +STAGED_TYPE = 'STAGED' +DIRECT_TYPE = 'DIRECT' + +OUTPUT_DIR_NAME = '.dvp-gen-output' +PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), + 'validation_schemas') +PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, + 'plugin_config_schema.json') + +PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( + PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') + +PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') diff --git a/tools/src/main/python/dlpx/virtualization/_internal/delphix_client.py b/tools/src/main/python/dlpx/virtualization/_internal/delphix_client.py index f2699bf6..2ab521fa 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/delphix_client.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/delphix_client.py @@ -4,6 +4,8 @@ import json import logging +import threading +import time import requests from dlpx.virtualization._internal import exceptions, plugin_util @@ -20,10 +22,14 @@ class DelphixClient(object): """ __BOUNDARY = '----------boundary------' __UPLOAD_CONTENT = 'multipart/form-data; boundary={}'.format(__BOUNDARY) + __JOB_POLLING_INTERVAL = 5 + __WAIT_TIMEOUT_SECONDS = 3600 __cookie = None - def __init__(self, engine): + def __init__(self, engine, timeout=None): self.__engine = engine + if timeout is not None: + self.__WAIT_TIMEOUT_SECONDS = timeout def login(self, engine_api, user, password): """ @@ -207,13 +213,12 @@ def __download_logs(self, plugin_name, token, directory): for chunk in download_zip_data: f.write(chunk) - def upload_plugin(self, name, content): + def upload_plugin(self, name, content, wait): """ Takes in the plugin name and content (as a json). Attempts to upload the plugin onto the connected Delphix Engine. Can raise HttpPostError and UnexpectedError. 
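A sketch of how the new wait plumbing fits together when the client is driven directly (engine address and credentials are illustrative; the methods and the optional timeout argument are the ones defined in this file):

    client = DelphixClient('engine.example.com', timeout=600)
    engine_api = client.get_engine_api(content)  # content: parsed artifact JSON
    client.login(engine_api, 'admin', 'password')
    client.upload_plugin('artifact.json', content, True)  # wait=True polls the job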
""" - # Get the upload token. logger.debug('Getting token to do upload.') response = self.__post('delphix/toolkit/requestUploadToken') @@ -222,10 +227,60 @@ def upload_plugin(self, name, content): logger.info('Uploading plugin {!r}.'.format(name)) # Encode plugin content. - self.__post('delphix/data/upload', - content_type=self.__UPLOAD_CONTENT, - data=self.__encode(json.dumps(content), token, name)) - logger.info('Plugin was successfully uploaded.') + upload_response = self.__post('delphix/data/upload', + content_type=self.__UPLOAD_CONTENT, + data=self.__encode( + json.dumps(content), token, name)) + if wait: + self._wait_for_upload_to_complete(name, + upload_response.get('action'), + upload_response.get('job')) + + def _wait_for_upload_to_complete(self, name, upload_action, upgrade_job): + """ + Waits a maximum of 60 minutes for the plugin upload to complete before + returning from the cli command. If the upload response contains a job, + this means that the plugin will be upgraded. We log additional details + regarding events if the job exists (i.e. event code, details, and + action), but only if we haven't seen the job event before. We will + return when the job succeeds, fails, or times out. Can raise + PluginUploadJobFailed or PluginUploadWaitTimedOut + """ + ticker = threading.Event() + start_time = time.time() + event_tuples = set() + failed_statuses = ('FAILED', 'SUSPENDED', 'CANCELLED') + while not ticker.wait(self.__JOB_POLLING_INTERVAL): + if upgrade_job: + status_response = self.__get( + 'delphix/action/{}/getJob'.format(upload_action)).json() + events = status_response.get('result').get('events') + for event in events: + event_tuple = (event.get('timestamp'), + event.get('messageCode')) + if event_tuple not in event_tuples: + logger.info('Timestamp: {}, Code: {}'.format( + event.get('timestamp'), event.get('messageCode'))) + logger.warn(event.get('messageDetails')) + if event.get('messageAction') is not None: + logger.warn(event.get('messageAction')) + event_tuples.add(event_tuple) + status = status_response.get('result').get('jobState') + else: + status_response = self.__get( + 'delphix/action/{}'.format(upload_action)).json() + status = status_response.get('result').get('state') + + if status == 'COMPLETED': + logger.warn( + 'Plugin {} was successfully uploaded.'.format(name)) + ticker.set() + elif status in failed_statuses: + ticker.set() + raise exceptions.PluginUploadJobFailed(name) + elif (time.time() - start_time) > self.__WAIT_TIMEOUT_SECONDS: + ticker.set() + raise exceptions.PluginUploadWaitTimedOut(name) def download_plugin_logs(self, directory, plugin_config): """ diff --git a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py index 8205eae1..5013e2d5 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py @@ -7,6 +7,21 @@ import re +class SDKToolingError(Exception): + """ + SDKBuildError is one of the main errors that gets caught in cli.py. Errors + that are not related to the user input should raise this error. The + message from this exception is posted to logger.error. message will be the + first arg that is passed in (for any exception that is extending it). 
+ """ + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(SDKToolingError, self).__init__(message) + + class UserError(Exception): """ UserError is the main error that gets caught in cli.py. The message from @@ -21,17 +36,49 @@ def __init__(self, message): super(UserError, self).__init__(message) +class PluginUploadJobFailed(UserError): + """ + PluginUploadJobFailed is raised in the upload command if the action/job + that is being monitored returns with a status other than 'COMPLETED' or + 'RUNNING'. + """ + def __init__(self, plugin_name): + message = "Failed trying to upload plugin {}."\ + .format(plugin_name) + super(PluginUploadJobFailed, self).__init__(message) + + +class PluginUploadWaitTimedOut(UserError): + """ + PluginUploadWaitTimedOut is raised in the upload command if the + action/job that is being monitored does not complete or fail within a + 30 minute timeout window. + """ + def __init__(self, plugin): + message = "Timed out waiting for upload of plugin {} to complete."\ + .format(plugin) + super(PluginUploadWaitTimedOut, self).__init__(message) + + +class PathIsAbsoluteError(UserError): + def __init__(self, path): + self.path = path + message = "The path '{}' should be a relative path, but is not."\ + .format(path) + super(PathIsAbsoluteError, self).__init__(message) + + class PathDoesNotExistError(UserError): def __init__(self, path): self.path = path - message = 'The path {!r} does not exist.'.format(path) + message = "The path '{}' does not exist.".format(path) super(PathDoesNotExistError, self).__init__(message) class PathExistsError(UserError): def __init__(self, path): self.path = path - message = 'The path {!r} already exists.'.format(path) + message = "The path '{}' already exists.".format(path) super(PathExistsError, self).__init__(message) @@ -39,7 +86,7 @@ class PathTypeError(UserError): def __init__(self, path, path_type): self.path = path self.path_type = path_type - message = 'The path {!r} should be a {} but is not.'.format( + message = "The path '{}' should be a {} but is not.".format( path, path_type) super(PathTypeError, self).__init__(message) @@ -147,13 +194,36 @@ class SchemaValidationError(UserError): def __init__(self, schema_file, validation_errors): self.schema_file = schema_file self.validation_errors = validation_errors - error_msg = "\n\n".join( - map(lambda err: self.__format_error(err), validation_errors)) + + formatted_errors = self.__format_errors(validation_errors) + error_msg = "\n".join(formatted_errors) + message = ( - '{}\nValidation failed on {}. \n{} Warning(s). {} Error(s)'.format( - error_msg, self.schema_file, 0, len(validation_errors))) + '{}\n\nValidation failed on {}. \n{} Warning(s). {} Error(s)'. + format(error_msg, self.schema_file, 0, len(formatted_errors))) super(SchemaValidationError, self).__init__(message) + @staticmethod + def __format_errors(validation_errors): + """ + Formats the validation errors by extracting out relevant parts of the + object and also check for errros on nested schemas, if any. + """ + all_errors = [] + for err in validation_errors: + all_errors.append(SchemaValidationError.__format_error(err)) + + # + # Check if sub/nested schema errors are reported as well. If so, + # get the error string based on those errors. 
+ # + if err.context: + nested_errors = SchemaValidationError.__format_errors( + err.context) + all_errors.extend(nested_errors) + + return all_errors + @staticmethod def __format_error(err): """ @@ -164,6 +234,8 @@ message - error message from validation failure path - path of the schema that failed validation instance - instance on which validation failed + context - if there are errors on nested/sub-schemas, the context object + contains validation errors from those schemas. e.g. Validation Error: 'identityFields' is a required property @@ -182,17 +254,16 @@ 'properties': {'name': {'type': 'string'}}, 'type': 'object'} """ - err_instance = json.dumps(err.instance, indent=2) - # # Validation error message could be unicode encoded string. Strip out # any leading unicode characters for proper display and logging. # err_msg = re.compile(r'\bu\b', re.IGNORECASE) err_msg = err_msg.sub("", err.message) - error_string = 'Error: {} on {}\n{}'.format(err_msg, - map(str, list(err.path)), - err_instance) + + error_string = 'Error: {} on {}'.format( + err_msg, map(str, list(err.schema_path))) + return error_string @@ -207,3 +278,68 @@ class BuildFailedError(UserError): def __init__(self, exception): message = ('{} \n\nBUILD FAILED.'.format(exception.message)) super(BuildFailedError, self).__init__(message) + + +class SubprocessFailedError(UserError): + """ + SubprocessFailedError gets raised when a command executing in a subprocess + fails. + """ + def __init__(self, command, exit_code, output): + self.command = command + self.exit_code = exit_code + self.output = output + message = ("{}\n" + "{} failed with exit code {}.").format( + output, command, exit_code) + super(SubprocessFailedError, self).__init__(message) + + +class ValidationFailedError(UserError): + """ + ValidationFailedError gets raised when validation fails on the plugin config + and its contents. + Defines helper methods to format warning and exception messages. + """ + def __init__(self, warnings): + message = self.__report_warnings_and_exceptions(warnings) + super(ValidationFailedError, self).__init__(message) + + @classmethod + def __report_warnings_and_exceptions(cls, warnings): + """ + Prints the warnings and errors that were found in the plugin code, if + the warnings dictionary contains the 'exception' key. + """ + exception_msg = cls.sdk_exception_msg(warnings) + exception_msg += cls.exception_msg(warnings) + exception_msg += '\n{}'.format(cls.warning_msg(warnings)) + return '{}\n{} Warning(s). 
{} Error(s).'.format( + exception_msg, len(warnings['warning']), + len(warnings['exception']) + len(warnings['sdk exception'])) + + @classmethod + def sdk_exception_msg(cls, warnings): + sdk_exception_msg = '\n'.join([ + cls.__format_msg('SDK Error', ex) + for ex in warnings['sdk exception'] + ]) + return sdk_exception_msg + + @classmethod + def exception_msg(cls, exceptions): + exception_msg = '\n'.join( + cls.__format_msg('Error', ex) for ex in exceptions['exception']) + return exception_msg + + @classmethod + def warning_msg(cls, warnings): + warning_msg = '\n'.join( + cls.__format_msg('Warning', warning) + for warning in warnings['warning']) + return warning_msg + + @staticmethod + def __format_msg(msg_type, msg): + msg_str = "{}: {}".format(msg_type, msg) + return msg_str diff --git a/tools/src/main/python/dlpx/virtualization/_internal/file_util.py b/tools/src/main/python/dlpx/virtualization/_internal/file_util.py index f664f146..6c888126 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/file_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/file_util.py @@ -5,7 +5,9 @@ import logging import os import shutil +import tempfile import traceback +from contextlib import contextmanager from dlpx.virtualization._internal import exceptions @@ -55,18 +57,65 @@ def validate_paths_do_not_exist(*args): logger.debug('SUCCESS: Path %r does not exist.', path) -def get_src_dir_path(file_name, src_dir): - """Get the absolute path if the srcDir provided is relative path and - validate that srcDir is a valid directory and that it exists. +def standardize_path(path): + standardized_path = os.path.expanduser(path) + if standardized_path == '.': + standardized_path = os.path.realpath(standardized_path) + else: + standardized_path = os.path.normpath(standardized_path) + standardized_path = os.path.normcase(standardized_path) + return standardized_path + + +def get_src_dir_path(config_file_path, src_dir): """ - if not os.path.isabs(src_dir): - src_dir = os.path.join(os.path.dirname(file_name), src_dir) + Validates 4 requirements of src_dir: + - src_dir must be a relative path + - src_dir must exist + - src_dir must be a directory + - src_dir must be a subdirectory of the plugin root - if not os.path.exists(src_dir): - raise exceptions.PathDoesNotExistError(src_dir) - if not os.path.isdir(src_dir): - raise exceptions.PathTypeError(src_dir, 'directory') - return src_dir + Args: + config_file_path: A path to the plugin's config file. The plugin's + root is the directory containing the config file. No pre-processing + is needed. + src_dir: The path to the plugin's src directory. This is the path + to be validated. + Returns: + str: A normalized, absolute path to the plugin's source directory. + """ + # Validate the the src directory is not an absolute path. Paths with + # ~ in them are not considered absolute by os.path.isabs. + src_dir = os.path.expanduser(src_dir) + if os.path.isabs(src_dir): + raise exceptions.PathIsAbsoluteError(src_dir) + + # The plugin root is the directory containing the plugin config file. + # This is passed in by the CLI so it needs to be standardized and made + # absolute for comparison later. + plugin_root_dir = os.path.dirname(config_file_path) + plugin_root_dir = standardize_path(plugin_root_dir) + plugin_root_dir = os.path.abspath(plugin_root_dir) + + # The plugin's src directory is relative to the plugin root not to the + # current working directory. os.path.abspath makes a relative path + # absolute by appending the current working directory to it. 
The CLI + # can be executed anywhere so it's not guaranteed that the cwd is the + # plugin root. + src_dir_absolute = standardize_path(os.path.join(plugin_root_dir, src_dir)) + + if not os.path.exists(src_dir_absolute): + raise exceptions.PathDoesNotExistError(src_dir_absolute) + if not os.path.isdir(src_dir_absolute): + raise exceptions.PathTypeError(src_dir_absolute, 'directory') + + if not src_dir_absolute.startswith( + plugin_root_dir) or src_dir_absolute == plugin_root_dir: + raise exceptions.UserError( + "The src directory {} is not a subdirectory " + "of the plugin root at {}".format(src_dir_absolute, + plugin_root_dir)) + return src_dir_absolute def make_dir(path, force_remove): @@ -80,9 +129,39 @@ def make_dir(path, force_remove): shutil.rmtree(path, ignore_errors=True) try: os.mkdir(path) - logger.debug('Successfully created directory {!r}'.format(path)) + logger.debug('Successfully created directory \'{}\''.format(path)) except OSError as err: raise exceptions.UserError( - 'Unable to create new directory {!r}' + 'Unable to create new directory \'{}\'' '\nError code: {}. Error message: {}'.format( path, err.errno, os.strerror(err.errno))) + + +def clean_copy(src, tgt): + """ + Copies src into tgt. Deletes tgt if it exists before copying. + + Args: + src: The directory to copy. + tgt: The directory to copy to. + """ + delete_paths(tgt) + logger.debug('Copying %s to %s', src, tgt) + shutil.copytree(src, tgt) + + +@contextmanager +def tmpdir(): + """ + Creates a temporary directory. When the context is exited, the directory + is deleted. + + This is only needed for Python 2. tempfile in Python 3 has this + functionality built in. + """ + temp = None + try: + temp = tempfile.mkdtemp() + yield temp + finally: + delete_paths(temp) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/package_util.py b/tools/src/main/python/dlpx/virtualization/_internal/package_util.py index dd07c965..1eb75725 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/package_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/package_util.py @@ -5,8 +5,10 @@ import functools import logging import os +import re from dlpx.virtualization import _internal as virtualization_internal +from dlpx.virtualization.platform import util from six.moves import configparser logger = logging.getLogger(__name__) @@ -44,12 +46,34 @@ def _get_settings(): @_run_once def get_version(): """Returns the version of the dlpx.virtualization._internal package.""" - return _get_settings().get('General', 'package_version') + with open(os.path.join(get_internal_package_root(), + 'VERSION')) as version_file: + version = version_file.read().strip() + return version + + +def get_external_version_string(version_string): + """Returns the external version string given an external or internal + (development) version. An external version string contains only digits and + dots, and follows the following format: "1.1.0". The internal version + string might include the development build suffix of the following format: + "1.0.0-internal-001". 
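Concretely, per the docstring (and note that the regex as written only matches single-digit version components, so a version like 1.10.0 would not parse):

    get_external_version_string('1.0.0-internal-001')  # -> '1.0.0'
    get_external_version_string('2.0.0')                # -> '2.0.0'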
+ + :param version_string: version string in either internal or external format + :return: version string in external format + """ + return re.search(r'([0-9]\.[0-9]\.[0-9])', version_string).group(0) + + +@_run_once +def get_virtualization_api_version(): + return get_external_version_string(util.get_virtualization_api_version()) def get_build_api_version(): """Returns the sdk build version in the format build command expects""" - major, minor, micro = (int(n) for n in get_version().split('.')) + major, minor, micro =\ + (int(n) for n in get_virtualization_api_version().split('.')) build_api_version = { 'type': 'APIVersion', 'major': major, diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_dependency_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_dependency_util.py new file mode 100644 index 00000000..1a90a154 --- /dev/null +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_dependency_util.py @@ -0,0 +1,176 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +import logging +import os +import subprocess +import sys + +from dlpx.virtualization._internal import file_util, package_util +from dlpx.virtualization._internal.exceptions import SubprocessFailedError + +logger = logging.getLogger(__name__) + +DVP_DEPENDENCIES = ['dvp-common', 'dvp-libs', 'dvp-platform'] + + +def install_deps(target_dir, local_vsdk_root=None): + """ + Installs the Python packages needed for the plugin to execute into the + given target directory. + + Args: + target_dir: The directory to install the plugin's dependencies into. + local_vsdk_root: This is an internal field only used for SDK + developers. It is a path to the root of the SDK repository. + """ + + # + # If local_vsdk_root is not None, it is assumed this is a development + # build being done by an SDK developer that is testing wrapper changes. + # To speed up development, instead of installing the wrappers from a PyPI + # repository, this will go to local_vsdk_root and for each package will + # build a distribution locally and install it. + # + # This alleviates the need for SDK developers to build and upload dev + # builds of the wrappers in order to run 'dvp build' and test them. + # + if local_vsdk_root: + package_names = ['common', 'libs', 'platform'] + + # + # Build the wheels for each package in a temporary directory. + # + # Pip supports installing directly from a setup.py file but this + # proved to be incredibly slow due to how it copies source files. + # If that issue is resolved, it would likely be better to use pip to + # install directly from the setup.py file instead of needing to build + # the wheels first. This would remove the need for a temp directory + # as well. + # + with file_util.tmpdir() as wheel_dir: + for package in package_names: + _build_wheel(os.path.join(local_vsdk_root, package), wheel_dir) + + packages = { + os.path.join(wheel_dir, p) + for p in os.listdir(wheel_dir) + } + + if len(packages) != len(package_names): + raise RuntimeError( + 'An error occurred while attempting to install dev builds ' + 'of the wrappers. Three packages were expected in the ' + 'temporary build directory but instead {} files were ' + 'found:\n\t{}'.format(len(packages), + '\n\t'.join(packages))) + + # + # Install the packages. this needs to be done inside the tmpdir + # context, otherwise the distributions will be deleted. 
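For orientation, the helpers used above reduce to ordinary pip and setup.py invocations; both are defined later in this file, and the versions and paths below are illustrative:

    # _pip_install_to_dir(packages, 'build/src') boils down to:
    #   python -m pip install -t build/src dvp-common==2.0.0 dvp-libs==2.0.0 dvp-platform==2.0.0
    # and _build_wheel('<vsdk_root>/common', wheel_dir) to:
    #   (cd <vsdk_root>/common && python setup.py bdist_wheel -d <wheel_dir>)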
+ # + _pip_install_to_dir(packages, target_dir) + + if os.path.exists(wheel_dir): + raise RuntimeError('An error occurred while attempting to install ' + 'dev builds of the wrappers. {} is a temporary ' + 'directory used to build the wrapper ' + 'distributions. It should have been cleaned up ' + 'but it still exists.'.format(wheel_dir)) + else: + # This is the production branch that is executed for plugin developers. + dvp_version = package_util.get_version() + packages = [ + '{}=={}'.format(pkg, dvp_version) for pkg in DVP_DEPENDENCIES + ] + _pip_install_to_dir(packages, target_dir) + + # + # This is an unfortunate hack. 'protobuf' is installed under the 'google' + # namespace package. However, there is no __init__.py under 'google'. This + # is because google assumes it is installed in a site package directory + # and uses a .pth file to set up the namespace package. + # + # The zipimporter used on the Delphix Engine to import the plugin cannot + # handle .pth files so here an empty __init__.py file is created so + # 'google' and therefore 'protobuf' can be imported successfully at + # runtime. + # + open(os.path.join(target_dir, 'google', '__init__.py'), 'w').close() + + +def _execute_pip(pip_args): + """ + Execute pip with the given args. Raises a SubprocessFailedError if the + exit code is non-zero. + + Args: + pip_args: a list of string arguments to pass to pip. + """ + args = [sys.executable, '-m', 'pip'] + args.extend(pip_args) + + logger.debug('Executing %s', ' '.join(args)) + proc = subprocess.Popen(args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + all_output, _ = proc.communicate() + exit_code = proc.wait() + + # + # If pip failed, raise an error. It's on the caller to log any output. If + # the command succeeded, log the output to debug so the caller doesn't + # need to. + # + if exit_code != 0: + raise SubprocessFailedError(' '.join(args), exit_code, all_output) + else: + logger.debug(all_output) + + +def _pip_install_to_dir(dependencies, target_dir): + """ + Installs dependencies into a target_dir. + + Args: + dependencies: a set of dependencies to install. + target_dir: the directory to install the dependencies into. + """ + args = ['install', '-t', target_dir] + args.extend(dependencies) + _execute_pip(args) + + +def _build_wheel(package_root, target_dir=None): + """ + Uses the 'setup.py' file in package_root to build a wheel distribution. If + target_dir is present, the wheel is built into it. Raises a + SubprocessFailedError if it fails. + + Args: + package_root: The path to the root of the package to build. It is + assumed there is a setup.py file in this directory. + target_dir: The directory to build the wheel into. 
+ """ + if not os.path.exists(os.path.join(package_root, 'setup.py')): + raise RuntimeError( + 'No setup.py file exists in directory {}'.format(package_root)) + + args = [sys.executable, 'setup.py', 'bdist_wheel'] + if target_dir: + args.extend(['-d', target_dir]) + + logger.debug('Executing %s', ' '.join(args)) + proc = subprocess.Popen(args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=package_root) + + all_output, _ = proc.communicate() + exit_code = proc.wait() + + if exit_code != 0: + raise SubprocessFailedError(' '.join(args), exit_code, all_output) + else: + logger.debug(all_output) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 0df69610..9f58feb1 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -1,25 +1,24 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import importlib -import inspect import logging import os import sys -from collections import defaultdict +from collections import defaultdict, namedtuple from multiprocessing import Process, Queue import yaml -from dlpx.virtualization._internal import exceptions, util_classes -from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE -from dlpx.virtualization._internal.util_classes import MessageUtils -from flake8.api import legacy as flake8 +from dlpx.virtualization._internal import const, exceptions +from dlpx.virtualization.platform import import_util logger = logging.getLogger(__name__) -PLUGIN_IMPORTER_YAML = os.path.join(util_classes.PLUGIN_SCHEMAS_DIR, +PLUGIN_IMPORTER_YAML = os.path.join(const.PLUGIN_SCHEMAS_DIR, 'plugin_importer.yaml') +validation_result = namedtuple('validation_result', ['plugin_manifest']) + def load_validation_maps(): """ @@ -33,19 +32,15 @@ def load_validation_maps(): class PluginImporter: """ Import helper class for the plugin. Imports the plugin module in a sub - process to ensure its isolated and does not pollute caller's runtime. + process to ensure it's isolated and does not pollute caller's runtime. On successful import, callers can get the manifest describing what methods are implemented in the plugin code. If import fails or has issues with validation of module content and entry points- will save errors/warnings in a dict that callers can access. """ - validation_maps = load_validation_maps() - expected_staged_args_by_op = validation_maps['EXPECTED_STAGED_ARGS_BY_OP'] - expected_direct_args_by_op = validation_maps['EXPECTED_DIRECT_ARGS_BY_OP'] - required_methods_by_plugin_type = \ - validation_maps['REQUIRED_METHODS_BY_PLUGIN_TYPE'] - required_methods_description = \ - validation_maps['REQUIRED_METHODS_DESCRIPTION'] + v_maps = load_validation_maps() + required_methods_by_plugin_type = v_maps['REQUIRED_METHODS_BY_PLUGIN_TYPE'] + required_methods_description = v_maps['REQUIRED_METHODS_DESCRIPTION'] def __init__(self, src_dir, @@ -58,41 +53,39 @@ def __init__(self, self.__plugin_entry_point = entry_point self.__plugin_type = plugin_type self.__validate = validate + self.__post_import_checks = [self.__check_for_required_methods] + + @property + def result(self): + return validation_result(plugin_manifest=self.__plugin_manifest) - def import_plugin(self): + def validate_plugin_module(self): """ - Imports the plugin module, does basic validation. 
+ Imports the plugin module, does post import validation. Returns: plugin manifest - dict describing methods implemented in the plugin - Note: - warnings - dict containing a list of errors or warnings can be - obtained by the caller via warnings property. + is available to callers via the result property. + NOTE: + Importing module in the current context pollutes the runtime of + the caller, in this case dvp. If, for example, the module being + imported contains code that adds a handler to the root logger at + import time, this can cause issues with logging in this code and + callers of the validator. To avoid such issues, perform the import + in a sub-process and on completion return the output. """ logger.debug('Importing plugin module : %s', self.__plugin_module) + self.__plugin_manifest, warnings = self.__internal_import() + self.__run_checks(warnings) - self.__pre_import_checks() - plugin_manifest, warnings = self.__import_plugin() - self.__post_import_checks(plugin_manifest, warnings) - - return plugin_manifest, warnings - - def __pre_import_checks(self): - """ - Performs checks of the plugin code that should take place prior to - importing. - """ - warnings = PluginImporter.__check_for_undefined_names(self.__src_dir) - PluginImporter.__report_warnings_and_exceptions(warnings) - - def __import_plugin(self): + def __internal_import(self): """ - Imports the module to check for errors or issues. Also does an eval on - the entry point. + Imports the module in a sub-process to check for errors or issues. + Also does an eval on the entry point. """ plugin_manifest = {} warnings = defaultdict(list) try: - plugin_manifest, warnings = (PluginImporter.__import_in_subprocess( + plugin_manifest, warnings = (self.__import_in_subprocess( self.__src_dir, self.__plugin_module, self.__plugin_entry_point, self.__plugin_type, self.__validate)) @@ -107,40 +100,6 @@ return plugin_manifest, warnings - def __post_import_checks(self, plugin_manifest, warnings): - """ - Performs checks of the plugin code that should take place after - importing. - """ - check_warnings = self.__check_for_required_methods( - plugin_manifest, self.__plugin_type) - - if check_warnings and 'warning' in check_warnings: - warnings['warning'].extend(check_warnings['warning']) - - self.__report_warnings_and_exceptions(warnings) - - @staticmethod - def __check_for_required_methods(plugin_manifest, plugin_type): - """ - Checks for required methods in the manifest and adds warnings for any - missing methods. - """ - warnings = defaultdict(list) - if not plugin_manifest: - return warnings - for method_key, method_name in \ - PluginImporter.required_methods_by_plugin_type[ - plugin_type].items(): - if plugin_manifest[method_key] is False: - warnings['warning'].append( - 'Implementation missing ' - 'for required method {}. The Plugin Operation \'{}\' ' - 'will fail when executed.'.format( - method_name, PluginImporter. - required_methods_description[method_key])) - return warnings - @staticmethod def __import_in_subprocess(src_dir, module, entry_point, plugin_type, validate): @@ -177,121 +136,152 @@ return manifest, warnings - @staticmethod - def __check_for_undefined_names(src_dir): """ - Checks the plugin module for undefined names. This catches - missing imports, references to nonexistent variables, etc. 
- - ..note:: - We are using the legacy flake8 api, because there is currently - no public, stable api for flake8 >= 3.0.0 - - For more info, see - https://flake8.pycqa.org/en/latest/user/python-api.html + Performs checks of the plugin code that should take place after + importing. """ - warnings = defaultdict(list) - exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) - style_guide = flake8.get_style_guide(select=["F821"], - exclude=[exclude_dir], - quiet=1) - style_guide.check_files(paths=[src_dir]) - file_checkers = style_guide._application.file_checker_manager.checkers - for checker in file_checkers: - for result in checker.results: - # From the api code, result is a tuple defined as: error = - # (error_code, line_number, column, text, physical_line) - if result[0] == 'F821': - msg = "{} on line {} in {}".format(result[3], result[1], - checker.filename) - warnings['exception'].append(exceptions.UserError(msg)) - - return warnings - - @staticmethod - def __report_warnings_and_exceptions(warnings): + for check in self.__post_import_checks: + check_warnings = check() + if check_warnings and 'warning' in check_warnings: + warnings['warning'].extend(check_warnings['warning']) + + if warnings: + if 'exception' in warnings: + raise exceptions.ValidationFailedError(warnings) + if 'sdk exception' in warnings: + sdk_exception_msg =\ + exceptions.ValidationFailedError(warnings).message + raise exceptions.SDKToolingError(sdk_exception_msg) + + if 'warning' in warnings: + # + # Use the ValidationFailedError type to get a formatted message + # with number of warnings included in the message. + # + warning_msg = exceptions.ValidationFailedError( + warnings).message + logger.warn(warning_msg) + + def __check_for_required_methods(self): """ - Prints the warnings and errors that were found in the plugin code, if - the warnings dictionary contains the 'exception' key. + Checks for required methods in the manifest and adds warnings for any + missing methods. """ - if warnings and 'exception' in warnings: - exception_msg = MessageUtils.exception_msg(warnings) - exception_msg += '\n{}'.format(MessageUtils.warning_msg(warnings)) - raise exceptions.UserError( - '{}\n{} Warning(s). {} Error(s).'.format( - exception_msg, len(warnings['warning']), - len(warnings['exception']))) + warnings = defaultdict(list) + if not self.__plugin_manifest: + return warnings + for method_key, method_name in \ + PluginImporter.required_methods_by_plugin_type[ + self.__plugin_type].items(): + if self.__plugin_manifest[method_key] is False: + warnings['warning'].append( + 'Implementation missing ' + 'for required method {}. The Plugin Operation \'{}\' ' + 'will fail when executed.'.format( + method_name, PluginImporter. + required_methods_description[method_key])) + return warnings def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): - manifest = {} + """ + Imports the plugin module, runs validations and returns the manifest. + """ + module_content = None + + try: + module_content = _import_helper(queue, src_dir, module) + except exceptions.UserError: + # + # Exception here means there was an error importing the module and + # queue is updated with the exception details inside _import_helper. + # + return + + # + # Create an instance of plugin module with associated state to pass around + # to the validation code. 
+ # + plugin_module = import_util.PluginModule(src_dir, module, entry_point, + plugin_type, module_content, + PluginImporter.v_maps, validate) + + # Validate if the module imported fine and is the expected one. + warnings = import_util.validate_import(plugin_module) + _process_warnings(queue, warnings) + + # If the import itself had issues, no point validating further. + if warnings and len(warnings) > 0: + return + + # Run post import validations and consolidate issues. + warnings = import_util.validate_post_import(plugin_module) + _process_warnings(queue, warnings) + + manifest = _prepare_manifest(entry_point, module_content) + queue.put({'manifest': manifest}) + + +def _import_helper(queue, src_dir, module): + """Helper method to import the module and handle any import time + exceptions. + """ + module_content = None sys.path.append(src_dir) + try: module_content = importlib.import_module(module) - manifest = _validate_and_get_manifest(module, module_content, - entry_point) - - if validate: + except (ImportError, TypeError) as err: + queue.put({'exception': err}) + except Exception as err: + # + # We need to figure out if this is an error that was raised inside the + # wrappers which would mean that it is a user error. Otherwise we + # should still queue the error but specify that it's not a user error. + # + parent_class_list = [base.__name__ for base in err.__class__.__bases__] + if 'PlatformError' in parent_class_list: + # This is a user error + error = exceptions.UserError(err.message) + queue.put({'exception': error}) + else: # - # Validated methods args against expected args and add any - # resulting warnings to the queue for caller to process. - # These warnings should be treated as an exception to make - # sure build fails. + # Because we don't know if the output of the err is actually in the + # message, we just cast the exception to a string and hope to get + # the most information possible. # - warnings = _validate_named_args(module_content, entry_point, - plugin_type) - if warnings: - map(lambda warning: queue.put({'exception': warning}), - warnings) - except ImportError as err: - queue.put({'exception': err}) - except exceptions.UserError as user_err: - queue.put({'exception': user_err}) - except RuntimeError as rt_err: - queue.put({'exception': rt_err}) + error = exceptions.SDKToolingError(str(err)) + queue.put({'sdk exception': error}) finally: sys.path.remove(src_dir) - queue.put({'manifest': manifest}) + if not module_content: + raise exceptions.UserError("Plugin module content is None") + + return module_content + +def _process_warnings(queue, warnings): + for warning in warnings: + queue.put({'exception': warning}) -def _validate_and_get_manifest(module, module_content, entry_point): + +def _prepare_manifest(entry_point, module_content): """ Creates a plugin manifest indicating which plugin operations have been implemented by a plugin developer. Plugin_module_content is a module object which must have plugin_entry_point_name as one of its attributes. Args: - module: name of the module imported - module_content: plugin module content from import entry_point: name of entry point to the above plugin module + module_content: plugin module content from import Returns: dict: dictionary that represents plugin's manifest """ - # This should never happen and if it does, flag a run time error. 
- if module_content is None: - raise RuntimeError('Plugin module content is None.') - - # - # Schema validation on plugin config file would have ensured entry - # is a string and should never happen its none - so raise a run time - # error if it does. - # - if entry_point is None: - raise RuntimeError('Plugin entry point object is None.') - - if not hasattr(module_content, entry_point): - raise exceptions.UserError( - 'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol' - ' in module \'{}\'.'.format(module, entry_point, entry_point, - module)) plugin_object = getattr(module_content, entry_point) - if plugin_object is None: - raise exceptions.UserError('Plugin object retrieved from the entry' - ' point {} is None'.format(entry_point)) - # Check which methods on the plugin object have been implemented. manifest = { 'type': @@ -333,65 +323,9 @@ def _validate_and_get_manifest(module, module_content, entry_point): 'hasVirtualStatus': bool(plugin_object.virtual.status_impl), 'hasInitialize': - bool(plugin_object.virtual.initialize_impl) + bool(plugin_object.virtual.initialize_impl), + 'migrationIdList': + plugin_object.upgrade.migration_id_list } return manifest - - -def _validate_named_args(module_content, entry_point, plugin_type): - """ - Does named argument validation based on the plugin type. - """ - warnings = [] - - plugin_object = getattr(module_content, entry_point) - - # Iterate over attributes objects of the Plugin object - for plugin_attrib in plugin_object.__dict__.values(): - # - # For each plugin attribute object, its __dict__.keys will give - # us the name of the plugin implemntation method name. That name - # is useful in looking up named arguments expected and what is - # actually in the plugin code. And plugin_op_type can be, for e.g. - # LinkedOperations, DiscoveryOperations, VirtualOperations - # - plugin_op_type = plugin_attrib.__class__.__name__ - for op_name_key, op_name in plugin_attrib.__dict__.items(): - if op_name is None: - continue - actual_args = inspect.getargspec(op_name) - warnings.extend( - _check_args(method_name=op_name.__name__, - expected_args=_lookup_expected_args( - plugin_type, plugin_op_type, op_name_key), - actual_args=actual_args.args)) - - return warnings - - -def _check_args(method_name, expected_args, actual_args): - warnings = [] - - if len(expected_args) != len(actual_args): - warnings.append('Number of arguments do not match in method {}.' - ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - if not all(arg in expected_args for arg in actual_args): - warnings.append('Named argument mismatch in method {}.' - ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - return warnings - - -def _lookup_expected_args(plugin_type, plugin_op_type, plugin_op_name): - if plugin_type == util_classes.DIRECT_TYPE: - return PluginImporter.expected_direct_args_by_op[plugin_op_type][ - plugin_op_name] - else: - return PluginImporter.expected_staged_args_by_op[plugin_op_type][ - plugin_op_name] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 8ef97776..1a210423 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -1,22 +1,50 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# +import enum import logging import os +from contextlib import contextmanager -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions, file_util +from dlpx.virtualization._internal.plugin_importer import PluginImporter from dlpx.virtualization._internal.plugin_validator import PluginValidator from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode logger = logging.getLogger(__name__) -def read_and_validate_plugin_config_file(plugin_config, - stop_build, - run_all_validations, - skip_id_validation=False): +class ValidationMode(enum.Enum): + """ + Defines the validation mode that validator uses. + INFO - validator will give out info messages if validation fails. + WARNING - validator will log a warning if validation fails. + ERROR - validator will raise an exception if validation fails. + """ + INFO = 1 + WARNING = 2 + ERROR = 3 + + +@contextmanager +def validate_error_handler(plugin_file, validation_mode): + try: + yield + except Exception as e: + if validation_mode is ValidationMode.INFO: + logger.info('Validation failed on plugin file %s : %s', + plugin_file, e) + elif validation_mode is ValidationMode.WARNING: + logger.warning('Validation failed on plugin file %s : %s', + plugin_file, e) + else: + raise e + + +def validate_plugin_config_file(plugin_config, + stop_build, + skip_id_validation=False): """ Reads a plugin config file and validates the contents using a pre-defined schema. If stop_build is True, will report exception @@ -27,12 +55,14 @@ def read_and_validate_plugin_config_file(plugin_config, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator(plugin_config, plugin_config_schema_file, - validation_mode, run_all_validations) - validator.validate() + plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION + if skip_id_validation else + const.PLUGIN_CONFIG_SCHEMA) + validator = PluginValidator(plugin_config, plugin_config_schema_file) + + with validate_error_handler(plugin_config, validation_mode): + validator.validate_plugin_config() + return validator.result @@ -48,18 +78,22 @@ def get_plugin_manifest(plugin_config_file, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator.from_config_content(plugin_config_file, - plugin_config_content, - plugin_config_schema_file, - validation_mode) - validator.validate() - return validator.result + src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = PluginValidator.split_entry_point( + plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + importer = PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) + with validate_error_handler(plugin_config_file, validation_mode): + importer.validate_plugin_module() -def read_and_validate_schema_file(schema_file, stop_build): + return importer.result + + +def validate_schema_file(schema_file, stop_build): """ Reads a plugin schema file and validates the 
contents using a pre-defined schema. If stop_build is True, will report exception @@ -69,9 +103,11 @@ def read_and_validate_schema_file(schema_file, stop_build): """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) + + with validate_error_handler(schema_file, validation_mode): + validator.validate() + return validator.result @@ -79,8 +115,7 @@ def get_plugin_config_property(plugin_config_path, prop): """ Returns the value for a specific property from the plugin config file. """ - result = read_and_validate_plugin_config_file(plugin_config_path, False, - False) + result = validate_plugin_config_file(plugin_config_path, False, False) return result.plugin_config_content[prop] @@ -97,3 +132,23 @@ def get_schema_file_path(plugin_config, schema_file): if not os.path.isfile(schema_file): raise exceptions.PathTypeError(schema_file, 'file') return os.path.normpath(schema_file) + + +def get_standardized_build_number(build_number): + """ + Converts the build number the way back end expects it to be - without + leading or trailing zeros in each part of the multi part build number that + is separated by dots. + """ + # Split on the period and convert to integer + array = [int(i) for i in build_number.split('.')] + + # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 + while array: + if not array[-1]: + # Remove the last element which is a zero from array + array.pop() + else: + break + + return '.'.join(str(i) for i in array) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py index 89783b69..46c46435 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py @@ -8,16 +8,14 @@ from collections import defaultdict, namedtuple import yaml -from dlpx.virtualization._internal import (exceptions, file_util, - plugin_importer) -from dlpx.virtualization._internal.util_classes import ValidationMode +from dlpx.virtualization._internal import exceptions +from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE +from flake8.api import legacy as flake8 from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple( - 'validation_result', - ['plugin_config_content', 'plugin_manifest', 'warnings']) +validation_result = namedtuple('validation_result', ['plugin_config_content']) class PluginValidator: @@ -29,33 +27,30 @@ class PluginValidator: config, content of the python module specified in in the pluginEntryPoint and also name of the plugin entry point in the module. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back. 
""" def __init__(self, plugin_config, plugin_config_schema, - validation_mode, - run_all_validations, plugin_config_content=None): self.__plugin_config = plugin_config self.__plugin_config_schema = plugin_config_schema - self.__validation_mode = validation_mode - self.__run_all_validations = run_all_validations self.__plugin_config_content = plugin_config_content self.__plugin_manifest = None - self.__warnings = defaultdict(list) + self.__pre_import_checks = [ + self.__validate_plugin_config_content, + self.__validate_plugin_entry_point, + self.__check_for_undefined_names + ] @property def result(self): return validation_result( - plugin_config_content=self.__plugin_config_content, - plugin_manifest=self.__plugin_manifest, - warnings=self.__warnings) + plugin_config_content=self.__plugin_config_content) @classmethod def from_config_content(cls, plugin_config_file, plugin_config_content, - plugin_config_schema, validation_mode): + plugin_config_schema): """ Instantiates the validator with given plugin config content. plugin_config_file path is not read but used to get the absolute @@ -63,56 +58,27 @@ def from_config_content(cls, plugin_config_file, plugin_config_content, Returns: PluginValidator """ - return cls(plugin_config_file, plugin_config_schema, validation_mode, - True, plugin_config_content) + return cls(plugin_config_file, plugin_config_schema, + plugin_config_content) - def validate(self): - """ - Validates the plugin config file. - """ - logger.debug('Run config validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin config file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin config file : %s', - e) - else: - raise e - - def __run_validations(self): + def validate_plugin_config(self): """ Reads a plugin config file and validates the contents using a - pre-defined schema. If validation is successful, tries to import - the plugin module and validates the entry point specified. + pre-defined schema. """ - logger.info('Reading plugin config file %s', self.__plugin_config) - if self.__plugin_config_content is None: self.__plugin_config_content = self.__read_plugin_config_file() logger.debug('Validating plugin config file content : %s', self.__plugin_config_content) - self.__validate_plugin_config_content() - - if not self.__run_all_validations: - logger.debug('Plugin config file schema validation is done') - return - - src_dir = file_util.get_src_dir_path( - self.__plugin_config, self.__plugin_config_content['srcDir']) - - logger.debug('Validating plugin entry point : %s', - self.__plugin_config_content['entryPoint']) - self.__validate_plugin_entry_point(src_dir) + self.__run_checks() def __read_plugin_config_file(self): """ Reads a plugin config file and raises UserError if there is an issue reading the file. """ + logger.info('Reading plugin config file %s', self.__plugin_config) try: with open(self.__plugin_config, 'rb') as f: try: @@ -122,17 +88,29 @@ def __read_plugin_config_file(self): mark = err.problem_mark raise exceptions.UserError( 'Command failed because the plugin config file ' - 'provided as input {!r} was not valid yaml. ' + 'provided as input \'{}\' was not valid yaml. ' 'Verify the file contents. 
' 'Error position: {}:{}'.format( self.__plugin_config, mark.line + 1, mark.column + 1)) except (IOError, OSError) as err: raise exceptions.UserError( - 'Unable to read plugin config file {!r}' + 'Unable to read plugin config file \'{}\'' '\nError code: {}. Error message: {}'.format( self.__plugin_config, err.errno, os.strerror(err.errno))) + def __run_checks(self): + """ + Runs validations on the plugin config content and raise exceptions + if any. + """ + # + # All the pre-import checks need to happen in sequence. So no point + # validating further if a check fails. + # + for check in self.__pre_import_checks: + check() + def __validate_plugin_config_content(self): """ Validates the given plugin configuration is valid. @@ -164,13 +142,13 @@ def __validate_plugin_config_content(self): plugin_schema = json.load(f) except ValueError as err: raise exceptions.UserError( - 'Failed to load schemas because {!r} is not a ' + 'Failed to load schemas because {} is not a ' 'valid json file. Error: {}'.format( self.__plugin_config_schema, err)) except (IOError, OSError) as err: raise exceptions.UserError( - 'Unable to read plugin config schema file {!r}' + 'Unable to read plugin config schema file {}' '\nError code: {}. Error message: {}'.format( self.__plugin_config_schema, err.errno, os.strerror(err.errno))) @@ -192,51 +170,55 @@ def __validate_plugin_config_content(self): raise exceptions.SchemaValidationError(self.__plugin_config, validation_errors) - def __validate_plugin_entry_point(self, src_dir): + def __validate_plugin_entry_point(self): """ Validates the plugin entry point by parsing the entry - point to get module and entry point. Imports the module - to check for errors or issues. Also does an eval on the - entry point. + point to get module and entry point. """ - entry_point_field = self.__plugin_config_content['entryPoint'] - entry_point_strings = entry_point_field.split(':') - # Get the module and entry point name to import - entry_point_module = entry_point_strings[0] - entry_point_object = entry_point_strings[1] - plugin_type = self.__plugin_config_content['pluginType'] + entry_point_module, entry_point_object = self.split_entry_point( + self.__plugin_config_content['entryPoint']) - try: - self.__plugin_manifest, self.__warnings = ( - PluginValidator.__import_plugin(src_dir, entry_point_module, - entry_point_object, - plugin_type)) - except ImportError as err: - raise exceptions.UserError( - 'Unable to load module \'{}\' specified in ' - 'pluginEntryPoint \'{}\' from path \'{}\'. ' - 'Error message: {}'.format(entry_point_module, - entry_point_object, src_dir, err)) + if not entry_point_module: + raise exceptions.UserError('Plugin module is invalid') - logger.debug("Got manifest %s", self.__plugin_manifest) + if not entry_point_object: + raise exceptions.UserError('Plugin object is invalid') - @staticmethod - def __import_plugin(src_dir, entry_point_module, entry_point_object, - plugin_type): + def __check_for_undefined_names(self): """ - Imports the given python module. - NOTE: - Importing module in the current context pollutes the runtime of - the caller, in this case dvp. If the module being imported, for - e.g. contains code that adds a handler to the root logger at - import time, this can cause issues with logging in this code and - callers of validator. To avoid such issues, perform the import in - in a sub-process and on completion return the output. + Checks the plugin module for undefined names. This catches + missing imports, references to nonexistent variables, etc. 
+ + ..note:: + We are using the legacy flake8 api, because there is currently + no public, stable api for flake8 >= 3.0.0 + + For more info, see + https://flake8.pycqa.org/en/latest/user/python-api.html """ - importer = plugin_importer.PluginImporter(src_dir, entry_point_module, - entry_point_object, - plugin_type, True) - manifest, warnings = importer.import_plugin() + warnings = defaultdict(list) + src_dir = self.__plugin_config_content['srcDir'] + exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) + style_guide = flake8.get_style_guide(select=["F821"], + exclude=[exclude_dir], + quiet=1) + style_guide.check_files(paths=[src_dir]) + file_checkers = style_guide._application.file_checker_manager.checkers + + for checker in file_checkers: + for result in checker.results: + # From the api code, result is a tuple defined as: error = + # (error_code, line_number, column, text, physical_line) + if result[0] == 'F821': + msg = "{} on line {} in {}".format(result[3], result[1], + checker.filename) + warnings['exception'].append(exceptions.UserError(msg)) + + if warnings and len(warnings) > 0: + raise exceptions.ValidationFailedError(warnings) - return manifest, warnings + @staticmethod + def split_entry_point(entry_point): + entry_point_strings = entry_point.split(':') + return entry_point_strings[0], entry_point_strings[1] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py index fe227c89..46354fce 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py @@ -5,16 +5,14 @@ import json import logging import os -from collections import defaultdict, namedtuple +from collections import namedtuple from dlpx.virtualization._internal import exceptions -from dlpx.virtualization._internal.util_classes import ValidationMode from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple('validation_result', - ['plugin_schemas', 'warnings']) +validation_result = namedtuple('validation_result', ['plugin_schemas']) class SchemaValidator: @@ -24,42 +22,18 @@ class SchemaValidator: Returns: On successful validation, callers can get the content of the plugin schemas. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back. """ - def __init__(self, - schema_file, - plugin_meta_schema, - validation_mode, - schemas=None): + def __init__(self, schema_file, plugin_meta_schema, schemas=None): self.__schema_file = schema_file self.__plugin_meta_schema = plugin_meta_schema - self.__validation_mode = validation_mode self.__plugin_schemas = schemas - self.__warnings = defaultdict(list) @property def result(self): - return validation_result(plugin_schemas=self.__plugin_schemas, - warnings=self.__warnings) + return validation_result(plugin_schemas=self.__plugin_schemas) def validate(self): - """ - Validates the plugin schema file. 
- """ - logger.debug('Run schema validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin schema file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin schema file : %s', - e) - else: - raise e - - def __run_validations(self): """ Reads a plugin schema file and validates the contents using a pre-defined schema. @@ -84,12 +58,12 @@ def __read_schema_file(self): return json.load(f) except ValueError as err: raise exceptions.UserError( - 'Failed to load schemas because {!r} is not a ' + 'Failed to load schemas because \'{}\' is not a ' 'valid json file. Error: {}'.format( self.__schema_file, err)) except (IOError, OSError) as err: raise exceptions.UserError( - 'Unable to load schemas from {!r}' + 'Unable to load schemas from \'{}\'' '\nError code: {}. Error message: {}'.format( self.__schema_file, err.errno, os.strerror(err.errno))) @@ -106,13 +80,13 @@ def __validate_schemas(self): plugin_meta_schema = json.load(f) except ValueError as err: raise exceptions.UserError( - 'Failed to load schemas because {!r} is not a ' + 'Failed to load schemas because \'{}\' is not a ' 'valid json file. Error: {}'.format( self.__plugin_meta_schema, err)) except (IOError, OSError) as err: raise exceptions.UserError( - 'Unable to read plugin schema file {!r}' + 'Unable to read plugin schema file \'{}\'' '\nError code: {}. Error message: {}'.format( self.__plugin_meta_schema, err.errno, os.strerror(err.errno))) @@ -125,7 +99,7 @@ def __validate_schemas(self): # validation errors and report everything wrong with the schema. # validation_errors = sorted(v.iter_errors(self.__plugin_schemas), - key=str) + key=lambda e: e.path) if validation_errors: raise exceptions.SchemaValidationError(self.__schema_file, diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 500fc924..54a8c453 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -1,10 +1,16 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # # -# The version of the tools package. This is the single source of truth. -# Anything needing the version should read from here. +# Until Gradle is removed, unfortunately there are two places the version +# is specified. The build.gradle file in the root contains the version +# that is used when creating a distribution for the package. This +# version is used during dvp runtime. One example of this is displaying +# the version in dvp --version. Another is during dvp build this version +# is used to install the wrappers that are bundled with the plugin. +# +# package_version is the version of the tools package. # # This package follows semantic versioning. # More can be read here: https://semver.org/ @@ -14,8 +20,7 @@ # versions in those packages until they are shipped out of band. 
# [General] -engine_api_version = 1.10.5 -package_version = 1.0.0 +engine_api_version = 1.11.2 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py b/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py deleted file mode 100644 index 345a4e34..00000000 --- a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py +++ /dev/null @@ -1,61 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -import enum -import os - -STAGED_TYPE = 'STAGED' -DIRECT_TYPE = 'DIRECT' - -OUTPUT_DIR_NAME = '.dvp-gen-output' -PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), - 'validation_schemas') -PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, - 'plugin_config_schema.json') - -# -# This is a temporary file. Once blackbox has made the transition to 'id' -# instead of 'name' and uses UUIDs for the id, this, and everything -# associated with it can be removed. -# -PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( - PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') - -PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') - - -class ValidationMode(enum.Enum): - """ - Defines the validation mode that validator uses. - INFO - validator will give out info messages if validation fails. - WARNING - validator will log a warning if validation fails. - ERROR - validator will raise an exception if validation fails. - """ - INFO = 1 - WARNING = 2 - ERROR = 3 - - -class MessageUtils: - """ - Defines helpers methods to format warning and exception messages. - """ - @staticmethod - def exception_msg(exceptions): - exception_msg = '\n'.join( - MessageUtils.__format_msg('Error', ex) - for ex in exceptions['exception']) - return exception_msg - - @staticmethod - def warning_msg(warnings): - warning_msg = '\n'.join( - MessageUtils.__format_msg('Warning', warning) - for warning in warnings['warning']) - return warning_msg - - @staticmethod - def __format_msg(msg_type, msg): - msg_str = "{}: {}".format(msg_type, msg) - return msg_str diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json index 847b4381..5d49e478 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json @@ -8,9 +8,8 @@ "name": { "type": "string" }, - "version": { - "type": "string", - "pattern": "^([0-9]+)\\.([0-9]+)\\.([a-zA-Z0-9_]+)$" + "externalVersion": { + "type": "string" }, "hostTypes": { "type": "array", @@ -18,7 +17,7 @@ "type": "string", "enum": [ "UNIX", "WINDOWS" ] }, - "maxItems": 2 + "maxItems": 1 }, "entryPoint": { "type": "string", @@ -47,8 +46,12 @@ "defaultLocale": { "type": "string", "default": "en-us" + }, + "buildNumber": { + "type": "string", + "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" } }, "additionalProperties": false, - "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language"] + "required": ["id", "name", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"] } diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json 
b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json index 1e3e4258..703382fa 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json @@ -7,9 +7,8 @@ "name": { "type": "string" }, - "version": { - "type": "string", - "pattern": "^([0-9]+)\\.([0-9]+)\\.([a-zA-Z0-9_]+)$" + "externalVersion": { + "type": "string" }, "hostTypes": { "type": "array", @@ -17,7 +16,7 @@ "type": "string", "enum": [ "UNIX", "WINDOWS" ] }, - "maxItems": 2 + "maxItems": 1 }, "entryPoint": { "type": "string", @@ -46,8 +45,12 @@ "defaultLocale": { "type": "string", "default": "en-us" + }, + "buildNumber": { + "type": "string", + "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" } }, "additionalProperties": false, - "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language"] + "required": ["id", "name", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"] } diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml index 2d5b0f99..063877ad 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml @@ -139,23 +139,35 @@ EXPECTED_DIRECT_ARGS_BY_OP: - virtual_source - repository +EXPECTED_UPGRADE_ARGS: + repository_id_to_impl: + - old_repository + source_config_id_to_impl: + - old_source_config + linked_source_id_to_impl: + - old_linked_source + virtual_source_id_to_impl: + - old_virtual_source + snapshot_id_to_impl: + - old_snapshot + REQUIRED_METHODS_BY_PLUGIN_TYPE: DIRECT: - hasRepositoryDiscovery: discovery.repository(), - hasSourceConfigDiscovery: discovery.source_config(), - hasLinkedPostSnapshot: linked.post_snapshot(), - hasVirtualConfigure: virtual.configure(), - hasVirtualReconfigure: virtual.reconfigure(), - hasVirtualPostSnapshot: virtual.post_snapshot(), + hasRepositoryDiscovery: discovery.repository() + hasSourceConfigDiscovery: discovery.source_config() + hasLinkedPostSnapshot: linked.post_snapshot() + hasVirtualConfigure: virtual.configure() + hasVirtualReconfigure: virtual.reconfigure() + hasVirtualPostSnapshot: virtual.post_snapshot() hasVirtualMountSpecification: virtual.mount_specification() STAGED: - hasRepositoryDiscovery: discovery.repository(), - hasSourceConfigDiscovery: discovery.source_config(), - hasLinkedPostSnapshot: linked.post_snapshot(), - hasLinkedMountSpecification: linked.mount_specification(), - hasVirtualConfigure: virtual.configure(), - hasVirtualReconfigure: virtual.reconfigure(), - hasVirtualPostSnapshot: virtual.post_snapshot(), + hasRepositoryDiscovery: discovery.repository() + hasSourceConfigDiscovery: discovery.source_config() + hasLinkedPostSnapshot: linked.post_snapshot() + hasLinkedMountSpecification: linked.mount_specification() + hasVirtualConfigure: virtual.configure() + hasVirtualReconfigure: virtual.reconfigure() + hasVirtualPostSnapshot: virtual.post_snapshot() hasVirtualMountSpecification: virtual.mount_specification() REQUIRED_METHODS_DESCRIPTION: diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py 
b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 99d7a301..7e4271b1 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -1,16 +1,17 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import json import os -import mock -import pytest import yaml -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.commands import build -from dlpx.virtualization._internal.plugin_validator import PluginValidator +from dlpx.virtualization._internal.plugin_importer import PluginImporter + +import mock +import pytest @pytest.fixture @@ -28,7 +29,11 @@ class TestBuild: 'dlpx.virtualization._internal.plugin_util.get_plugin_manifest', return_value={}) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_build_success(mock_generate_python, mock_plugin_manifest, + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) + def test_build_success(mock_relative_path, mock_install_deps, + mock_generate_python, mock_plugin_manifest, plugin_config_file, artifact_file, artifact_content, codegen_gen_py_inputs): gen_py = codegen_gen_py_inputs @@ -53,13 +58,17 @@ def test_build_success(mock_generate_python, mock_plugin_manifest, @staticmethod @pytest.mark.parametrize('artifact_filename', ['somefile.json']) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) def test_build_success_non_default_output_file( - mock_generate_python, mock_import_plugin, plugin_config_file, - artifact_file, artifact_content, codegen_gen_py_inputs): + mock_relative_path, mock_install_deps, mock_generate_python, + mock_import_plugin, plugin_config_file, artifact_file, + artifact_content, codegen_gen_py_inputs): gen_py = codegen_gen_py_inputs # Before running build assert that the artifact file does not exist. 
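A note on the argument order seen throughout these test hunks: the newly added mocks (mock_relative_path for os.path.isabs, mock_install_deps for install_deps) lead the parameter lists because stacked @mock.patch decorators are applied bottom-up, so the decorator closest to the function supplies the first mock argument. A minimal standalone sketch, not part of this diff and with hypothetical targets, illustrating the ordering rule:

    import mock


    @mock.patch('os.path.exists')   # farthest decorator, injected second
    @mock.patch('os.path.isabs')    # closest decorator, injected first
    def check_order(mock_isabs, mock_exists):
        # Within the call, both targets are replaced by MagicMock objects.
        return mock_isabs, mock_exists


    print(check_order())  # (mock for os.path.isabs, mock for os.path.exists)

This is why, in test_build_success above, @mock.patch('os.path.isabs', ...) sits last in the decorator stack and mock_relative_path comes first in the signature.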
@@ -86,7 +95,11 @@ def test_build_success_non_default_output_file( 'dlpx.virtualization._internal.plugin_util.get_plugin_manifest', return_value={}) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_build_codegen_fail(mock_generate_python, mock_plugin_manifest, + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) + def test_build_codegen_fail(mock_relative_path, mock_install_deps, + mock_generate_python, mock_plugin_manifest, plugin_config_file, artifact_file, codegen_gen_py_inputs): gen_py = codegen_gen_py_inputs @@ -119,7 +132,11 @@ def test_build_codegen_fail(mock_generate_python, mock_plugin_manifest, 'dlpx.virtualization._internal.plugin_util.get_plugin_manifest', return_value={}) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_build_manifest_fail(mock_generate_python, mock_plugin_manifest, + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) + def test_build_manifest_fail(mock_relative_path, mock_install_deps, + mock_generate_python, mock_plugin_manifest, plugin_config_file, artifact_file, codegen_gen_py_inputs): gen_py = codegen_gen_py_inputs @@ -154,7 +171,11 @@ def test_build_manifest_fail(mock_generate_python, mock_plugin_manifest, 'dlpx.virtualization._internal.plugin_util.get_plugin_manifest', return_value={}) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_build_prepare_artifact_fail(mock_generate_python, + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) + def test_build_prepare_artifact_fail(mock_relative_path, mock_install_deps, + mock_generate_python, mock_plugin_manifest, mock_prep_artifact, plugin_config_file, artifact_file, @@ -192,11 +213,13 @@ def test_build_prepare_artifact_fail(mock_generate_python, 'dlpx.virtualization._internal.plugin_util.get_plugin_manifest', return_value={}) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_build_generate_artifact_fail(mock_generate_python, - mock_plugin_manifest, - mock_gen_artifact, - plugin_config_file, artifact_file, - codegen_gen_py_inputs): + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) + def test_build_generate_artifact_fail( + mock_relative_path, mock_install_deps, mock_generate_python, + mock_plugin_manifest, mock_gen_artifact, plugin_config_file, + artifact_file, codegen_gen_py_inputs): gen_py = codegen_gen_py_inputs # Before running build assert that the artifact file does not exist. 
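The buildNumber handling exercised further down in this file (test_build_number_parameter) comes from the get_standardized_build_number helper added to plugin_util.py earlier in this diff. A standalone sketch of that trimming logic, with standardize as a local stand-in for the real helper; the assertions mirror the parametrized cases in the test below:

    def standardize(build_number):
        # int() drops any leading zeros inside each dot-separated part...
        parts = [int(i) for i in build_number.split('.')]
        # ...then all-zero trailing parts are popped, so 5.3.0.0 -> 5.3.
        while parts and parts[-1] == 0:
            parts.pop()
        return '.'.join(str(i) for i in parts)


    assert standardize('0.0.1') == '0.0.1'      # no trailing zero parts
    assert standardize('0.1.0') == '0.1'        # trailing zero part removed
    assert standardize('1.0.01.0') == '1.0.1'   # '01' also loses its leading zero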
@@ -227,7 +250,11 @@ def test_build_generate_artifact_fail(mock_generate_python, @mock.patch('dlpx.virtualization._internal.commands.build' '.prepare_upload_artifact') @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_generate_only_success(mock_generate_python, mock_prep_artifact, + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) + def test_generate_only_success(mock_relative_path, mock_install_deps, + mock_generate_python, mock_prep_artifact, plugin_config_file, artifact_file, codegen_gen_py_inputs): gen_py = codegen_gen_py_inputs @@ -293,8 +320,7 @@ def test_zip_and_encode_source_files_invalid_dir(src_dir): @staticmethod @mock.patch('compileall.compile_dir') - def test_zip_and_encode_source_files_compileall_fail( - mock_compile, src_dir): + def test_zip_and_encode_source_files_compileall_fail(mock_compile, src_dir): mock_compile.return_value = 0 with pytest.raises(exceptions.UserError) as err_info: build.zip_and_encode_source_files(src_dir) @@ -315,21 +341,25 @@ def test_zip_and_encode_source_files_encode_fail(mock_encode, src_dir): ''.format(src_dir, 'something')) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) + @mock.patch( + 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') + @mock.patch('os.path.isabs', return_value=False) @pytest.mark.parametrize(('plugin_id', 'skip_id_validation'), [('77f18ce4-4425-4cd6-b9a7-23653254d660', False), ('77f18ce4-4425-4cd6-b9a7-23653254d660', True), ('mongo', True)]) - def test_id_validation_positive(mock_import_plugin, plugin_config_file, + def test_id_validation_positive(mock_relative_path, mock_install_deps, + mock_import_plugin, plugin_config_file, artifact_file, skip_id_validation): build.build(plugin_config_file, artifact_file, False, skip_id_validation) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @pytest.mark.parametrize('plugin_id', ['mongo']) def test_id_validation_negative(mock_import_plugin, plugin_config_file, @@ -369,7 +399,7 @@ def test_plugin_bad_format(mock_generate_python, plugin_config_file, message = err_info.value.message assert ('Command failed because the plugin config file ' - 'provided as input {!r} was not valid yaml. ' + 'provided as input \'{}\' was not valid yaml. ' 'Verify the file contents. 
' 'Error position: 3:9'.format(plugin_config_file)) in message @@ -403,9 +433,10 @@ def test_plugin_bad_language(mock_generate_python, plugin_config_file, @staticmethod @pytest.mark.parametrize('src_dir', ['/not/a/real/dir/src']) + @mock.patch('os.path.isabs', return_value=False) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_plugin_no_src_dir(mock_generate_python, plugin_config_file, - artifact_file): + def test_plugin_no_src_dir(mock_generate_python, mock_path_is_relative, + plugin_config_file, artifact_file): with pytest.raises(exceptions.UserError) as err_info: build.build(plugin_config_file, artifact_file, False, False) @@ -425,15 +456,16 @@ def test_plugin_schema_not_file(mock_generate_python, plugin_config_file, build.build(plugin_config_file, artifact_file, False, False) message = err_info.value.message - assert message == 'The path {!r} should be a file but is not.'.format( + assert message == "The path '{}' should be a file but is not.".format( schema_file) assert not mock_generate_python.called @staticmethod @mock.patch('dlpx.virtualization._internal.codegen.generate_python') - def test_plugin_src_not_dir(mock_generate_python, plugin_config_file, - artifact_file, src_dir): + @mock.patch('os.path.isabs', return_value=False) + def test_plugin_src_not_dir(mock_relative_path, mock_generate_python, + plugin_config_file, artifact_file, src_dir): # Delete the src dir folder and create a file there instead os.rmdir(src_dir) with open(src_dir, 'w') as f: @@ -442,8 +474,8 @@ def test_plugin_src_not_dir(mock_generate_python, plugin_config_file, build.build(plugin_config_file, artifact_file, False, False) message = err_info.value.message - assert message == ('The path {!r} should be a' - ' directory but is not.'.format(src_dir)) + assert message == ("The path '{}' should be a" + " directory but is not.".format(src_dir)) assert not mock_generate_python.called @@ -473,7 +505,7 @@ def test_schema_file_bad_permission(mock_generate_python, message = err_info.value.message assert ( - 'Unable to load schemas from {!r}\nError code: 13.' + 'Unable to load schemas from \'{}\'\nError code: 13.' ' Error message: Permission denied'.format(schema_file)) in message assert not mock_generate_python.called @@ -489,9 +521,10 @@ def test_schema_bad_format(mock_generate_python, plugin_config_file, build.build(plugin_config_file, artifact_file, False, False) message = err_info.value.message - assert ('Failed to load schemas because {!r} is not a valid json file.' - ' Error: Extra data: line 2 column 1 - line 2 column 9' - ' (char 19 - 27)'.format(schema_file)) in message + assert ( + 'Failed to load schemas because \'{}\' is not a valid json file.' + ' Error: Extra data: line 2 column 1 - line 2 column 9' + ' (char 19 - 27)'.format(schema_file)) in message assert not mock_generate_python.called @@ -594,13 +627,27 @@ def test_manual_discovery_parameter(plugin_config_content, src_dir, @staticmethod def test_plugin_config_schemas_diff(): - with open(util_classes.PLUGIN_CONFIG_SCHEMA) as f: + with open(const.PLUGIN_CONFIG_SCHEMA) as f: config_schema = json.load(f) - with open(util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: + with open(const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: config_schema_no_id = json.load(f) # Only the id's pattern should be different so remove it. 
config_schema['properties']['id'].pop('pattern') assert config_schema == config_schema_no_id + + @staticmethod + @pytest.mark.parametrize('build_number, expected', [ + pytest.param('0.0.1', '0.0.1'), + pytest.param('0.1.0', '0.1'), + pytest.param('1.0.01.0', '1.0.1') + ]) + def test_build_number_parameter(plugin_config_content, src_dir, + schema_content, expected): + + upload_artifact = build.prepare_upload_artifact( + plugin_config_content, src_dir, schema_content, {}) + + assert expected == upload_artifact['buildNumber'] diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py index 30c64df4..adca36ce 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import errno @@ -8,8 +8,7 @@ import subprocess import pytest -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - util_classes) +from dlpx.virtualization._internal import codegen, const, exceptions, file_util class TestCodegen: @@ -139,7 +138,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): assert popen_helper.package_name == codegen.CODEGEN_PACKAGE assert popen_helper.module_name == codegen.CODEGEN_MODULE expected_output_dir = os.path.join(gen_py.plugin_content_dir, - util_classes.OUTPUT_DIR_NAME) + const.OUTPUT_DIR_NAME) assert popen_helper.output_dir == expected_output_dir # Validate that the "generated" file were copied. @@ -158,7 +157,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): @staticmethod def test_get_build_dir_success(tmpdir): - testdir = os.path.join(tmpdir.strpath, util_classes.OUTPUT_DIR_NAME) + testdir = os.path.join(tmpdir.strpath, const.OUTPUT_DIR_NAME) file_util.make_dir(testdir, True) assert os.path.exists(testdir) assert os.path.isdir(testdir) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py index 34f269cb..d663e7c3 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py @@ -5,11 +5,12 @@ import json import os +import requests +from dlpx.virtualization._internal import delphix_client, exceptions + import httpretty import mock import pytest -import requests -from dlpx.virtualization._internal import delphix_client, exceptions @pytest.mark.usefixtures('httpretty_enabled') @@ -221,6 +222,30 @@ def httpretty_enabled(): 'Date': 'Mon, 04 Feb 2019 08:09:44 GMT' }) + JOB_RESP_FAIL = (('{"type": "OKResult", "status": "OK", "result": ' + '{"jobState": "FAILED", "events": []}}'), { + 'X-Frame-Options': 'SAMEORIGIN', + 'X-Content-Type-Options': 'nosniff', + 'X-XSS-Protection': '1; mode=block', + 'Cache-Control': 'max-age=0', + 'Expires': 'Mon, 04 Feb 2019 23:12:00 GMT', + 'Content-Type': 'application/json', + 'Content-Length': '71', + 'Date': 'Mon, 09 Mar 2020 12:09:27 GMT' + }) + + JOB_RESP_TIMED_OUT = (('{"type": "OKResult", "status": "OK", "result": ' + '{"jobState": "RUNNING", "events": []}}'), { + 'X-Frame-Options': 'SAMEORIGIN', + 'X-Content-Type-Options': 'nosniff', + 'X-XSS-Protection': '1; mode=block', + 'Cache-Control': 'max-age=0', + 
'Expires': 'Mon, 04 Feb 2019 23:12:00 GMT', + 'Content-Type': 'application/json', + 'Content-Length': '71', + 'Date': 'Mon, 09 Mar 2020 12:09:27 GMT' + }) + PLUGIN_RESP_SUCCESS = ( '{"type": "ListResult", "status": "OK", "result": [' '{"type": "Toolkit", "reference": "APPDATA_TOOLKIT-1",' @@ -288,7 +313,7 @@ def test_delphix_client_upload_success(engine_api, artifact_content): dc = delphix_client.DelphixClient('test-engine.com') dc.login(engine_api, 'admin', 'delphix') - dc.upload_plugin('plugin name', artifact_content) + dc.upload_plugin('plugin name', artifact_content, False) history = httpretty.HTTPretty.latest_requests assert history[-1].path == u'/resources/json/delphix/data/upload' @@ -489,7 +514,7 @@ def test_delphix_client_get_token_fail(engine_api, artifact_content): dc.login(engine_api, 'admin', 'delphix') with pytest.raises(exceptions.UnexpectedError) as err_info: - dc.upload_plugin('plugin name', artifact_content) + dc.upload_plugin('plugin name', artifact_content, False) assert err_info.value.status_code == 403 assert err_info.value.response == token_body @@ -537,7 +562,7 @@ def test_delphix_client_upload_fail(engine_api, artifact_content): dc.login(engine_api, 'admin', 'delphix') with pytest.raises(exceptions.HttpError) as err_info: - dc.upload_plugin('plugin name', artifact_content) + dc.upload_plugin('plugin name', artifact_content, False) error = err_info.value.error message = err_info.value.message assert err_info.value.status_code == 200 @@ -565,6 +590,99 @@ def test_delphix_client_upload_fail(engine_api, artifact_content): assert history[-3].path == u'/resources/json/delphix/login' assert history[-4].path == u'/resources/json/delphix/session' + @staticmethod + def test_delphix_client_wait_for_job_to_complete_job_failed( + engine_api, artifact_content): + session_body, session_header = TestDelphixClient.SES_RESP_SUCCESS + httpretty.register_uri( + httpretty.POST, + 'http://test-engine.com/resources/json/delphix/session', + body=session_body, + forcing_headers=session_header) + + login_body, login_header = TestDelphixClient.LOGIN_RESP_SUCCESS + httpretty.register_uri( + httpretty.POST, + 'http://test-engine.com/resources/json/delphix/login', + body=login_body, + forcing_headers=login_header) + + token_body, token_header = TestDelphixClient.TOKEN_RESP_SUCCESS + httpretty.register_uri(httpretty.POST, + 'http://test-engine.com/resources/' + 'json/delphix/toolkit/requestUploadToken', + body=token_body, + forcing_headers=token_header) + + job_body, job_header = TestDelphixClient.JOB_RESP_FAIL + httpretty.register_uri(httpretty.GET, + 'http://test-engine.com/resources/json/' + 'delphix/action/ACTION-161/getJob', + body=job_body) + + dc = delphix_client.DelphixClient('test-engine.com') + dc.login(engine_api, 'admin', 'delphix') + + with pytest.raises(exceptions.PluginUploadJobFailed) as err_info: + dc._wait_for_upload_to_complete('nix_direct_python', 'ACTION-161', + 'JOB-38') + + assert err_info.value.message == ('Failed trying to upload plugin ' + 'nix_direct_python.') + + history = httpretty.HTTPretty.latest_requests + assert (history[-1].path == + u'/resources/json/delphix/action/ACTION-161/getJob') + assert history[-2].path == u'/resources/json/delphix/login' + assert history[-3].path == u'/resources/json/delphix/session' + + @staticmethod + def test_delphix_client_wait_for_job_to_complete_timed_out( + engine_api, artifact_content): + session_body, session_header = TestDelphixClient.SES_RESP_SUCCESS + httpretty.register_uri( + httpretty.POST, + 
'http://test-engine.com/resources/json/delphix/session', + body=session_body, + forcing_headers=session_header) + + login_body, login_header = TestDelphixClient.LOGIN_RESP_SUCCESS + httpretty.register_uri( + httpretty.POST, + 'http://test-engine.com/resources/json/delphix/login', + body=login_body, + forcing_headers=login_header) + + token_body, token_header = TestDelphixClient.TOKEN_RESP_SUCCESS + httpretty.register_uri(httpretty.POST, + 'http://test-engine.com/resources/' + 'json/delphix/toolkit/requestUploadToken', + body=token_body, + forcing_headers=token_header) + + job_body, job_header = TestDelphixClient.JOB_RESP_TIMED_OUT + httpretty.register_uri(httpretty.GET, + 'http://test-engine.com/resources/json/' + 'delphix/action/ACTION-161/getJob', + body=job_body) + + dc = delphix_client.DelphixClient('test-engine.com', 0) + dc.login(engine_api, 'admin', 'delphix') + + with pytest.raises(exceptions.PluginUploadWaitTimedOut) as err_info: + dc._wait_for_upload_to_complete('nix_direct_python', 'ACTION-161', + 'JOB-38') + + assert err_info.value.message == ('Timed out waiting for upload of ' + 'plugin nix_direct_python to ' + 'complete.') + + history = httpretty.HTTPretty.latest_requests + assert (history[-1].path == + u'/resources/json/delphix/action/ACTION-161/getJob') + assert history[-2].path == u'/resources/json/delphix/login' + assert history[-3].path == u'/resources/json/delphix/session' + @staticmethod def test_delphix_client_download_success(engine_api, src_dir, plugin_config_file): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_download_logs.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_download_logs.py index f512dbef..e302a79b 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_download_logs.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_download_logs.py @@ -2,10 +2,11 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # -import pytest from dlpx.virtualization._internal import delphix_client, exceptions from dlpx.virtualization._internal.commands import download_logs +import pytest + class FakeDelphixClient(object): """ diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 4593f625..5486eaa6 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import ast @@ -9,9 +9,8 @@ import jinja2 import mock import pytest -from dlpx.virtualization._internal import (exceptions, plugin_util, - plugin_validator, schema_validator, - util_classes) +from dlpx.virtualization._internal import (const, exceptions, plugin_util, + plugin_validator, schema_validator) from dlpx.virtualization._internal.commands import initialize as init @@ -47,43 +46,55 @@ def staged_operations_template(): def format_entry_point_template(entry_point_template): template = jinja2.Environment().from_string(entry_point_template) - def format_template(plugin_name, ingestion_strategy): - if ingestion_strategy == util_classes.DIRECT_TYPE: + def format_template(plugin_name, ingestion_strategy, host_type): + if host_type == const.WINDOWS_HOST_TYPE: + default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" + elif host_type == const.UNIX_HOST_TYPE: + default_mount_path = "/tmp/dlpx_staged_mounts/{}" + + if ingestion_strategy == const.DIRECT_TYPE: operations = direct_operations_template() - elif ingestion_strategy == util_classes.STAGED_TYPE: - operations = staged_operations_template() + elif ingestion_strategy == const.STAGED_TYPE: + operations = jinja2.Environment().from_string( + staged_operations_template()) + operations = operations.render( + default_mount_path=default_mount_path) else: raise RuntimeError( - 'Got unrecognized ingestion strategy: {!r}'.format( + 'Got unrecognized ingestion strategy: {}'.format( ingestion_strategy)) return template.render(name=repr(plugin_name), - linked_operations=operations) + linked_operations=operations, + default_mount_path=default_mount_path) return format_template class TestInitialize: @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) - def test_init(tmpdir, ingestion_strategy, schema_template, plugin_name, - format_entry_point_template): + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) + @pytest.mark.parametrize('host_type', + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]) + def test_init(tmpdir, ingestion_strategy, host_type, schema_template, + plugin_name, format_entry_point_template): # Initialize an empty directory. - init.init(tmpdir.strpath, ingestion_strategy, plugin_name) + init.init(tmpdir.strpath, ingestion_strategy, plugin_name, host_type) # Validate the config file is as we expect. - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content + assert config['hostTypes'] == [host_type] assert config['pluginType'] == ingestion_strategy assert config['name'] == plugin_name assert config['entryPoint'] == init.DEFAULT_ENTRY_POINT assert config['srcDir'] == init.DEFAULT_SRC_DIRECTORY assert config['schemaFile'] == init.DEFAULT_SCHEMA_FILE + assert config['buildNumber'] == init.DEFAULT_BUILD_NUMBER # Validate the schema file is identical to the template. 
schema_file_path = os.path.join(tmpdir.strpath, config['schemaFile']) @@ -101,15 +112,15 @@ def test_init(tmpdir, ingestion_strategy, schema_template, plugin_name, with open(entry_file_path, 'r') as f: contents = f.read() assert contents == format_entry_point_template( - config['id'], ingestion_strategy) + config['id'], ingestion_strategy, host_type) @staticmethod def test_init_without_plugin_name(tmpdir): - init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, "") + init.init(tmpdir.strpath, const.DIRECT_TYPE, "", const.UNIX_HOST_TYPE) - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content @@ -117,38 +128,58 @@ def test_init_without_plugin_name(tmpdir): assert config['name'] == config['id'] @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) + def test_init_windows_plugin(tmpdir, plugin_name): + init.init(tmpdir.strpath, const.DIRECT_TYPE, plugin_name, + const.WINDOWS_HOST_TYPE) + result = plugin_util.validate_plugin_config_file( + os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), + True) + config = result.plugin_config_content + + # Validate that the host type is WINDOWS + host_types = config['hostTypes'] + assert len(host_types) == 1 + assert host_types[0] == const.WINDOWS_HOST_TYPE + + @staticmethod + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, plugin_name): - init.init(tmpdir.strpath, ingestion_strategy, plugin_name) + init.init(tmpdir.strpath, ingestion_strategy, plugin_name, + const.UNIX_HOST_TYPE) plugin_config_file = os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE) schema_file = os.path.join(tmpdir.strpath, init.DEFAULT_SCHEMA_FILE) validator = plugin_validator.PluginValidator(plugin_config_file, - schema_file, True, True) - validator.validate() + schema_file) + + # Assert config file validation is not done. + assert not validator.result.plugin_config_content + + validator.validate_plugin_config() - assert not validator.result.warnings + # Assert config file is validated. 
+ assert validator.result.plugin_config_content @staticmethod def test_invalid_with_config_file(plugin_config_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(plugin_config_file), - util_classes.DIRECT_TYPE, None) + init.init(os.path.dirname(plugin_config_file), const.DIRECT_TYPE, + None, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_schema_file(schema_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(schema_file), util_classes.DIRECT_TYPE, - None) + init.init(os.path.dirname(schema_file), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_src_dir(src_dir): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(src_dir), util_classes.DIRECT_TYPE, None) + init.init(os.path.dirname(src_dir), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod @mock.patch('yaml.dump') @@ -157,7 +188,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, tmpdir, plugin_name): mock_yaml_dump.side_effect = RuntimeError() with pytest.raises(exceptions.UserError): - init.init(tmpdir.strpath, util_classes.STAGED_TYPE, plugin_name) + init.init(tmpdir.strpath, const.STAGED_TYPE, plugin_name, + const.UNIX_HOST_TYPE) src_dir_path = os.path.join(tmpdir.strpath, init.DEFAULT_SRC_DIRECTORY) config_file_path = os.path.join(tmpdir.strpath, @@ -170,9 +202,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, @staticmethod def test_default_schema_definition(schema_template): - validator = schema_validator.SchemaValidator( - None, util_classes.PLUGIN_SCHEMA, - util_classes.ValidationMode.ERROR, schema_template) + validator = schema_validator.SchemaValidator(None, const.PLUGIN_SCHEMA, + schema_template) validator.validate() # Validate the repository schema only has the 'name' property. 
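(As an aside, the lazy-validation flow that test_plugin_from_init_is_valid above now asserts can be sketched as follows. This is only a sketch built from the PluginValidator API visible in this diff; the file paths are illustrative, not taken from the repository.)

    from dlpx.virtualization._internal import plugin_validator

    # Constructing the validator parses nothing yet; the result starts empty.
    validator = plugin_validator.PluginValidator('plugin_config.yml',
                                                 'schema.json')
    assert not validator.result.plugin_config_content

    # validate_plugin_config() reads and validates the plugin config file;
    # only afterwards is the parsed content available on the result.
    validator.validate_plugin_config()
    assert validator.result.plugin_config_content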
@@ -207,7 +238,7 @@ def test_default_schema_definition(schema_template): @staticmethod def test_default_entry_point(plugin_id): entry_point_contents = init._get_entry_point_contents( - plugin_id, util_classes.DIRECT_TYPE) + plugin_id, const.DIRECT_TYPE, const.UNIX_HOST_TYPE) tree = ast.parse(entry_point_contents) for stmt in ast.walk(tree): if isinstance(stmt, ast.Assign): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py index 7bea69b8..bb7e9d89 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py @@ -9,9 +9,10 @@ import subprocess import sys -import pytest from dlpx.virtualization._internal import codegen +import pytest + @pytest.fixture(scope='module') def tmp_factory(tmp_path_factory): @@ -130,10 +131,7 @@ def test_success(module): assert not test_object.string_property test_dict = test_object.to_dict() - assert test_dict == { - 'requiredStringProperty': 'test string', - 'stringProperty': None - } + assert test_dict == {'requiredStringProperty': 'test string'} from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -160,10 +158,7 @@ def test_unicode_success(module): assert not test_object.string_property test_dict = test_object.to_dict() - assert test_dict == { - 'requiredStringProperty': u'test\u2345\u2603', - 'stringProperty': None - } + assert test_dict == {'requiredStringProperty': u'test\u2345\u2603'} from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -342,9 +337,7 @@ def test_success(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredNumberProperty': 200.5, - 'numberProperty': None, - 'requiredIntegerProperty': -50, - 'integerProperty': None + 'requiredIntegerProperty': -50 } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -1269,20 +1262,17 @@ def test_successs(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredStringProperty': 'A', - 'stringProperty': None, 'requiredObjectProperty': { 'TWO': 'dos', 'ONE': 'uno' }, - 'objectProperty': None, - 'requiredArrayProperty': ['DO', 'RE', 'MI'], - 'arrayProperty': None + 'requiredArrayProperty': ['DO', 'RE', 'MI'] } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @staticmethod - def test_successs_setter(module): + def test_success_setter(module): test_object = module.TestDefinition(required_string_property='B', required_object_property={}, required_array_property=[]) @@ -1300,14 +1290,11 @@ def test_successs_setter(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredStringProperty': 'A', - 'stringProperty': None, 'requiredObjectProperty': { 'TWO': 'dos', 'ONE': 'uno' }, - 'objectProperty': None, - 'requiredArrayProperty': ['DO', 'RE', 'MI'], - 'arrayProperty': None + 'requiredArrayProperty': ['DO', 'RE', 'MI'] } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_upload.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_upload.py index b98bc1e4..444e5d2c 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_upload.py +++ 
b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_upload.py @@ -4,10 +4,11 @@ import json -import pytest from dlpx.virtualization._internal import delphix_client, exceptions from dlpx.virtualization._internal.commands import upload +import pytest + + class FakeDelphixClient(object): """ @@ -60,7 +61,7 @@ def login(self, engine_api, user, password): 'id': 'exception.webservices.login.failed' }) - def upload_plugin(self, name, content): + def upload_plugin(self, name, content, wait): if content.get('discoveryDefinition') is None: raise exceptions.HttpError( 200, { @@ -112,7 +113,7 @@ def test_upload_success(artifact_file, artifact_content, fake_client): user = 'admin' password = 'delphix' - upload.upload(fake_client.engine, user, artifact_file, password) + upload.upload(fake_client.engine, user, artifact_file, password, False) # Make sure that the fake client was passed in the correct contents. assert ( @@ -127,7 +128,8 @@ def test_upload_no_file(fake_client, artifact_file): password = 'delphix' with pytest.raises(exceptions.UserError) as err_info: - upload.upload(fake_client.engine, user, artifact_file, password) + upload.upload(fake_client.engine, user, artifact_file, password, + False) message = err_info.value.message assert message == ("Unable to read upload artifact file" @@ -147,7 +149,8 @@ def test_upload_file_not_json(artifact_file, fake_client): password = 'delphix' with pytest.raises(exceptions.UserError) as err_info: - upload.upload(fake_client.engine, user, artifact_file, password) + upload.upload(fake_client.engine, user, artifact_file, password, + False) message = err_info.value.message assert message == ( @@ -171,7 +174,8 @@ def test_upload_api_incorrect(artifact_file, fake_client): password = 'delphix' with pytest.raises(exceptions.HttpError) as err_info: - upload.upload(fake_client.engine, user, artifact_file, password) + upload.upload(fake_client.engine, user, artifact_file, password, + False) error = err_info.value.error message = err_info.value.message @@ -202,7 +206,8 @@ def test_upload_password_incorrect(artifact_file, artifact_content, password = 'delphix2' with pytest.raises(exceptions.HttpError) as err_info: - upload.upload(fake_client.engine, user, artifact_file, password) + upload.upload(fake_client.engine, user, artifact_file, password, + False) error = err_info.value.error message = err_info.value.message @@ -227,7 +232,8 @@ def test_upload_plugin_failed(artifact_file, artifact_content, password = 'delphix' with pytest.raises(exceptions.HttpError) as err_info: - upload.upload(fake_client.engine, user, artifact_file, password) + upload.upload(fake_client.engine, user, artifact_file, password, + False) error = err_info.value.error message = err_info.value.message
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 40026159..28ccaef1 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import configparser @@ -7,17 +7,17 @@ import json import os -import pytest import yaml -from dlpx.virtualization._internal import (cli, click_util, package_util, - util_classes) +from dlpx.virtualization._internal import cli, click_util, const, package_util + +import pytest # # conftest.py is used to share fixtures among multiple test files. A fixture # will automatically get discovered in the test class if the fixture name is used # as the input variable. The idea of fixtures is to define certain object # configs and allow them to be used in different tests, while also being allowed -# to set certain parts definated in other fixtures. Read more at: +# to set certain parts defined in other fixtures. Read more at: # https://docs.pytest.org/en/latest/fixture.html # @@ -46,18 +46,6 @@ def plugin_config_filename(): return 'plugin_config.yml' -@pytest.fixture -def fake_staged_plugin_config(): - return os.path.join(os.path.dirname(__file__), - 'fake_plugin/staged/plugin_config.yml') - - -@pytest.fixture -def fake_direct_plugin_config(): - return os.path.join(os.path.dirname(__file__), - 'fake_plugin/direct/plugin_config.yml') - - @pytest.fixture def src_dir(tmpdir, src_dirname): """ @@ -97,11 +85,32 @@ def schema_filename(): return 'schema.json' @pytest.fixture def dvp_config_file(tmpdir, dvp_config_properties): + _write_dvp_config_file(tmpdir, dvp_config_properties=dvp_config_properties) + + +@pytest.fixture +def dev_config_file(tmpdir, dev_config_properties): + _write_dvp_config_file(tmpdir, dev_config_properties=dev_config_properties) + + +@pytest.fixture +def empty_config_file(tmpdir): + _write_dvp_config_file(tmpdir) + + +def _write_dvp_config_file(tmpdir, + dvp_config_properties=None, + dev_config_properties=None): dvp_dir = tmpdir.join(click_util.CONFIG_DIR_NAME).strpath os.mkdir(dvp_dir) dvp_config_filepath = os.path.join(dvp_dir, click_util.CONFIG_FILE_NAME) parser = configparser.ConfigParser() - parser['default'] = dvp_config_properties + if dvp_config_properties: + parser['default'] = dvp_config_properties + + if dev_config_properties: + parser['dev'] = dev_config_properties + with open(dvp_config_filepath, 'wb') as config_file: parser.write(config_file) @@ -160,104 +169,246 @@ def artifact_file_created(): @pytest.fixture -def plugin_config_content(plugin_id, plugin_name, src_dir, schema_file, - language, manual_discovery, plugin_type): +def plugin_config_content(plugin_id, plugin_name, external_version, language, + host_types, plugin_type, entry_point, src_dir, + schema_file, manual_discovery, build_number): """ This fixture creates the dict expected in the properties yaml file that the customer must provide for the build and compile commands. """ config = { - 'version': '2.0.0', - 'hostTypes': ['UNIX'], - 'entryPoint': 'python_vfiles:vfiles', 'defaultLocale': 'en-us', 'rootSquashEnabled': True, } - if id: + + if plugin_id: config['id'] = plugin_id if plugin_name: config['name'] = plugin_name + if external_version: + config['externalVersion'] = external_version + + if language: + config['language'] = language + + if host_types: + config['hostTypes'] = host_types + if plugin_type: config['pluginType'] = plugin_type + if entry_point: + config['entryPoint'] = entry_point + if src_dir: config['srcDir'] = src_dir if schema_file: config['schemaFile'] = schema_file - if language: - config['language'] = language - - # Here we do is not None check because we will be passing in + # Here we do an 'is not None' check because we will be passing in # booleans as a parameter in tests.
if manual_discovery is not None: config['manualDiscovery'] = manual_discovery + if build_number: + config['buildNumber'] = build_number + return config @pytest.fixture -def plugin_entry_point_name(): +def plugin_id(): + return '16bef554-9470-11e9-b2e3-8c8590d4a42c' + + +@pytest.fixture +def plugin_name(): + return 'python_vfiles' + + +@pytest.fixture +def external_version(): + return '2.0.0' + + +@pytest.fixture +def language(): + return 'PYTHON27' + + +@pytest.fixture +def host_types(): + return ['UNIX'] + + +@pytest.fixture +def plugin_type(): + return const.DIRECT_TYPE + + +@pytest.fixture +def entry_point(entry_point_module, entry_point_object): + return '{}:{}'.format(entry_point_module, entry_point_object) + + +@pytest.fixture +def entry_point_module(): + return 'python_vfiles' + + +@pytest.fixture +def entry_point_object(): return 'vfiles' @pytest.fixture -def plugin_module_content(plugin_entry_point_name): +def manual_discovery(): + return None + + +@pytest.fixture +def build_number(): + return '2.0.0' + + +@pytest.fixture +def artifact_manual_discovery(): + return True + + +@pytest.fixture +def plugin_module_content(entry_point_object, discovery_operation, + linked_operation, virtual_operation, + upgrade_operation): class Object(object): pass - discovery = Object() - discovery.repository_impl = True - discovery.source_config_impl = True - - linked = Object() - linked.pre_snapshot_impl = True - linked.post_snapshot_impl = True - linked.start_staging_impl = True - linked.stop_staging_impl = False - linked.status_impl = True - linked.worker_impl = False - linked.mount_specification_impl = True - - virtual = Object() - virtual.configure_impl = True - virtual.unconfigure_impl = False - virtual.reconfigure_impl = True - virtual.start_impl = True - virtual.stop_impl = False - virtual.pre_snapshot_impl = True - virtual.post_snapshot_impl = True - virtual.mount_specification_impl = True - virtual.status_impl = False - virtual.initialize_impl = False - plugin_object = Object() - plugin_object.discovery = discovery - plugin_object.linked = linked - plugin_object.virtual = virtual + plugin_object.discovery = discovery_operation + plugin_object.linked = linked_operation + plugin_object.virtual = virtual_operation + plugin_object.upgrade = upgrade_operation plugin_module = Object() - setattr(plugin_module, plugin_entry_point_name, plugin_object) + setattr(plugin_module, entry_point_object, plugin_object) return plugin_module @pytest.fixture -def plugin_manifest(): +def discovery_operation(): + class DiscoveryOperations(object): + pass + + discovery = DiscoveryOperations() + + def repository_discovery(source_connection): + return None + + def source_config_discovery(source_connection, repository): + return None + + discovery.repository_impl = repository_discovery + discovery.source_config_impl = source_config_discovery + + return discovery + + +@pytest.fixture +def linked_operation(): + class LinkedOperations(object): + pass + + linked = LinkedOperations() + + def pre_snapshot(direct_source, repository, source_config): + pass + + def post_snapshot(direct_source, repository, source_config): + return None + + linked.pre_snapshot_impl = pre_snapshot + linked.post_snapshot_impl = post_snapshot + linked.start_staging_impl = None + linked.stop_staging_impl = None + linked.status_impl = None + linked.worker_impl = None + linked.mount_specification_impl = None + + return linked + + +@pytest.fixture +def virtual_operation(): + class VirtualOperations(object): + pass + + virtual = VirtualOperations() + + 
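# The stub callables below stand in for plugin-implemented virtual + # operations; attributes left as None model operations the plugin does + # not implement (and surface as False in the plugin_manifest fixture). +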
def configure(virtual_source, repository, snapshot): + return None + + def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + def start(virtual_source, repository, source_config): + pass + + def pre_snapshot(virtual_source, repository, source_config): + pass + + def post_snapshot(virtual_source, repository, source_config): + return None + + def mount_specification(virtual_source, repository): + return None + + virtual.configure_impl = configure + virtual.unconfigure_impl = None + virtual.reconfigure_impl = reconfigure + virtual.start_impl = start + virtual.stop_impl = None + virtual.pre_snapshot_impl = pre_snapshot + virtual.post_snapshot_impl = post_snapshot + virtual.mount_specification_impl = mount_specification + virtual.status_impl = None + virtual.initialize_impl = None + + return virtual + + +@pytest.fixture +def upgrade_operation(): + class UpgradeOperation(object): + pass + + upgrade = UpgradeOperation() + upgrade.migration_id_list = [] + upgrade.repository_id_to_impl = {} + upgrade.source_config_id_to_impl = {} + upgrade.linked_source_id_to_impl = {} + upgrade.virtual_source_id_to_impl = {} + upgrade.snapshot_id_to_impl = {} + + return upgrade + + +@pytest.fixture +def plugin_manifest(upgrade_operation): manifest = { 'type': 'PluginManifest', 'hasRepositoryDiscovery': True, 'hasSourceConfigDiscovery': True, 'hasLinkedPreSnapshot': True, 'hasLinkedPostSnapshot': True, - 'hasLinkedStartStaging': True, + 'hasLinkedStartStaging': False, 'hasLinkedStopStaging': False, - 'hasLinkedStatus': True, + 'hasLinkedStatus': False, 'hasLinkedWorker': False, - 'hasLinkedMountSpecification': True, + 'hasLinkedMountSpecification': False, 'hasVirtualConfigure': True, 'hasVirtualUnconfigure': False, 'hasVirtualReconfigure': True, @@ -267,41 +418,12 @@ def plugin_manifest(): 'hasVirtualPostSnapshot': True, 'hasVirtualMountSpecification': True, 'hasVirtualStatus': False, - 'hasInitialize': False + 'hasInitialize': False, + 'migrationIdList': upgrade_operation.migration_id_list } return manifest -@pytest.fixture -def plugin_id(): - return '16bef554-9470-11e9-b2e3-8c8590d4a42c' - - -@pytest.fixture -def plugin_name(): - return 'python_vfiles' - - -@pytest.fixture -def language(): - return 'PYTHON27' - - -@pytest.fixture -def manual_discovery(): - return None - - -@pytest.fixture -def artifact_manual_discovery(): - return True - - -@pytest.fixture -def plugin_type(): - return util_classes.DIRECT_TYPE - - @pytest.fixture def schema_content(repository_definition, source_config_definition, virtual_source_definition, linked_source_definition, @@ -430,7 +552,7 @@ def basic_artifact_content(engine_api, virtual_source_definition, 'type': 'Plugin', 'name': '16bef554-9470-11e9-b2e3-8c8590d4a42c', 'prettyName': 'python_vfiles', - 'version': '2.0.0', + 'externalVersion': '2.0.0', 'defaultLocale': 'en-us', 'language': 'PYTHON27', 'hostTypes': ['UNIX'], @@ -438,6 +560,7 @@ def basic_artifact_content(engine_api, virtual_source_definition, 'buildApi': package_util.get_build_api_version(), 'engineApi': engine_api, 'rootSquashEnabled': True, + 'buildNumber': '2', 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'manifest': {} } @@ -476,7 +599,7 @@ def artifact_content(engine_api, virtual_source_definition, 'type': 'Plugin', 'name': '16bef554-9470-11e9-b2e3-8c8590d4a42c', 'prettyName': 'python_vfiles', - 'version': '2.0.0', + 'externalVersion': '2.0.0', 'defaultLocale': 'en-us', 'language': 'PYTHON27', 'hostTypes': ['UNIX'], @@ -484,6 +607,7 @@ def artifact_content(engine_api, 
virtual_source_definition, 'buildApi': package_util.get_build_api_version(), 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'rootSquashEnabled': True, + 'buildNumber': '2', 'manifest': {} } @@ -513,7 +637,7 @@ def artifact_content(engine_api, virtual_source_definition, @pytest.fixture def engine_api(): - return {'type': 'APIVersion', 'major': 1, 'minor': 10, 'micro': 5} + return {'type': 'APIVersion', 'major': 1, 'minor': 11, 'micro': 2} @pytest.fixture diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py new file mode 100644 index 00000000..cd28b92c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py @@ -0,0 +1,19 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +class ArbitraryError(Exception): + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(ArbitraryError, self).__init__(message) + + +raise ArbitraryError('Got an arbitrary non-platforms error for testing.') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py new file mode 100644 index 00000000..2688405b --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from __future__ import print_function + +import logging + +from dlpx.virtualization.platform import Plugin + +logger = logging.getLogger() +logger.setLevel(logging.NOTSET) + +plugin = Plugin() + + +@plugin.discovery.repository() +def repository_discovery(source_connection): + return None + + +@plugin.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +# Defining the decorator as not a function +@plugin.linked.pre_snapshot() +class PreSnapshot(object): + pass diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py new file mode 100644 index 00000000..6b99f58c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('1234.0.0.') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py new file mode 100644 index 00000000..6ea3add1 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository(['testing', 'out', 'validation']) +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py new file mode 100644 index 00000000..5f8196d1 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('5.4.0.1') +def repo_upgrade(old_repository): + return old_repository + + +@plugin.upgrade.snapshot('5.04.000.01') +def snap_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py similarity index 93% rename from tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py rename to tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py index d5a00513..c0a031bc 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py @@ -71,3 +71,8 @@ def stop(repository, source_config, virtual_source): @vfiles.virtual.unconfigure() def unconfigure(repository, source_config, virtual_source): pass + + +@vfiles.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py new file mode 100644 index 00000000..59fbcc0c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.discovery.repository() +def repository_discovery(source_connection): + return None + + +@plugin.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +# Defining another function with the same decorator +@plugin.discovery.source_config() +def source_config_discovery_two(source_connection, repository): + return None diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml deleted file mode 100644 index 6ddc6cd1..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml +++ /dev/null @@ -1,11 +0,0 @@ -id: 16bef554-9470-11e9-b2e3-8c8590d4a42c -name: Unstructured Files using Python -version: 2.0.0 -hostTypes: - - UNIX -entryPoint: python_vfiles:vfiles -srcDir: src/ -schemaFile: ./schema.json -manualDiscovery: true -pluginType: DIRECT -language: PYTHON27 diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json deleted file mode 100644 index ba2ebcc6..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "repositoryDefinition": { - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "sourceConfigDefinition": { - "type": "object", - "required": ["name", "path"], - "additionalProperties": false, - "properties": { - "name": { "type": "string" }, - "path": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["path"] - }, - "virtualSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : { - "path": { "type": "string" } - } - }, - "linkedSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : {} - }, - "snapshotDefinition": { - "type" : "object", - "additionalProperties" : false, - "properties" : {} - } -} diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py new file mode 100644 index 00000000..010c705a --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py @@ -0,0 +1,84 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin, Status + +direct = Plugin() + + +@direct.discovery.repository() +def repository_discovery(source_connection): + return [] + + +@direct.discovery.source_config() +def source_config_discovery(source_connection, repository): + return [] + + +@direct.linked.pre_snapshot() +def direct_pre_snapshot(direct_source, repository, source_config): + return + + +@direct.linked.post_snapshot() +def direct_post_snapshot(direct_source, repository, source_config): + return None + + +@direct.virtual.configure() +def configure(virtual_source, repository, snapshot): + path = virtual_source.parameters.path + name = "VDB mounted to " + path + return None + + +@direct.virtual.mount_specification() +def mount_specification(repository, virtual_source): + return None + + +@direct.virtual.post_snapshot() +def postSnapshot(repository, source_config, virtual_source): + return None + + +@direct.virtual.pre_snapshot() +def preSnapshot(repository, source_config, virtual_source): + pass + + +@direct.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + +@direct.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@direct.virtual.status() +def status(repository, source_config, virtual_source): + return Status.ACTIVE + + +@direct.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@direct.virtual.unconfigure() +def unconfigure(repository, source_config, virtual_source): + pass + + +@direct.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository + + +@direct.upgrade.snapshot('2019.11.30') +def snap_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py new file mode 100644 index 00000000..68ecd5b2 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py @@ -0,0 +1,100 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin, Status + +direct = Plugin() + + +@direct.discovery.repository() +def repository_discovery(source_connection): + return [] + + +@direct.discovery.source_config() +def source_config_discovery(source_connection, repository): + return [] + + +@direct.linked.pre_snapshot() +def direct_pre_snapshot(direct_source, repository, source_config): + return + + +@direct.linked.post_snapshot() +def direct_post_snapshot(direct_source, repository, source_config): + return None + + +@direct.virtual.configure() +def configure(virtual_source, repository, snapshot): + return None + + +@direct.virtual.mount_specification() +def mount_specification(repository, virtual_source): + return None + + +@direct.virtual.post_snapshot() +def postSnapshot(repository, source_config, virtual_source): + return None + + +@direct.virtual.pre_snapshot() +def preSnapshot(repository, source_config, virtual_source): + pass + + +@direct.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + +@direct.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@direct.virtual.status() +def status(repository, source_config, virtual_source): + return Status.ACTIVE + + +@direct.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@direct.virtual.unconfigure() +def unconfigure(repository, source_config, virtual_source): + pass + + +@direct.upgrade.repository('2019.11.20') +def repo_upgrade(old_repository): + return old_repository + + +@direct.upgrade.source_config('2019.11.22') +def sc_upgrade(old_source_config): + return old_source_config + + +# Added second arg to check if length arg check fails. +@direct.upgrade.linked_source('2019.11.24') +def ls_upgrade(old_linked, old_source): + return old_linked + + +# Renamed old_virtual_source to old_linked_source to test named arg checks. +@direct.upgrade.virtual_source('2019.11.26') +def ls_upgrade(old_linked_source): + return old_linked_source + + +# Renamed old_snapshot to bad_input_name to test named arg checks. +@direct.upgrade.snapshot('2019.11.30') +def snap_upgrade(bad_input_name): + return bad_input_name diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py similarity index 89% rename from tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py rename to tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py index acd32437..094c1dde 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py @@ -17,12 +17,12 @@ # Renamed source_connection to connection to test if named arg check detects. @staged.discovery.repository() def repository_discovery(connection): - return None + return [] @staged.discovery.source_config() def source_config_discovery(source_connection, repository): - return None + return [] @staged.linked.mount_specification() @@ -67,6 +67,11 @@ def configure(virtual_source, repository, snapshot): return None +@staged.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + return None + + # Removed virtual.mount_specification for test validation. 
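# (Each deliberately broken operation in this module is meant to trigger its own warning during build validation, hence the rename to multiple_warnings.py.)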
@@ -86,8 +91,11 @@ def start(repository, source_config, virtual_source): # Added snapshot parameter to check if arg check fails. - - @staged.virtual.stop() def stop(repository, source_config, virtual_source, snapshot): pass + + +@staged.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml deleted file mode 100644 index 1742b86b..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# -id: 16bef554-9470-11e9-b2e3-8c8590d4a42c -name: Staged Toolkit using Python -version: 1.0.0 -hostTypes: - - UNIX -entryPoint: python_staged:staged -srcDir: src/ -schemaFile: ./schema.json -manualDiscovery: true -pluginType: STAGED -language: PYTHON27 diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json deleted file mode 100644 index 7c7d10ea..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "repositoryDefinition": { - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "sourceConfigDefinition": { - "type": "object", - "required": ["name"], - "additionalProperties": false, - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "virtualSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : { - "path": { "type": "string" } - }, - "required": ["path"] - }, - "linkedSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties": { - "path": { "type": "string" } - } - }, - "snapshotDefinition": { - "type" : "object", - "additionalProperties" : false, - "properties" : {} - } -} diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py new file mode 100644 index 00000000..31ae1151 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py @@ -0,0 +1,101 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from __future__ import print_function + +import logging + +from dlpx.virtualization.platform import Plugin + +logger = logging.getLogger() +logger.setLevel(logging.NOTSET) + +staged = Plugin() + + +@staged.discovery.repository() +def repository_discovery(source_connection): + return None + + +@staged.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +@staged.linked.mount_specification() +def staged_mount_specification(staged_source, repository): + return None + + +@staged.linked.pre_snapshot() +def staged_pre_snapshot(repository, source_config, staged_source, + snapshot_parameters): + pass + + +@staged.linked.post_snapshot() +def staged_post_snapshot(repository, source_config, staged_source, + snapshot_parameters): + return None + + +@staged.linked.start_staging() +def start_staging(repository, source_config, staged_source): + pass + + +@staged.linked.stop_staging() +def stop_staging(repository, source_config, staged_source): + pass + + +@staged.linked.status() +def staged_status(staged_source, repository, source_config): + return None + + +@staged.linked.worker() +def staged_worker(repository, source_config, staged_source): + pass + + +@staged.virtual.configure() +def configure(virtual_source, repository, snapshot): + return None + + +@staged.virtual.mount_specification() +def mount_specification(virtual_source, repository): + return None + + +@staged.virtual.pre_snapshot() +def pre_snapshot(repository, source_config, virtual_source): + pass + + +@staged.virtual.post_snapshot() +def post_snapshot(repository, source_config, virtual_source): + return None + + +@staged.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@staged.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@staged.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository + + +@staged.upgrade.snapshot('2019.11.30') +def snap_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py index 7303ba27..f9a98dc7 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py @@ -1,14 +1,15 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import os import click.testing as click_testing +import yaml +from dlpx.virtualization._internal import cli, const, exceptions + import mock import pytest -import yaml -from dlpx.virtualization._internal import cli, exceptions, util_classes class TestCli: @@ -94,6 +95,36 @@ def test_get_console_logging_level_both_non_zero(): with pytest.raises(AssertionError): cli.get_console_logging_level(1, 1) + @staticmethod + @mock.patch('dlpx.virtualization._internal.commands.initialize.init') + def test_command_user_error(mock_init, plugin_name): + runner = click_testing.CliRunner() + + mock_init.side_effect = exceptions.UserError("codegen_error") + result = runner.invoke(cli.delphix_sdk, ['init', '-n', plugin_name]) + + assert result.exit_code == 1 + assert result.output == 'codegen_error\n' + + # 'DIRECT' and os.getcwd() are the expected defaults + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) + + @staticmethod + @mock.patch('dlpx.virtualization._internal.commands.initialize.init') + def test_command_non_user_error(mock_init, plugin_name): + runner = click_testing.CliRunner() + + mock_init.side_effect = Exception("internal_error") + result = runner.invoke(cli.delphix_sdk, ['init', '-n', plugin_name]) + + assert result.exit_code == 2 + assert 'Internal error, please contact Delphix.\n' in result.output + + # 'DIRECT' and os.getcwd() are the expected defaults + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) + class TestInitCli: @staticmethod @@ -106,24 +137,21 @@ def test_default_params(mock_init, plugin_name): assert result.exit_code == 0, 'Output: {}'.format(result.output) # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.initialize.init') def test_non_default_params(mock_init, plugin_name): runner = click_testing.CliRunner() - result = runner.invoke(cli.delphix_sdk, [ - 'init', '-s', util_classes.STAGED_TYPE, '-r', '.', '-n', - plugin_name - ]) + result = runner.invoke( + cli.delphix_sdk, + ['init', '-s', const.STAGED_TYPE, '-r', '.', '-n', plugin_name]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_init.assert_called_once_with(os.getcwd(), - util_classes.STAGED_TYPE, - plugin_name) + mock_init.assert_called_once_with(os.getcwd(), const.STAGED_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_ingestion_strategy(plugin_name): @@ -143,6 +171,39 @@ def test_name_required(): assert result.exit_code != 0 + @staticmethod + def test_multiple_host_types(): + runner = click_testing.CliRunner() + + result = runner.invoke(cli.delphix_sdk, [ + 'init', '-t', '{},{}'.format(const.UNIX_HOST_TYPE, + const.WINDOWS_HOST_TYPE) + ]) + + assert result.exit_code != 0 + assert "invalid choice" in result.output + + @staticmethod + @mock.patch('dlpx.virtualization._internal.commands.initialize.init') + def test_windows_host_type(mock_init, plugin_name): + runner = click_testing.CliRunner() + + result = runner.invoke( + cli.delphix_sdk, + ['init', '-n', plugin_name, '-t', const.WINDOWS_HOST_TYPE]) + assert result.exit_code == 0, 'Output: {}'.format(result.output) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.WINDOWS_HOST_TYPE) + + @staticmethod + def 
test_invalid_host_type(): + runner = click_testing.CliRunner() + + result = runner.invoke(cli.delphix_sdk, ['init', '-t', 'UNI']) + + assert result.exit_code != 0 + assert "invalid choice" in result.output + class TestBuildCli: @staticmethod @@ -172,7 +233,10 @@ def test_default_plugin_file_success(mock_build, plugin_config_filename, assert result.exit_code == 0, 'Output: {}'.format(result.output) mock_build.assert_called_once_with(plugin_config_file, - artifact_file, False, False) + artifact_file, + False, + False, + local_vsdk_root=None) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.build.build') @@ -190,8 +254,11 @@ def test_generate_only_success(mock_build, plugin_config_filename, result = runner.invoke(cli.delphix_sdk, ['build', '-g']) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_build.assert_called_once_with(plugin_config_file, None, True, - False) + mock_build.assert_called_once_with(plugin_config_file, + None, + True, + False, + local_vsdk_root=None) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.build.build') @@ -202,8 +269,11 @@ def test_valid_params(mock_build, plugin_config_file, artifact_file): ['build', '-c', plugin_config_file, '-a', artifact_file]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_build.assert_called_once_with(plugin_config_file, artifact_file, - False, False) + mock_build.assert_called_once_with(plugin_config_file, + artifact_file, + False, + False, + local_vsdk_root=None) @staticmethod @pytest.mark.parametrize('plugin_config_filename', ['plugin.yml']) @@ -215,9 +285,12 @@ def test_valid_params_new_name(mock_build, plugin_config_file, ['build', '-c', plugin_config_file]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_build.assert_called_once_with( - plugin_config_file, os.path.join(os.getcwd(), artifact_filename), - False, False) + mock_build.assert_called_once_with(plugin_config_file, + os.path.join( + os.getcwd(), artifact_filename), + False, + False, + local_vsdk_root=None) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.build.build') @@ -230,8 +303,11 @@ def test_skip_id_validation(mock_build, plugin_config_file, artifact_file): ]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_build.assert_called_once_with(plugin_config_file, artifact_file, - False, True) + mock_build.assert_called_once_with(plugin_config_file, + artifact_file, + False, + True, + local_vsdk_root=None) @staticmethod @pytest.mark.parametrize('plugin_config_file', @@ -242,14 +318,14 @@ def test_file_not_exist(plugin_config_file): ['build', '-c', plugin_config_file]) assert result.exit_code == 2 - assert result.output == (u'Usage: delphix-sdk build [OPTIONS]' - u'\nTry "delphix-sdk build -h" for help.' - u'\n' - u'\nError: Invalid value for "-c" /' - u' "--plugin-config": File' - u' "/not/a/real/file/plugin_config.yml"' - u' does not exist.' - u'\n') + assert result.output == (u"Usage: delphix-sdk build [OPTIONS]" + u"\nTry 'delphix-sdk build -h' for help." + u"\n" + u"\nError: Invalid value for '-c' /" + u" '--plugin-config': File" + u" '/not/a/real/file/plugin_config.yml'" + u" does not exist." + u"\n") @staticmethod def test_option_a_and_g_set(plugin_config_file, artifact_file): @@ -277,6 +353,38 @@ def test_option_g_and_a_set(plugin_config_file, artifact_file): u' "upload_artifact".' 
u'\n') + @staticmethod + @mock.patch('dlpx.virtualization._internal.commands.build.build') + @pytest.mark.parametrize('dev_config_properties', + [{ + 'vsdk_root': '/path/to/vsdk/dir' + }]) + def test_with_dev(mock_build, plugin_config_file, artifact_file, + dev_config_file): + runner = click_testing.CliRunner() + result = runner.invoke( + cli.delphix_sdk, + ['build', '-c', plugin_config_file, '-a', artifact_file, '--dev']) + + assert result.exit_code == 0 + mock_build.assert_called_once_with(plugin_config_file, + artifact_file, + False, + False, + local_vsdk_root='/path/to/vsdk/dir') + + @staticmethod + @mock.patch('dlpx.virtualization._internal.commands.build.build') + def test_with_dev_fail(mock_build, plugin_config_file, artifact_file, + empty_config_file): + runner = click_testing.CliRunner() + result = runner.invoke( + cli.delphix_sdk, + ['build', '-c', plugin_config_file, '-a', artifact_file, '--dev']) + + assert result.exit_code == 2 + assert not mock_build.called, 'build should not have been called' + class TestUploadCli: @staticmethod @@ -298,7 +406,7 @@ def test_default_params(mock_upload, artifact_file): assert result.exit_code == 0, 'Output: {}'.format(result.output) mock_upload.assert_called_once_with(engine, user, artifact_file, - password) + password, False) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.upload.upload') @@ -315,7 +423,7 @@ def test_valid_params(mock_upload, artifact_file): assert result.exit_code == 0, 'Output: {}'.format(result.output) mock_upload.assert_called_once_with(engine, user, artifact_file, - password) + password, False) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.upload.upload') @@ -344,7 +452,7 @@ def test_bad_password(mock_upload, artifact_file): '\nAction: Try with a different set of credentials.' '\n') mock_upload.assert_called_once_with(engine, user, artifact_file, - password) + password, False) @staticmethod @pytest.mark.parametrize('artifact_file', @@ -362,20 +470,19 @@ def test_file_not_exist(artifact_file): ]) assert result.exit_code == 2 - assert result.output == (u'Usage: delphix-sdk upload [OPTIONS]' - u'\nTry "delphix-sdk upload -h" for help.' - u'\n' - u'\nError: Invalid value for "-a" /' - u' "--upload-artifact": File' - u' "/not/a/real/file/artifact.json"' - u' does not exist.' - u'\n') + assert result.output == (u"Usage: delphix-sdk upload [OPTIONS]" + u"\nTry 'delphix-sdk upload -h' for help." + u"\n" + u"\nError: Invalid value for '-a' /" + u" '--upload-artifact': File" + u" '/not/a/real/file/artifact.json'" + u" does not exist." 
+ u"\n") @staticmethod @mock.patch('dlpx.virtualization._internal.commands.upload.upload') - @pytest.mark.usefixtures('dvp_config_file') def test_with_config_file_success(mock_upload, artifact_file, - dvp_config_properties): + dvp_config_properties, dvp_config_file): engine = dvp_config_properties['engine'] user = dvp_config_properties['user'] password = dvp_config_properties['password'] @@ -390,13 +497,12 @@ def test_with_config_file_success(mock_upload, artifact_file, assert result.exit_code == 0, 'Output: {}'.format(result.output) mock_upload.assert_called_once_with(engine, user, artifact_file, - password) + password, False) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.upload.upload') - @pytest.mark.usefixtures('dvp_config_file') def test_with_config_file_override(mock_upload, artifact_file, - dvp_config_properties): + dvp_config_properties, dvp_config_file): engine = dvp_config_properties['engine'] user = 'fake_admin' password = dvp_config_properties['password'] @@ -411,7 +517,7 @@ def test_with_config_file_override(mock_upload, artifact_file, assert result.exit_code == 0, 'Output: {}'.format(result.output) mock_upload.assert_called_once_with(engine, user, artifact_file, - password) + password, False) @staticmethod @pytest.mark.parametrize('dvp_config_properties', [{ @@ -432,8 +538,8 @@ def test_with_config_file_fail(artifact_file): assert result.exit_code == 2 assert result.output == (u'Usage: delphix-sdk upload [OPTIONS]\n' u'\n' - u'Error: Invalid value for "-e" / ' - u'"--engine": Option is required ' + u'Error: Invalid value for \'-e\' / ' + u'\'--engine\': Option is required ' u'and must be specified via the command line.' u'\n') @@ -495,12 +601,12 @@ def test_missing_params(): assert result.exit_code == 2 assert result.output == ( - u'Usage: delphix-sdk download-logs [OPTIONS]\n' - u'\n' - u'Error: Invalid value for "-e" / ' - u'"--engine": Option is required ' - u'and must be specified via the command line.' - u'\n') + u"Usage: delphix-sdk download-logs [OPTIONS]\n" + u"\n" + u"Error: Invalid value for '-e' / " + u"'--engine': Option is required " + u"and must be specified via the command line." + u"\n") @staticmethod @mock.patch( @@ -549,14 +655,14 @@ def test_directory_not_exist(directory): assert result.exit_code == 2 assert result.output == ( - u'Usage: delphix-sdk download-logs [OPTIONS]' - u'\nTry "delphix-sdk download-logs -h" for help.' - u'\n' - u'\nError: Invalid value for "-d" /' - u' "--directory": Directory' - u' "/not/a/real/directory"' - u' does not exist.' - u'\n') + u"Usage: delphix-sdk download-logs [OPTIONS]" + u"\nTry 'delphix-sdk download-logs -h' for help." + u"\n" + u"\nError: Invalid value for '-d' /" + u" '--directory': Directory" + u" '/not/a/real/directory'" + u" does not exist." + u"\n") @staticmethod @pytest.mark.parametrize('plugin_config_file', @@ -575,21 +681,20 @@ def test_file_not_exist(plugin_config_file): assert result.exit_code == 2 assert result.output == ( - u'Usage: delphix-sdk download-logs [OPTIONS]' - u'\nTry "delphix-sdk download-logs -h" for help.' - u'\n' - u'\nError: Invalid value for "-c" /' - u' "--plugin-config": File' - u' "/not/a/real/file/plugin_config.yml"' - u' does not exist.' - u'\n') + u"Usage: delphix-sdk download-logs [OPTIONS]" + u"\nTry 'delphix-sdk download-logs -h' for help." + u"\n" + u"\nError: Invalid value for '-c' /" + u" '--plugin-config': File" + u" '/not/a/real/file/plugin_config.yml'" + u" does not exist." 
+ u"\n") @staticmethod @mock.patch( 'dlpx.virtualization._internal.commands.download_logs.download_logs') - @pytest.mark.usefixtures('dvp_config_file') def test_with_config_file_success(mock_download_logs, plugin_config_file, - dvp_config_properties): + dvp_config_properties, dvp_config_file): engine = dvp_config_properties['engine'] user = dvp_config_properties['user'] password = dvp_config_properties['password'] @@ -609,9 +714,8 @@ def test_with_config_file_success(mock_download_logs, plugin_config_file, @staticmethod @mock.patch( 'dlpx.virtualization._internal.commands.download_logs.download_logs') - @pytest.mark.usefixtures('dvp_config_file') def test_with_config_file_override(mock_download_logs, plugin_config_file, - dvp_config_properties): + dvp_config_properties, dvp_config_file): engine = dvp_config_properties['engine'] user = 'fake_admin' password = dvp_config_properties['password'] @@ -634,8 +738,7 @@ def test_with_config_file_override(mock_download_logs, plugin_config_file, 'user': 'user', 'password': 'password' }]) - @pytest.mark.usefixtures('dvp_config_file') - def test_with_config_file_fail(plugin_config_file): + def test_with_config_file_fail(plugin_config_file, dvp_config_file): cwd = os.getcwd() try: @@ -647,9 +750,9 @@ def test_with_config_file_fail(plugin_config_file): assert result.exit_code == 2 assert result.output == ( - u'Usage: delphix-sdk download-logs [OPTIONS]\n' - u'\n' - u'Error: Invalid value for "-e" / ' - u'"--engine": Option is required ' - u'and must be specified via the command line.' - u'\n') + u"Usage: delphix-sdk download-logs [OPTIONS]\n" + u"\n" + u"Error: Invalid value for '-e' / " + u"'--engine': Option is required " + u"and must be specified via the command line." + u"\n") diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_click_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_click_util.py index 46e701a3..fb6819d7 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_click_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_click_util.py @@ -3,10 +3,11 @@ # import click -import pytest from click import testing as click_testing from dlpx.virtualization._internal import click_util +import pytest + class TestClickUtil: @staticmethod diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py index dc1f8e07..0c925bec 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py @@ -4,9 +4,11 @@ import os -import pytest from dlpx.virtualization._internal import exceptions, file_util +import mock +import pytest + class TestFileUtil: @staticmethod @@ -24,21 +26,104 @@ def test_delete_paths_none_values(plugin_config_file): assert not os.path.exists(plugin_config_file) @staticmethod - def test_get_src_dir_path(tmpdir): - test_file = os.path.join(tmpdir.strpath, 'test_file') - src_dir = file_util.get_src_dir_path(test_file, tmpdir.strpath) - assert src_dir == tmpdir.strpath + def test_get_src_dir_path_relative(tmp_path): + plugin_root = tmp_path / 'plugin' + src_dir = plugin_root / 'src' + plugin_root.mkdir() + src_dir.mkdir() + + cwd = os.getcwd() + try: + os.chdir(tmp_path.as_posix()) + actual = file_util.get_src_dir_path('plugin/plugin_config.yml', + 'src') + finally: + os.chdir(cwd) + + assert actual == src_dir.as_posix() @staticmethod - def test_get_src_dir_path_fail(tmpdir): - test_file = 
os.path.join(tmpdir.strpath, 'test_file') - expected_message = 'The path \'{}\' does not exist'.format(test_file) + def test_get_src_dir_path_is_abs_fail(): + expected_message = "The path '{}' should be a relative path, but is " \ + "not.".format('/absolute/src') with pytest.raises(exceptions.UserError) as err_info: - file_util.get_src_dir_path(test_file, test_file) + file_util.get_src_dir_path('/absolute/config', '/absolute/src') + message = err_info.value.message + assert expected_message in message + + @staticmethod + def test_get_src_dir_path_exists_fail(): + expected_path = os.path.join(os.getcwd(), 'fake', 'nonexistent', 'dir') + expected_message = "The path '{}' does not exist.".format( + expected_path) + with pytest.raises(exceptions.UserError) as err_info: + file_util.get_src_dir_path('fake/plugin_config', 'nonexistent/dir') + message = err_info.value.message + assert expected_message in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @mock.patch('os.path.exists', return_value=True) + def test_get_src_dir_path_is_dir_fail(mock_existing_path, + mock_relative_path): + expected_path = os.path.join(os.getcwd(), 'fake', 'not', 'dir') + expected_message = "The path '{}' should be a {} but is not.".format( + expected_path, 'directory') + with pytest.raises(exceptions.UserError) as err_info: + file_util.get_src_dir_path('fake/plugin_config', 'not/dir') + message = err_info.value.message + assert expected_message in message + + @staticmethod + @mock.patch('os.path.isdir', return_value=True) + @mock.patch('os.path.exists', return_value=True) + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize( + 'plugin_config_file_path, src_dir_path', + [(os.path.join(os.getenv('HOME'), 'plugin/file_name'), '.'), + ('/mongo/file_name', '/src'), ('/plugin/mongo/file_name', '/plugin'), + ('/plugin/file_name', '/plugin/src/../..')]) + def test_get_src_dir_path_fail(mock_relative_path, mock_existing_path, + mock_directory_path, + plugin_config_file_path, src_dir_path): + expected_plugin_root_dir = os.path.dirname(plugin_config_file_path) + + expected_plugin_root_dir = file_util.standardize_path( + expected_plugin_root_dir) + expected_src_dir = file_util.standardize_path( + os.path.join(expected_plugin_root_dir, src_dir_path)) + + expected_src_dir = os.path.join(expected_plugin_root_dir, + expected_src_dir) + expected_message = "The src directory {} is not a subdirectory of " \ + "the plugin root at {}"\ + .format(expected_src_dir, + os.path.dirname(expected_plugin_root_dir)) + with pytest.raises(exceptions.UserError) as err_info: + file_util.get_src_dir_path(plugin_config_file_path, src_dir_path) message = err_info.value.message assert expected_message in message + @staticmethod + @mock.patch('os.path.isdir', return_value=True) + @mock.patch('os.path.exists', return_value=True) + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize( + 'plugin_config_file_path, src_dir_path', + [(os.path.join(os.path.dirname(os.getcwd()), + 'plugin/filename'), '../plugin/src'), + (os.path.join(os.getenv('HOME'), 'plugin/file_name'), '~/plugin/src'), + (os.path.join(os.getcwd(), 'plugin/file_name'), './plugin/src'), + ('/UPPERCASE/file_name', '/UPPERCASE/src'), + ('/mongo/file_name', '/mongo/src/main/python'), + ('~/plugin/file_name', '~/plugin/src'), + (r'windows\path\some_file', r'windows\path')]) + def test_get_src_dir_path_success(mock_relative_path, mock_existing_path, + mock_directory_path, + plugin_config_file_path, src_dir_path): + 
file_util.get_src_dir_path(plugin_config_file_path, src_dir_path) + @staticmethod def test_make_dir_success(tmpdir): testdir = os.path.join(tmpdir.strpath, 'test_dir') @@ -65,3 +150,95 @@ def test_make_dir_force_fail(tmpdir): message = err_info.value.message assert "Error code: 17. Error message: File exists" in message + + @staticmethod + def test_clean_copy_no_tgt_dir(tmp_path): + # + # Before: After: + # src/ src/ + # hello.txt hello.txt + # tgt/ + # hello.txt + # + src = tmp_path / 'src' + src.mkdir() + f = src / 'hello.txt' + f.write_text(u'hello') + tgt = tmp_path / 'tgt' + + file_util.clean_copy(src.as_posix(), tgt.as_posix()) + + expected_file = tgt / 'hello.txt' + assert expected_file.exists() + assert expected_file.read_text() == 'hello' + + @staticmethod + def test_clean_copy_removes_tgt_dir(tmp_path): + # + # Before: After: + # src/ src/ + # hello.txt hello.txt + # tgt/ tgt/ + # remove.txt hello.txt + # + src = tmp_path / 'src' + src.mkdir() + src_file = src / 'hello.txt' + src_file.write_text(u'hello') + tgt = tmp_path / 'tgt' + tgt.mkdir() + tgt_file = tgt / 'remove.txt' + tgt_file.touch() + + file_util.clean_copy(src.as_posix(), tgt.as_posix()) + + expected_file = tgt / 'hello.txt' + assert expected_file.exists() + assert expected_file.read_text() == 'hello' + assert not tgt_file.exists() + + @staticmethod + def test_clean_copy_nested_tgt_dir(tmp_path): + # + # Before: After: + # src/ src/ + # child/ child/ + # hello.txt hello.txt + # tgt_parent/ tgt_parent/ + # tgt/ + # child/ + # hello.txt + # + src = tmp_path / 'src' + src.mkdir() + child = src / 'child' + child.mkdir() + src_file = child / 'hello.txt' + src_file.write_text(u'hello') + tgt_parent = tmp_path / 'tgt_parent' + tgt_parent.mkdir() + tgt = tgt_parent / 'tgt' + + file_util.clean_copy(src.as_posix(), tgt.as_posix()) + + expected_file = tgt / 'child' / 'hello.txt' + assert expected_file.exists() + assert expected_file.read_text() == 'hello' + + @staticmethod + def test_tmpdir(): + with file_util.tmpdir() as d: + assert os.path.exists(d) + + assert not os.path.exists(d) + + @staticmethod + def test_tmpdir_with_raised_exception(): + try: + with file_util.tmpdir() as d: + assert os.path.exists(d) + + raise RuntimeError('test') + except RuntimeError as e: + assert str(e) == 'test' + assert not os.path.exists(d) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_logging_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_logging_util.py index a5f9f69f..2da27e79 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_logging_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_logging_util.py @@ -5,9 +5,10 @@ import os import tempfile -import mock from dlpx.virtualization._internal import logging_util +import mock + class TestLoggingUtil: @staticmethod diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 92197d2b..3b7c70da 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -1,25 +1,31 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# from dlpx.virtualization._internal import package_util +import pytest + class TestPackageUtil: @staticmethod - def test_get_build_api_version(): - assert package_util.get_version() == '1.0.0' + def test_get_version(): + assert package_util.get_version() == '2.0.0' + + @staticmethod + def test_get_virtualization_api_version(): + assert package_util.get_virtualization_api_version() == '1.1.0' @staticmethod def test_get_engine_api_version(): - assert package_util.get_engine_api_version_from_settings() == '1.10.5' + assert package_util.get_engine_api_version_from_settings() == '1.11.2' @staticmethod def test_get_build_api_version_json(): build_api_version = { 'type': 'APIVersion', 'major': 1, - 'minor': 0, + 'minor': 1, 'micro': 0 } assert package_util.get_build_api_version() == build_api_version @@ -29,12 +35,21 @@ def test_get_engine_api_version_json(): engine_api_version = { 'type': 'APIVersion', 'major': 1, - 'minor': 10, - 'micro': 5 + 'minor': 11, + 'micro': 2 } assert package_util.get_engine_api_version() == engine_api_version @staticmethod def test_get_internal_package_root(): assert package_util.get_internal_package_root().endswith( - 'main/python/dlpx/virtualization/_internal') + 'dlpx/virtualization/_internal') + + @staticmethod + @pytest.mark.parametrize('version_string', [ + '1.1.0', ' 1.1.0', '1.1.0-internal-001', ' 1.1.0-internal-001', + ' 1.1.0-internal-002 ', '1.1.0whatever' + ]) + def test_get_external_version_string(version_string): + assert package_util.get_external_version_string( + version_string) == '1.1.0' diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py new file mode 100644 index 00000000..ccd294d7 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py @@ -0,0 +1,188 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +import os +import subprocess +import sys + +from dlpx.virtualization._internal import file_util, package_util +from dlpx.virtualization._internal import plugin_dependency_util as pdu +from dlpx.virtualization._internal.exceptions import SubprocessFailedError + +import mock +import pytest + + +class TestPluginDependencyUtil: + @staticmethod + @mock.patch.object(pdu, '_pip_install_to_dir') + def test_install_plugin_dependencies(mock_install_to_dir, tmp_path): + google = tmp_path / 'google' + google.mkdir() + + pdu.install_deps(tmp_path.as_posix()) + + expected_dependencies = [ + '{}=={}'.format(p, package_util.get_version()) + for p in ['dvp-common', 'dvp-libs', 'dvp-platform'] + ] + mock_install_to_dir.assert_called_once_with(expected_dependencies, + tmp_path.as_posix()) + + @staticmethod + @mock.patch.object(pdu, '_pip_install_to_dir') + @mock.patch.object(pdu, '_build_wheel') + @mock.patch.object(file_util, 'tmpdir') + def test_install_plugin_dependencies_dev(mock_tmpdir, mock_build_wheel, + mock_install_to_dir, tmp_path): + wheel_dir = tmp_path / 'wheel' + build_dir = tmp_path / 'build' + google = build_dir / 'google' + wheel_dir.mkdir() + build_dir.mkdir() + google.mkdir() + + global packages + packages = set() + + def build_wheel(package, dir): + dist_path = wheel_dir / os.path.basename(package) + dist_path.touch() + + global packages + packages.add(dist_path.as_posix()) + + def clean_up(a, b, c): + file_util.delete_paths(wheel_dir.as_posix()) + + mock_tmpdir.return_value.__enter__.return_value = wheel_dir.as_posix() + mock_tmpdir.return_value.__exit__.side_effect = clean_up + mock_build_wheel.side_effect = build_wheel + + pdu.install_deps(build_dir.as_posix(), local_vsdk_root='vsdk') + mock_install_to_dir.assert_called_once_with(packages, + build_dir.as_posix()) + + @staticmethod + @mock.patch.object(subprocess, 'Popen') + def test_execute_pip(mock_popen): + mock_popen.return_value.communicate.return_value = ('output', '') + mock_popen.return_value.wait.return_value = 0 + + pdu._execute_pip(['-h']) + + mock_popen.assert_called_once_with([sys.executable, '-m', 'pip', '-h'], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + @staticmethod + @mock.patch.object(subprocess, 'Popen') + def test_execute_pip_non_zero_exit(mock_popen): + mock_popen.return_value.communicate.return_value = ('output', '') + mock_popen.return_value.wait.return_value = 1 + + with pytest.raises(SubprocessFailedError) as excinfo: + pdu._execute_pip(['-h']) + + expected_args = [sys.executable, '-m', 'pip', '-h'] + + mock_popen.assert_called_once_with(expected_args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + e = excinfo.value + assert e.command == ' '.join(expected_args) + assert e.exit_code == 1 + assert e.output == 'output' + + @staticmethod + @mock.patch.object(subprocess, 'Popen') + def test_install_to_dir(mock_popen): + mock_popen.return_value.communicate.return_value = ('output', '') + mock_popen.return_value.wait.return_value = 0 + dependencies = ['dvp-common==1.0.0', 'six'] + + pdu._pip_install_to_dir(dependencies, 'tgt') + + expected_args = [ + sys.executable, '-m', 'pip', 'install', '-t', 'tgt', + 'dvp-common==1.0.0', 'six' + ] + mock_popen.assert_called_once_with(expected_args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + @staticmethod + @mock.patch.object(subprocess, 'Popen') + def test_build_wheel(mock_popen, tmp_path): + setup_file = tmp_path / 'setup.py' + setup_file.touch() + + mock_popen.return_value.communicate.return_value = ('output', '') + 
mock_popen.return_value.wait.return_value = 0 + + pdu._build_wheel(tmp_path.as_posix()) + + mock_popen.assert_called_once_with( + [sys.executable, 'setup.py', 'bdist_wheel'], + cwd=tmp_path.as_posix(), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + @staticmethod + def test_build_wheel_fails_with_no_setup_file(tmp_path): + with pytest.raises(RuntimeError) as excinfo: + pdu._build_wheel(tmp_path.as_posix()) + + assert str(excinfo.value) == ('No setup.py file exists in directory ' + '{}'.format(tmp_path.as_posix())) + + @staticmethod + @mock.patch.object(subprocess, 'Popen') + def test_build_wheel_non_zero_exit(mock_popen, tmp_path): + setup_file = tmp_path / 'setup.py' + setup_file.touch() + + mock_popen.return_value.communicate.return_value = ('output', '') + mock_popen.return_value.wait.return_value = 1 + + with pytest.raises(SubprocessFailedError) as excinfo: + pdu._build_wheel(tmp_path.as_posix()) + + e = excinfo.value + + expected_args = [sys.executable, 'setup.py', 'bdist_wheel'] + mock_popen.assert_called_once_with(expected_args, + cwd=tmp_path.as_posix(), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + assert e.command == ' '.join(expected_args) + assert e.exit_code == 1 + assert e.output == 'output' + + @staticmethod + @mock.patch.object(subprocess, 'Popen') + def test_build_wheel_target_dir(mock_popen, tmp_path): + package_dir = tmp_path / 'pkg' + setup_file = package_dir / 'setup.py' + target_dir = tmp_path / 'tgt' + package_dir.mkdir() + setup_file.touch() + target_dir.mkdir() + + mock_popen.return_value.communicate.return_value = ('output', '') + mock_popen.return_value.wait.return_value = 0 + + pdu._build_wheel(package_dir.as_posix(), + target_dir=target_dir.as_posix()) + + expected_args = [ + sys.executable, 'setup.py', 'bdist_wheel', '-d', + target_dir.as_posix() + ] + mock_popen.assert_called_once_with(expected_args, + cwd=package_dir.as_posix(), + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 37a64f50..ab31c65f 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -2,62 +2,90 @@ # Copyright (c) 2019 by Delphix. All rights reserved.
# import exceptions -from collections import defaultdict +import os +import uuid +from collections import OrderedDict + +from dlpx.virtualization._internal.plugin_importer import PluginImporter +from dlpx.virtualization._internal import (file_util, plugin_util, + plugin_validator) import mock import pytest -from dlpx.virtualization._internal.plugin_importer import PluginImporter +import yaml + + +@pytest.fixture +def fake_src_dir(plugin_type): + """ + Return the path of the fake plugin source files used for testing. + """ + return os.path.join(os.path.dirname(__file__), 'fake_plugin', + plugin_type.lower()) + + +def get_plugin_importer(plugin_config_file): + plugin_config_content = None + with open(plugin_config_file, 'rb') as f: + plugin_config_content = yaml.safe_load(f) + + src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = plugin_validator.PluginValidator\ + .split_entry_point(plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + return PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) class TestPluginImporter: @staticmethod @mock.patch('importlib.import_module') def test_get_plugin_manifest(mock_import, src_dir, plugin_type, - plugin_name, plugin_entry_point_name, + entry_point_module, entry_point_object, plugin_module_content, plugin_manifest): mock_import.return_value = plugin_module_content - importer = PluginImporter(src_dir, plugin_name, - plugin_entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() - assert not warnings - assert manifest == plugin_manifest + importer = PluginImporter(src_dir, entry_point_module, + entry_point_object, plugin_type, False) + importer.validate_plugin_module() + + assert importer.result.plugin_manifest == plugin_manifest @staticmethod @mock.patch('importlib.import_module') def test_plugin_module_content_none(mock_import, src_dir, plugin_type, - plugin_name, plugin_entry_point_name): + entry_point_module, + entry_point_object): mock_import.return_value = None - manifest = {} - warnings = defaultdict(list) - - with pytest.raises(exceptions.UserError) as err_info: - importer = PluginImporter(src_dir, plugin_name, - plugin_entry_point_name, plugin_type, - False) - manifest, warnings = importer.import_plugin() - - message = str(err_info) - assert warnings.items() > 0 - assert manifest == {} - assert 'Plugin module content is None.' in message + importer = PluginImporter(src_dir, entry_point_module, + entry_point_object, plugin_type, False) + importer.validate_plugin_module() + result = importer.result + + # + # If module_content is None, the importer performs no validations and + # simply returns, so the result should contain an empty manifest; the + # assertion below verifies that.
+ # + assert result.plugin_manifest == {} @staticmethod @mock.patch('importlib.import_module') def test_plugin_entry_object_none(mock_import, src_dir, plugin_type, plugin_name, plugin_module_content): mock_import.return_value = plugin_module_content - manifest = {} - warnings = defaultdict(list) + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, None, plugin_type, False) - manifest, warnings = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = str(err_info) - assert warnings.items() > 0 - assert manifest == {} + assert result == () assert 'Plugin entry point object is None.' in message @staticmethod @@ -67,17 +95,16 @@ def test_plugin_entry_point_nonexistent(mock_import, src_dir, plugin_type, plugin_module_content): entry_point_name = "nonexistent entry point" mock_import.return_value = plugin_module_content - manifest = {} - warnings = defaultdict(list) + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert warnings.items() > 0 - assert manifest == {} + assert result == () assert ('\'{}\' is not a symbol in module'.format(entry_point_name) in message) @@ -89,16 +116,139 @@ def test_plugin_object_none(mock_import, src_dir, plugin_type, plugin_name, setattr(plugin_module_content, none_entry_point, None) mock_import.return_value = plugin_module_content - manifest = {} - warnings = defaultdict(list) + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, none_entry_point, plugin_type, False) - manifest, warnings = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert warnings.items() > 0 - assert manifest == {} + assert result == () assert ('Plugin object retrieved from the entry point {} is' ' None'.format(none_entry_point)) in message + + @staticmethod + @pytest.mark.parametrize('entry_point,plugin_type', + [('successful:staged', 'STAGED'), + ('successful:direct', 'DIRECT')]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_successful_validation(mock_file_util, plugin_config_file, + fake_src_dir): + mock_file_util.return_value = fake_src_dir + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,plugin_type,expected_errors', + [('multiple_warnings:staged', 'STAGED', [ + 'Error: Named argument mismatch in method repository_discovery', + 'Error: Number of arguments do not match in method stop', + 'Error: Named argument mismatch in method stop', + 'Warning: Implementation missing for required method' + ' virtual.mount_specification().', '1 Warning(s). 3 Error(s).' + ]), + ('multiple_warnings:vfiles', 'DIRECT', [ + 'Error: Number of arguments do not match in method status', + 'Error: Named argument mismatch in method status', + 'Warning: Implementation missing for required method' + ' virtual.reconfigure().', '1 Warning(s). 2 Error(s).' 
+ ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_multiple_warnings(mock_file_util, plugin_config_file, + fake_src_dir, expected_errors): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_errors', [('upgrade_warnings:direct', [ + 'Error: Named argument mismatch in method snap_upgrade.', + 'Error: Number of arguments do not match in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + '0 Warning(s). 4 Error(s).' + ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, + expected_errors): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_error', + [('op_already_defined:plugin', 'has already been defined'), + ('dec_not_function:plugin', "decorated by 'linked.pre_snapshot()'" + " is not a function"), + ('id_not_string:plugin', "The migration id '['testing', 'out'," + " 'validation']' used in the function" + " 'repo_upgrade' should be a string."), + ('id_bad_format:plugin', "used in the function 'repo_upgrade' does" + " not follow the correct format"), + ('id_used:plugin', "'5.04.000.01' used in the function 'snap_upgrade'" + " has the same canonical form '5.4.0.1' as another migration")]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, + expected_error): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + assert expected_error in message + assert '0 Warning(s). 1 Error(s).' in message + + @staticmethod + @pytest.mark.parametrize('entry_point', ['arbitrary_error:plugin']) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.SDKToolingError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + assert ('SDK Error: Got an arbitrary non-platforms error for testing.' + in message) + assert '0 Warning(s). 1 Error(s).' 
in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @mock.patch('importlib.import_module') + def test_plugin_info_warn_mode(mock_import, mock_relative_path, + plugin_config_file, src_dir, + plugin_module_content): + plugin_config_content = OrderedDict([ + ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), + ('version', '0.1.0'), ('language', 'PYTHON27'), + ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), + ('manualDiscovery', True), + ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), + ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) + ]) + mock_import.return_value = plugin_module_content + try: + plugin_util.get_plugin_manifest(plugin_config_file, + plugin_config_content, False) + except Exception: + raise AssertionError() diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 3205f965..52722bd4 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -1,28 +1,13 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import json -import os -import uuid -from collections import OrderedDict import mock import pytest -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.plugin_validator import PluginValidator -from dlpx.virtualization._internal.util_classes import ValidationMode - - -@pytest.fixture -def plugin_config_file(tmpdir): - return os.path.join(tmpdir.strpath, 'plugin_config.yml') - - -@pytest.fixture -def src_dir(tmpdir): - tmpdir.mkdir('src') - return os.path.join(tmpdir.strpath, 'src') class TestPluginValidator: @@ -30,34 +15,25 @@ class TestPluginValidator: @pytest.mark.parametrize( 'schema_content', ['{}\nNOT JSON'.format(json.dumps({'random': 'json'}))]) - def test_plugin_bad_schema(plugin_config_file, schema_file): - plugin_config_content = OrderedDict([ - ('name', 'staged'.encode('utf-8')), - ('prettyName', 'StagedPlugin'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) + def test_plugin_bad_schema(plugin_config_file, plugin_config_content, + schema_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator.from_config_content( - plugin_config_file, plugin_config_content, schema_file, - ValidationMode.ERROR) - validator.validate() + plugin_config_file, plugin_config_content, schema_file) + validator.validate_plugin_config() message = err_info.value.message - assert ('Failed to load schemas because {!r} is not a valid json file.' + assert ('Failed to load schemas because {} is not a valid json file.' 
' Error: Extra data: line 2 column 1 - line 2 column 9' ' (char 19 - 27)'.format(schema_file)) in message @staticmethod + @pytest.mark.parametrize('plugin_config_file', ['/dir/plugin_config.yml']) def test_plugin_bad_config_file(plugin_config_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert message == ("Unable to read plugin config file '{}'" @@ -65,232 +41,133 @@ def test_plugin_bad_config_file(plugin_config_file): " directory".format(plugin_config_file)) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - def test_plugin_valid_content(mock_import_plugin, src_dir, - plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @mock.patch('os.path.isabs', return_value=False) + def test_plugin_valid_content(src_dir, plugin_config_file, + plugin_config_content): validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() - - mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() @staticmethod - def test_plugin_missing_field(plugin_config_file): - plugin_config_content = OrderedDict([ - ('name', 'staged'.encode('utf-8')), ('version', '0.1.0'), - ('language', 'PYTHON27'), ('hostTypes', ['UNIX']), - ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize('src_dir', [None]) + def test_plugin_missing_field(plugin_config_file, plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - @pytest.mark.parametrize('version, expected', [ - pytest.param('xxx', "'xxx' does not match"), - pytest.param('1.0.0', None), - pytest.param('1.0.0_HF', None) - ]) - def test_plugin_version_format(mock_import_plugin, src_dir, - plugin_config_file, version, expected): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', version), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize('external_version,expected', + [(1, "1 is not of type 
'string'"), + (1.0, "1.0 is not of type 'string'"), + ('my_version', None), ('1.0.0', None), + ('1.0.0_HF', None)]) + def test_plugin_version_format(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() - mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - @pytest.mark.parametrize('entry_point, expected', [ - pytest.param('staged_plugin', "'staged_plugin' does not match"), - pytest.param(':staged_plugin', "':staged_plugin' does not match"), - pytest.param('staged:', "'staged:' does not match"), - pytest.param('staged_plugin::staged', - "'staged_plugin::staged' does not match"), - pytest.param(':staged_plugin:staged:', - "':staged_plugin:staged:' does not match"), - pytest.param('staged_plugin:staged', None) - ]) - def test_plugin_entry_point(mock_import_plugin, src_dir, - plugin_config_file, entry_point, expected): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', entry_point.encode('utf-8')), ('srcDir', src_dir), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize( + 'entry_point,expected', + [('staged_plugin', "'staged_plugin' does not match"), + (':staged_plugin', "':staged_plugin' does not match"), + ('staged:', "'staged:' does not match"), + ('staged_plugin::staged', "'staged_plugin::staged' does not match"), + (':staged_plugin:staged:', "':staged_plugin:staged:' does not match"), + ('staged_plugin:staged', None)]) + def test_plugin_entry_point(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() - mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod - def test_plugin_additional_properties(src_dir, plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('unknown_key', 'unknown_value'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) + def test_plugin_additional_properties(src_dir, plugin_config_file, + plugin_config_content): + # Adding an unknown key + plugin_config_content['unknown_key'] = 'unknown_value' try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except 
exceptions.SchemaValidationError as err_info: message = err_info.message - assert "Additional properties are not allowed " \ - "('unknown_key' was unexpected)" in message + assert ("Additional properties are not allowed" + " ('unknown_key' was unexpected)" in message) @staticmethod - def test_multiple_validation_errors(plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['xxx']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize('host_types', [['xxx']]) + @pytest.mark.parametrize('src_dir', [None]) + def test_multiple_validation_errors(plugin_config_file, + plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message assert "'xxx' is not one of ['UNIX', 'WINDOWS']" in message @staticmethod - def test_staged_plugin(fake_staged_plugin_config): - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_staged_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() - - message = err_info.value.message - assert validator.result.warnings.items() > 0 - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required method' in message - - @staticmethod - def test_direct_plugin(fake_direct_plugin_config): - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_direct_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() - - message = err_info.value.message - assert validator.result.warnings.items() > 0 - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required method' in message - - @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - @pytest.mark.parametrize('plugin_id , expected', [ - pytest.param('Staged_plugin', "'Staged_plugin' does not match"), - pytest.param('staged_Plugin', "'staged_Plugin' does not match"), - pytest.param('STAGED', "'STAGED' does not match"), - pytest.param('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), - pytest.param('e3b69c61-4c30-44f7-92c0-504c8388b91e', None) - ]) + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize( + 'plugin_id , expected', + [('Staged_plugin', "'Staged_plugin' does not match"), + ('staged_Plugin', "'staged_Plugin' does not match"), + ('STAGED', "'STAGED' does not match"), + ('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), + ('e3b69c61-4c30-44f7-92c0-504c8388b91e', None)]) def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, - plugin_id, expected): - plugin_config_content = OrderedDict([ - ('id', plugin_id.encode('utf-8')), ('name', 'python_vfiles'), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 
'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() - mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod - @pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, ValidationMode.WARNING]) - def test_plugin_info_warn_mode(plugin_config_file, validation_mode): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - err_info = None + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize('build_number, expected', + [('xxx', "'xxx' does not match"), ('1', None), + ('1.x', "'1.x' does not match"), ('1.100', None), + ('0.1.2', None), ('02.5000', None), + (None, "'buildNumber' is a required property"), + ('1.0.0_HF', "'1.0.0_HF' does not match"), + ('0.0.0', "'0.0.0' does not match"), + ('0', "'0' does not match"), + ('0.0.00', "'0.0.00' does not match"), + ('0.1', None)]) + def test_plugin_build_number_format(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, validation_mode) - validator.validate() - except Exception as e: - err_info = e - - assert err_info is None + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() + except exceptions.SchemaValidationError as err_info: + message = err_info.message + assert expected in message diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py index 6e2191a5..2b064b57 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py @@ -1,14 +1,13 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import json import os import pytest -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions, plugin_util from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode class TestSchemaValidator: @@ -18,7 +17,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): f = tmpdir.join(schema_filename) f.write(meta_schema) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, f, ValidationMode.ERROR) + validator = SchemaValidator(schema_file, f) validator.validate() message = err_info.value.message @@ -30,9 +29,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): def test_bad_schema_file(schema_file): os.remove(schema_file) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -42,8 +39,7 @@ def test_bad_schema_file(schema_file): @staticmethod def test_valid_schema(schema_file): - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -68,8 +64,7 @@ def test_missing_root_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. # - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -91,9 +86,7 @@ def test_missing_root_type(schema_file): }]) def test_bad_root_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -118,9 +111,7 @@ def test_bad_root_type_num(schema_file): }]) def test_bad_root_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -144,9 +135,7 @@ def test_bad_root_type(schema_file): }]) def test_missing_identity_fields(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -170,9 +159,7 @@ def test_missing_identity_fields(schema_file): }]) def test_missing_name_field(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -199,8 +186,7 @@ def test_missing_sub_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -222,9 +208,7 @@ def test_missing_sub_type(schema_file): }]) def test_bad_sub_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -249,9 +233,7 @@ def test_bad_sub_type(schema_file): }]) def test_bad_sub_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -278,9 +260,7 @@ def test_missing_required_field(schema_file): # pytest.skip("required fields validation is not working yet") with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -304,9 +284,7 @@ def test_missing_required_field(schema_file): }]) def test_multiple_validation_errors(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -314,8 +292,6 @@ def test_multiple_validation_errors(schema_file): assert "'identityFields' is a required property" in message @staticmethod - @pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, ValidationMode.WARNING]) @pytest.mark.parametrize('source_config_definition', [{ 'type': 'object', @@ -332,14 +308,51 @@ def test_multiple_validation_errors(schema_file): 'nameField': 'name', 'identityFields': ['name'] }]) - def test_bad_sub_type_info_warn_mode(schema_file, validation_mode): + def test_bad_sub_type_info_warn_mode(schema_file): err_info = None try: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + plugin_util.validate_schema_file(schema_file, False) except Exception as e: err_info = e assert err_info is None + + @staticmethod + @pytest.mark.parametrize( + 'source_config_definition', [{ + 'type': 'object', + 'required': ['name', 'path'], + 'additionalProperties': False, + 'properties': { + 'name': { + 'type': 'string' + }, + 'path': { + 'type': 'string' + }, + "repoArray": { + "type": "array", + "items": { + "type": "object", + "prettyName": "Repo Array Type", + "properties": { + "repoArrayStringValue": { + "type": "strings", + "prettyName": "Repo Array String Value", + "description": "A string value." + } + } + } + } + }, + 'nameField': 'name', + 'identityFields': ['name'] + }]) + def test_bad_type_in_array(schema_file): + with pytest.raises(exceptions.SchemaValidationError) as err_info: + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) + validator.validate() + + message = err_info.value.message + assert ( + "'strings' is not valid under any of the given schemas" in message)
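
The test_tmpdir and test_tmpdir_with_raised_exception cases added to test_file_util.py above pin down the contract of file_util.tmpdir: the context manager yields the path of an existing temporary directory and removes it on exit, even when the body raises. The sketch below is not the SDK's shipped implementation (that lives in tools/src/main/python/dlpx/virtualization/_internal/file_util.py); it is only a minimal illustration of behavior consistent with those tests, using the standard contextlib, shutil, and tempfile modules.

import contextlib
import shutil
import tempfile

@contextlib.contextmanager
def tmpdir():
    # Hypothetical sketch, not the real file_util.tmpdir: yield a fresh
    # temporary directory and always remove it afterwards.
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        # The finally block makes cleanup run even when the with-block
        # raises, which is what test_tmpdir_with_raised_exception asserts.
        shutil.rmtree(path)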