diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e9169d26..bbd85c54 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 2.0.0 +current_version = 2.1.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\.(?P[a-z]+)(?P\d+))? diff --git a/.dependabot/config.yaml b/.dependabot/config.yaml new file mode 100644 index 00000000..2502bd72 --- /dev/null +++ b/.dependabot/config.yaml @@ -0,0 +1,23 @@ +version: 1 +update_configs: + # Keep requirements.txt files up-to-date in each package. + - package_manager: "python" + directory: "/dvp" + update_schedule: "daily" + target_branch: "develop" + - package_manager: "python" + directory: "/common" + update_schedule: "daily" + target_branch: "develop" + - package_manager: "python" + directory: "/platform" + update_schedule: "daily" + target_branch: "develop" + - package_manager: "python" + directory: "/libs" + update_schedule: "daily" + target_branch: "develop" + - package_manager: "python" + directory: "/tools" + update_schedule: "daily" + target_branch: "develop" diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index a7625c63..ab5d1efe 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -10,7 +10,7 @@ jobs: max-parallel: 4 matrix: python-version: [2.7] - os: [ubuntu-latest, macos-latest] + os: [ubuntu-latest, macos-latest, windows-latest] package: [common, libs, platform, tools] steps: @@ -67,7 +67,7 @@ jobs: - name: Run flake8 on test directory working-directory: ${{ matrix.package }} - run: python -m flake8 test/main/python --max-line-length 88 + run: python -m flake8 src/test/python --max-line-length 88 #format: #name: Check format ${{ matrix.package}} diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 00000000..d9d09d48 --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,77 @@ +name: Publish docs to GitHub pages + +on: + 
create: + branches: + - 'docs/**' + push: + branches: + - 'docs/**' + paths: + - 'docs/**' + +jobs: + publish: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7] + package: [docs] + + steps: + - uses: actions/checkout@v2 + with: + # Only a single commit is fetched by default, for the ref/SHA that triggered the workflow. + # Set fetch-depth: 0 to fetch all history for all branches and tags. + fetch-depth: 0 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Set the current branch environment variable + working-directory: ${{ matrix.package }} + run: | + CURRENT_BRANCH_VAR=${GITHUB_REF#refs/heads/} + echo "Current branch: $CURRENT_BRANCH_VAR" + # Set CURRENT_BRANCH environment variable to the current branch name. + echo "::set-env name=CURRENT_BRANCH::$(echo $CURRENT_BRANCH_VAR)" + - name: Display all remote branches + working-directory: ${{ matrix.package }} + run: | + # Display all remote branches + git branch -r + - name: Set the latest docs branch environment variable + working-directory: ${{ matrix.package }} + run: | + # Get only docs branches, extract the "docs/x.y.z" part, sort them in descending order, and get the first one. + LATEST_DOCS_BRANCH_VAR=$(git branch -r | grep -e ".*\/*docs\/[0-9].[0-9].[0-9]" | sed -n "s/.*\/*\(docs\/[0-9].[0-9].[0-9]\).*/\1/p" | sort -r | head -n 1) + echo "Latest docs branch: $LATEST_DOCS_BRANCH_VAR" + # Set the LATEST_DOCS_BRANCH environment variable. + echo "::set-env name=LATEST_DOCS_BRANCH::$(echo $LATEST_DOCS_BRANCH_VAR)" + - name: Check that the current branch is the latest docs branch, fail otherwise + working-directory: ${{ matrix.package }} + run: | + if [ $CURRENT_BRANCH != $LATEST_DOCS_BRANCH ]; then + echo "The action is running on branch $CURRENT_BRANCH which is not the latest docs branch ($LATEST_DOCS_BRANCH)." + exit 1 + else + echo "The action is running on the latest docs branch." 
+ fi + - name: Install Pipenv + uses: dschep/install-pipenv-action@v1 + - name: Install dependencies for building documentation + working-directory: ${{ matrix.package }} + run: | + pipenv install + - name: Build documentation + working-directory: ${{ matrix.package }} + run: | + pipenv run mkdocs build --clean + - name: Deploy the contents of docs/site to gh-pages 🚀 + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: docs/site + commit_message: Deploy to gh-pages 🚀 + user_name: "github-actions[bot]" + user_email: "github-actions[bot]@users.noreply.github.com" diff --git a/.github/workflows/publish-python-packages.yml b/.github/workflows/publish-python-packages.yml new file mode 100644 index 00000000..d24324c3 --- /dev/null +++ b/.github/workflows/publish-python-packages.yml @@ -0,0 +1,42 @@ +name: Publish Python packages to Test PyPi + +on: + # Run on push when the version file has changed on selected branches. + push: + branches: + - master + - develop + - 'release/**' + paths: + - 'dvp/src/main/python/dlpx/virtualization/VERSION' + +jobs: + publish: + runs-on: ubuntu-latest + strategy: + max-parallel: 5 + matrix: + python-version: [2.7] + package: [common, dvp, libs, platform, tools] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + # Install dependencies necessary for building and publishing the package. + - name: Install dependencies + run: | + pip install setuptools wheel twine + # Build each Python package and publish it to Test PyPi. 
+ - name: Build and publish ${{ matrix.package }} package + working-directory: ${{ matrix.package }} + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.VSDK_PYPI_TOKEN }} + TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ + run: | + python setup.py sdist bdist_wheel + twine upload dist/* + diff --git a/common/.python-version b/common/.python-version new file mode 100644 index 00000000..43c4dbe6 --- /dev/null +++ b/common/.python-version @@ -0,0 +1 @@ +2.7.17 diff --git a/common/requirements.txt b/common/requirements.txt index 28ecde71..33673b1b 100644 --- a/common/requirements.txt +++ b/common/requirements.txt @@ -8,8 +8,7 @@ pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.0 py==1.8.0 pyparsing==2.4.5 -pytest==4.6.6 +pytest==4.6.11 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.7 zipp==0.6.0 diff --git a/common/setup.py b/common/setup.py index 35d574e2..64e5643c 100644 --- a/common/setup.py +++ b/common/setup.py @@ -4,7 +4,7 @@ PYTHON_SRC = 'src/main/python' install_requires = [ - "dvp-api == 1.1.0", + "dvp-api == 1.3.0", ] with open(os.path.join(PYTHON_SRC, 'dlpx/virtualization/common/VERSION')) as version_file: diff --git a/common/src/main/python/dlpx/virtualization/common/VERSION b/common/src/main/python/dlpx/virtualization/common/VERSION index 359a5b95..50aea0e7 100644 --- a/common/src/main/python/dlpx/virtualization/common/VERSION +++ b/common/src/main/python/dlpx/virtualization/common/VERSION @@ -1 +1 @@ -2.0.0 \ No newline at end of file +2.1.0 \ No newline at end of file diff --git a/docs/Pipfile b/docs/Pipfile index aa02c1f8..dc5d7ae0 100644 --- a/docs/Pipfile +++ b/docs/Pipfile @@ -4,9 +4,12 @@ verify_ssl = true name = "pypi" [packages] -mkdocs = "*" -mkdocs-material = "*" -markdown-include = "*" -mkdocs-awesome-pages-plugin = "*" +mkdocs = "==0.17.5" +mkdocs-material = "==2.9.2" +markdown-include = "==0.5.1" +mkdocs-awesome-pages-plugin = "==1.2.0" + +[requires] +python_version = "3.7" [dev-packages] diff --git 
a/docs/Pipfile.lock b/docs/Pipfile.lock index 6b9f6840..d43add76 100644 --- a/docs/Pipfile.lock +++ b/docs/Pipfile.lock @@ -1,10 +1,12 @@ { "_meta": { "hash": { - "sha256": "6766a756dba28084af761664985b55708ca941615fe77db3cafce3ce8c65135d" + "sha256": "30e56bad0351cec79ac4bd9b489c99a5c10653529c78a75b52ecebbf22cf5b7c" }, "pipfile-spec": 6, - "requires": {}, + "requires": { + "python_version": "3.7" + }, "sources": [ { "name": "pypi", @@ -16,32 +18,38 @@ "default": { "click": { "hashes": [ - "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", - "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "version": "==7.1.2" + }, + "importlib-metadata": { + "hashes": [ + "sha256:0505dd08068cfec00f53a74a0ad927676d7757da81b7436a6eefe4c7cf75c545", + "sha256:15ec6c0fd909e893e3a08b3a7c76ecb149122fb14b7efe1199ddd4c7c57ea958" ], - "version": "==6.7" + "markers": "python_version < '3.8'", + "version": "==1.6.1" }, "jinja2": { "hashes": [ - "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", - "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" + "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", + "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" ], - "version": "==2.10" + "version": "==2.11.2" }, "livereload": { "hashes": [ - "sha256:583179dc8d49b040a9da79bd33de59e160d2a8802b939e304eb359a4419f6498", - "sha256:dd4469a8f5a6833576e9f5433f1439c306de15dbbfeceabd32479b1123380fa5" + "sha256:d1eddcb5c5eb8d2ca1fa1f750e580da624c0f7fcb734aa5780dc81b7dcbd89be" ], - "markers": "python_version != '3.0.*' and python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.3.*'", - "version": "==2.5.2" + "version": "==2.6.2" }, "markdown": { "hashes": [ - 
"sha256:9ba587db9daee7ec761cfc656272be6aabe2ed300fece21208e4aab2e457bc8f", - "sha256:a856869c7ff079ad84a3e19cd87a64998350c2b94e9e08e44270faef33400f81" + "sha256:1fafe3f1ecabfb514a5285fca634a53c1b32a81cb0feb154264d55bf2ff22c17", + "sha256:c467cd6233885534bf0fe96e62e3cf46cfc1605112356c4f9981512b8174de59" ], - "version": "==2.6.11" + "version": "==3.2.2" }, "markdown-include": { "hashes": [ @@ -52,9 +60,41 @@ }, "markupsafe": { "hashes": [ - "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" + "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", + "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", + "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", + "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", + "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", + "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", + "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", + "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", + "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", + "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", + "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", + "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", + 
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", + "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", + "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", + "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", + "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", + "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], - "version": "==1.0" + "version": "==1.1.1" }, "mkdocs": { "hashes": [ @@ -82,40 +122,40 @@ }, "pygments": { "hashes": [ - "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d", - "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc" + "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44", + "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324" ], - "version": "==2.2.0" + "version": "==2.6.1" }, "pymdown-extensions": { "hashes": [ - "sha256:20f2ae1067ab850cab92fcf57487267a7fd1365a7b1e7c5394e1e0778455eec1", - "sha256:7d3fcbb4c5d70a78d1f4c2c7eef02dbe7e1ba08b06cb72e08b3d1027eb77458b" + "sha256:5bf93d1ccd8281948cd7c559eb363e59b179b5373478e8a7195cf4b78e3c11b6", + "sha256:8f415b21ee86d80bb2c3676f4478b274d0a8ccb13af672a4c86b9ffd22bd005c" ], - "version": "==4.12" + "version": "==7.1" }, "pyyaml": { "hashes": [ - 
"sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b", - "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf", - "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a", - "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3", - "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1", - "sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1", - "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613", - "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04", - "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f", - "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537", - "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531" + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" ], - "version": "==3.13" + "version": "==5.3.1" }, "six": { "hashes": [ - "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", - "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + 
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" ], - "version": "==1.11.0" + "version": "==1.15.0" }, "tornado": { "hashes": [ @@ -126,6 +166,13 @@ "sha256:b36298e9f63f18cad97378db2222c0e0ca6a55f6304e605515e05a25483ed51a" ], "version": "==4.5.3" + }, + "zipp": { + "hashes": [ + "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", + "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" + ], + "version": "==3.1.0" } }, "develop": {} diff --git a/docs/docs/Best_Practices/Sensitive_Data.md b/docs/docs/Best_Practices/Sensitive_Data.md index 8665649f..166d15fe 100644 --- a/docs/docs/Best_Practices/Sensitive_Data.md +++ b/docs/docs/Best_Practices/Sensitive_Data.md @@ -34,6 +34,9 @@ This tells the Delphix Engine to take special precautions with this password pro 3. The Delphix Engine's UI and CLI will not display the password. 4. Clients of the Delphix Engine's public API will not be able to access the password. +!!! note + Removing a previously added password property from a field and running a [Data Migration](/References/Glossary.md#data-migration) will expose the password in plaintext. If this is intentional, write a migration to ensure that the new property conforms to the new schema. + # Using Environment Variables For Remote Data Passing Sometimes, a plugin will need to pass sensitive data to a remote environment. For example, perhaps a database command needs to be run on a [staging environment](/References/Glossary.md#staging-environment), and that database command will need to use a password. 
diff --git a/docs/docs/Building_Your_First_Plugin/Discovery.md b/docs/docs/Building_Your_First_Plugin/Discovery.md index 67eb9e2a..dcff0993 100644 --- a/docs/docs/Building_Your_First_Plugin/Discovery.md +++ b/docs/docs/Building_Your_First_Plugin/Discovery.md @@ -164,7 +164,7 @@ The Delphix Engine will pass us information about the source environment in an a return [RepositoryDefinition(name='1e87dc30-3cdb-4f0a-9634-07ce017d20d1')] ``` -This creates and returns a Python object that corresponds to the format defined by our repository schema. Because out repository has exactly one string property called `name`, therefore this Python object has one property called `name`. +This creates and returns a Python object that corresponds to the format defined by our repository schema. Because our repository has exactly one string property called `name`, therefore this Python object has one property called `name`. Notice that the code generator has filled in the value of `name` with a random string. This results in a plugin operation that works, but which will not be very helpful for the user. We'll change this later. diff --git a/docs/docs/Building_Your_First_Plugin/Initial_Setup.md b/docs/docs/Building_Your_First_Plugin/Initial_Setup.md index 5012daa4..d2b0424e 100644 --- a/docs/docs/Building_Your_First_Plugin/Initial_Setup.md +++ b/docs/docs/Building_Your_First_Plugin/Initial_Setup.md @@ -4,7 +4,7 @@ Before we begin to start writing plugin code, we will need to do some setup work The quoted examples in this section assume you're working on a Unix-like system. -## Sanity check +## Quick Check First a reminder that it's highly recommended that you develop your plugin in a [virtual environment](https://virtualenv.pypa.io/en/latest/). @@ -53,14 +53,14 @@ To start, we will create a new directory where our new plugin code will live. Now that we are in our new plugin directory, we can use the `dvp` tool to create a plugin for us. 
This plugin will be a mere skeleton -- it will not do anything useful until we modify it in the subsequent pages. ``` -(venv) first_plugin$ dvp init -n first_plugin -s STAGED -p WINDOWS +(venv) first_plugin$ dvp init -n first_plugin -s STAGED -t WINDOWS ``` The `-n` argument here means "plugin name." We are using the name `first_plugin`. The `-s` argument tells which syncing strategy we want to use. -The `-p` argument tells which host platform our plugin supports. +The `-t` argument tells which host platform our plugin supports. You can type `dvp init -h` for more information about the options available. diff --git a/docs/docs/Getting_Started.md b/docs/docs/Getting_Started.md index 0879a0e8..83d08838 100644 --- a/docs/docs/Getting_Started.md +++ b/docs/docs/Getting_Started.md @@ -14,7 +14,7 @@ The platform and libs modules expose objects and methods needed to develop a plu - macOS 10.14+, Ubuntu 16.04+, or Windows 10 - Python 2.7 (Python 3 is not supported) - Java 7+ -- Delphix Engine 6.0.2.0 or above +- Delphix Engine 6.0.3.0 or above ## Installation To install the latest version of the SDK run: @@ -67,4 +67,4 @@ You can also use a [CLI Configuration File](/Best_Practices/CLI_Configuration_Fi ## Questions? -If you have questions, bugs or feature requests reach out to us via the [Virtualization SDK GitHub repository](https://github.com/delphix/virtualization-sdk/). \ No newline at end of file +If you have questions, bugs or feature requests reach out to us via the [Virtualization SDK GitHub repository](https://github.com/delphix/virtualization-sdk/). diff --git a/docs/docs/References/Glossary.md b/docs/docs/References/Glossary.md index 56e6e0d2..a1038684 100644 --- a/docs/docs/References/Glossary.md +++ b/docs/docs/References/Glossary.md @@ -104,6 +104,9 @@ The process by which the Delphix Engine ingests data from a dataset on a [source ## Target Environment An [environment](#environment) on which Delphix-provided virtualized datasets can be used. 
+## Lua Toolkit +Legacy model for writing "plugins" in Lua, with limited documentation and support for writing, building and uploading toolkits. This was the predecessor to the Virtualization SDK. + ## Upgrade Operation A special plugin operation that takes data produced by an older version of a plugin, and transforms it into the format expected by the new version of the plugin. diff --git a/docs/docs/Release_Notes/.pages b/docs/docs/Release_Notes/.pages index 8396df7a..d160c92f 100644 --- a/docs/docs/Release_Notes/.pages +++ b/docs/docs/Release_Notes/.pages @@ -1,4 +1,5 @@ arrange: + - 2.1.0 - 2.0.0 - 1.0.0 - 0.4.0 diff --git a/docs/docs/Release_Notes/1.0.0/1.0.0.md b/docs/docs/Release_Notes/1.0.0/1.0.0.md index 73769b82..522342f6 100644 --- a/docs/docs/Release_Notes/1.0.0/1.0.0.md +++ b/docs/docs/Release_Notes/1.0.0/1.0.0.md @@ -1,4 +1,4 @@ -# Release - GA (v1.0.0) +# Release - v1.0.0 To install or upgrade the SDK, refer to instructions [here](/Getting_Started.md#installation). diff --git a/docs/docs/Release_Notes/2.0.0/2.0.0.md b/docs/docs/Release_Notes/2.0.0/2.0.0.md index 68e80920..4269462c 100644 --- a/docs/docs/Release_Notes/2.0.0/2.0.0.md +++ b/docs/docs/Release_Notes/2.0.0/2.0.0.md @@ -1,10 +1,10 @@ -# Release - GA (v2.0.0) +# Release - v2.0.0 To install or upgrade the SDK, refer to instructions [here](/Getting_Started.md#installation). ## New & Improved -* Added the ability for plugins to upgrade across plugin versions with schema changes. Some hightlights: +* Added the ability for plugins to upgrade across plugin versions with schema changes. Some highlights: * Schema updates using data migrations. * Flexiblity for plugins to pick any release strategy. * Plugin upgrades supported across multiple plugin versions. 
diff --git a/docs/docs/Release_Notes/2.1.0/2.1.0.md b/docs/docs/Release_Notes/2.1.0/2.1.0.md new file mode 100644 index 00000000..d3e05867 --- /dev/null +++ b/docs/docs/Release_Notes/2.1.0/2.1.0.md @@ -0,0 +1,15 @@ +# Release - v2.1.0 + +To install or upgrade the SDK, refer to instructions [here](/Getting_Started.md#installation). + +## New & Improved + +* Added the ability to migrate existing [Lua toolkits](/References/Glossary.md#lua-toolkit) to SDK plugins. + * [Convert](/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Converting_Migration_Scripts.md) any existing Lua upgrade scripts to Python migrations by using new Lua upgrade [decorators](/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Decorators.md). + * Added new optional fields `luaName` and `minimumLuaVersion` to the [Plugin Config](/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Plugin_Config.md). These properties allow a plugin to specify which Lua toolkit(s) the plugin can migrate. + + More details about Lua toolkit migration can be found [here](/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Overview.md). + +## Breaking Changes + +No breaking changes in this release! 
\ No newline at end of file diff --git a/docs/docs/Versioning_And_Upgrade/.pages b/docs/docs/Versioning_And_Upgrade/.pages index c8648003..335bc1b3 100644 --- a/docs/docs/Versioning_And_Upgrade/.pages +++ b/docs/docs/Versioning_And_Upgrade/.pages @@ -5,3 +5,4 @@ arrange: - Compatibility.md - Backports_And_Hotfixes.md - Replication.md + - Lua_Toolkit_To_SDK_Plugin_Migration diff --git a/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/.pages b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/.pages new file mode 100644 index 00000000..9306b009 --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/.pages @@ -0,0 +1,6 @@ +arrange: + - Overview.md + - Plugin_Config.md + - Decorators.md + - Plugin_Operations.md + - Converting_Migration_Scripts.md diff --git a/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Converting_Migration_Scripts.md b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Converting_Migration_Scripts.md new file mode 100644 index 00000000..03c56fea --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Converting_Migration_Scripts.md @@ -0,0 +1,56 @@ +# Converting Lua Upgrade Scripts to Python Data Migrations +To convert migrations (a.k.a. "upgrade scripts") that were originally written in Lua, we need to get the version that the migration upgrades from, the object type the migration is written for, and lastly convert the code into Python code using the [decorators](Decorators.md) described previously. + +## Example +Assume there are two versions of a lua toolkit, `1.0.0` and `1.1.0` where the `1.1.0` version is following the basic toolkit directory structure (actually containing all operations): + +``` +├── main.json +├── discovery +│ ├── repositoryDiscovery.lua +│ └── sourceConfigDiscovery.lua +├── staged +│ ├── mountSpec.lua +│ ├── ... +│ └── worker.lua +├── virtual +│ ├── configure.lua +│ ├── ... 
+│ └── unconfigure.lua +├── upgrade +│ └── 1.0 +│ ├── upgradeLinkedSource.lua +│ ├── ... +│ └── upgradeVirtualSource.lua +├── resources +└── ├── log.sh + ├── ... + └── stop.sh +``` + +`upgradeLinkedSource.lua` contains: + +```lua +parameters.dsOldValue = "remove" +parameters.dsUpdateValue = 1 +parameters.dsLanguage = "LUA" +return parameters +``` + +This can be equivalently converted into the python code: + +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.upgrade.linked_source("1.0", MigrationType.LUA) +def upgrade_linked_source(old_linked_source): + new_linked_source = dict(old_linked_source) + new_linked_source["dsOldValue"] = "remove" + new_linked_source["dsUpdateValue"] = 1 + new_linked_source["dsLanguage"] = "LUA" + return new_linked_source +``` + +You will need to determine how far back in the Lua upgrade chain you want to support multi-step upgrade from, and convert all of those upgrade scripts accordingly. Remember that the execution of these scripts relies on there not being any missing migrations from the `minimumLuaVersion` defined in the plugin config to the last toolkit version written. Lua migrations will be executed from the lowest to highest version that exists. When executing, these migrations are run to the highest Lua toolkit version only. Any migrations needed to get from that toolkit to the Python plugin would need to be written as a regular Python migration. diff --git a/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Decorators.md b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Decorators.md new file mode 100644 index 00000000..f5eb8d1d --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Decorators.md @@ -0,0 +1,14 @@ +# Decorators + +The Virtualization SDK exposes [decorators](/References/Decorators.md) as mentioned in the regular documentation. 
Below we list the additional operations added to support Lua to Python migrations. This assumes `Plugin()` is instantiated as `plugin`: + +Plugin Operation | Decorator +---------------- | -------- +[Lua Repository Data Migration](Plugin_Operations.md#lua-repository-data-migration) | `@plugin.upgrade.repository(lua_version, MigrationType.LUA)` +[Lua Source Config Data Migration](Plugin_Operations.md#lua-source-config-data-migration) | `@plugin.upgrade.source_config(lua_version, MigrationType.LUA)` +[Lua Linked Source Data Migration](Plugin_Operations.md#lua-linked-source-data-migration) | `@plugin.upgrade.linked_source(lua_version, MigrationType.LUA)` +[Lua Virtual Source Data Migration](Plugin_Operations.md#lua-virtual-source-data-migration) | `@plugin.upgrade.virtual_source(lua_version, MigrationType.LUA)` +[Lua Snapshot Data Migration](Plugin_Operations.md#lua-snapshot-data-migration) | `@plugin.upgrade.snapshot(lua_version, MigrationType.LUA)` + +!!! info "lua_version format" + The `lua_version` field in this decorator should be the (major,minor) version of the Lua toolkit. This means if the version is set to `1.1.HOTFIX123` in the `main.json` file for the Lua toolkit, the `lua_version` passed into this decorator should be `1.1`. diff --git a/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Overview.md b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Overview.md new file mode 100644 index 00000000..92293400 --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Overview.md @@ -0,0 +1,20 @@ +# Overview + +Before the Virtualization SDK was written, Delphix only supported [toolkits](/References/Glossary.md#lua-toolkit) written in Lua. There was only limited documentation to help people write, build, and upload toolkits. 
Multiple toolkits were released and are still used by customers today, so as we move towards only supporting SDK Plugins, there needs to be a way to get customers off of Lua toolkits and onto SDK plugins. + +If you are reading this and have no idea what a Lua toolkit is, there is no reason to read further into this section. Everything written in these pages will assume the goal is to write specific code as part of a plugin to convert objects created using Lua toolkits to use the newly uploaded Python plugin. + +In the next few pages, we also make the assumption that you've written both a Lua toolkit and a Python plugin before and know some of the terminology already established. If this is not true, please try [building a plugin](/Building_Your_First_Plugin/Overview.md) and [writing some upgrade migrations](/Versioning_And_Upgrade/Upgrade.md) first before coming back here to learn how to add upgrading from Lua toolkits into the mix as described below. + +## Basic no-schema Migration +One way to migrate from a Lua toolkit to a plugin is to write an exactly equivalent plugin that does not make any [schema](/References/Schemas.md) changes to the objects that were defined originally in the Lua toolkit. If this is the scenario you are in, then you only need to update the [plugin config](Plugin_Config.md) with a couple of new Lua migration specific fields. + + +## Migration with schema changes +The other way to migrate from a Lua toolkit to a plugin is to wait and write a python plugin only once you have new features you want to release. These new features may include schema changes to any of the objects. In this case you will need to update both the [plugin config](Plugin_Config.md) and write new [Lua upgrade operations](Plugin_Operations.md) for each of the objects that needs to be modified during the upgrade. 
+ +## Supporting migrations with older versions of Lua +Having the ability to define Lua upgrade operations in the new plugin code means that older Lua version migration scripts can be [converted](Converting_Migration_Scripts.md), enabling multi-step upgrades from older Lua versions to migrate and become plugins. + +!!! warning "New versions of a Lua toolkit is strongly discouraged after Python Plugin is written" + After having written a Plugin to migrate a specific Lua toolkit, while possible, you should avoid writing new major/minor versions of the toolkit in Lua. Patch releases with no schema changes can still be done. If you need to write a new Lua toolkit version please contact the Delphix Virtualization SDK Engineering team to get help on updating migrations accordingly. diff --git a/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Plugin_Config.md b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Plugin_Config.md new file mode 100644 index 00000000..dec70fef --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Plugin_Config.md @@ -0,0 +1,145 @@ +# Plugin Config +For all regular fields in a plugin config go [here](/References/Plugin_Config.md). The following fields described are the ones needed to migrate Lua toolkits to Python plugins. + +## Fields + +|Field Name|Required|Type|Description| +|----------|:------:|:--:|-----------| +|luaName|N|string|The name of the Lua toolkit this plugin should upgrade from. This field is required if the minimumLuaVersion is defined.| +|minimumLuaVersion|N|string|The lowest major minor version of the Lua toolkit that upgrade is supported from. 
This field is required if the luaName is defined.| + +## Example +Assume a lua toolkit with the following `main.json` file: + +```json +{ + "type": "Toolkit", + "name": "delphixdb", + "prettyName": "DelphixDB", + "version": "1.0.0", + "defaultLocale": "en-us", + "hostTypes": ["UNIX"], + "discoveryDefinition": { + "type": "ToolkitDiscoveryDefinition", + "repositorySchema": { + "type": "object", + "properties": { + "installPath": { + "type": "string", + "prettyName": "Delphix DB Binary Installation Path", + "description": "The path to the Delphix DB installation binaries" + }, + "version": { + "type": "string", + "prettyName": "Version", + "description": "The version of the Delphix DB binaries" + } + } + }, + "repositoryIdentityFields": ["installPath"], + "repositoryNameField": "installPath", + "sourceConfigSchema": { + "type": "object", + "properties": { + "dataPath": { + "type": "string", + "prettyName": "Data Path", + "description": "The path to the Delphix DB instance's data" + }, + "port": { + "type": "integer", + "prettyName": "Port", + "description": "The port of the Delphix DB" + }, + "dbName": { + "type": "string", + "prettyName": "Delphix DB Name", + "description": "The name of the Delphix DB instance." + } + } + }, + "sourceConfigIdentityFields": ["dataPath"], + "sourceConfigNameField": "dataPath" + }, + "linkedSourceDefinition": { + "type": "ToolkitLinkedStagedSource", + "parameters": { + "type": "object", + "additionalProperties": false, + "properties": { + "primaryDbName": { + "type": "string", + "prettyName": "Primary DB Name", + "description": "The name of the primary database to link.", + "default": "primaryDB" + }, + "stagingDbName": { + "type": "string", + "prettyName": "Staging DB Name", + "description": "The name of the staging database to create." 
+ }, + "stagingPort": { + "type": "integer", + "prettyName": "Staging Port", + "description": "The port of the staging database to create.", + "default": 1234 + } + } + } + }, + "virtualSourceDefinition": { + "type": "ToolkitVirtualSource", + "parameters": { + "type": "object", + "additionalProperties": false, + "properties": { + "port": { + "type": "integer", + "prettyName": "Port", + "description": "Port that provisioned database should use.", + "default": 1234 + }, + "dbName": { + "type": "string", + "prettyName": "Database Name", + "description": "Name to use for newly provisioned database.", + "default": "vdb" + } + } + } + }, + "snapshotSchema": { + "type": "object", + "properties": { + "snapshotID": { + "type": "string", + "prettyName": "Snapshot ID", + "description": "A unique ID for this snapshot" + } + } + } +} +``` + +Here is a valid plugin config for a plugin that wants to be upgradable from the toolkit: + +```yaml +id: ea009cb4-f76b-46dc-bbb6-689e7acecce4 +name: DelphixDB +luaName: delphixdb +minimumLuaVersion: "1.0" +language: PYTHON27 +hostTypes: +- UNIX +pluginType: STAGED +entryPoint: plugin_runner:plugin +srcDir: src +schemaFile: schema.json +buildNumber: 2.0.0 +``` + +!!! info "`id` and `luaName` fields in plugins versus `name` field in toolkits" + * The `luaName` will be used to determine if an already uploaded Lua toolkit is considered a lower version of the Python plugin being uploaded. + * If the `luaName` is not set then no Lua toolkit will be upgraded. + * If the `id` of the plugin being uploaded happens to match the `name` in the Lua toolkit already installed on the Delphix Engine, the upload will fail regardless of what the `luaName` is. + * When uploading a plugin with the `luaName` set, that `luaName` and `id` pair will be the only pair uploaded successfully. Uploading a new plugin with the same `luaName` but different `id` will fail. 
diff --git a/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Plugin_Operations.md b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Plugin_Operations.md new file mode 100644 index 00000000..3d728f68 --- /dev/null +++ b/docs/docs/Versioning_And_Upgrade/Lua_Toolkit_To_SDK_Plugin_Migration/Plugin_Operations.md @@ -0,0 +1,286 @@ +# Plugin Operations + +## Summary +Plugin operations related to Lua migrations are listed below. Information regarding other operations is [here](/References/Plugin_Operations.md). + + +Plugin Operation | **Required** | Decorator | Delphix Engine Operations +---------------- | -------- | --------- | ------------------------- +[Lua Repository Data Migration](#lua-repository-data-migration) | **No** | `upgrade.repository(lua_version, MigrationType.LUA)` | [Upgrade](/References/Workflows.md#upgrade) +[Lua Source Config Data Migration](#lua-source-config-data-migration) | **No** | `upgrade.source_config(lua_version, MigrationType.LUA)` | [Upgrade](/References/Workflows.md#upgrade) +[Lua Linked Source Data Migration](#lua-linked-source-data-migration) | **No** | `upgrade.linked_source(lua_version, MigrationType.LUA)` | [Upgrade](/References/Workflows.md#upgrade) +[Lua Virtual Source Data Migration](#lua-virtual-source-data-migration) | **No** | `upgrade.virtual_source(lua_version, MigrationType.LUA)` | [Upgrade](/References/Workflows.md#upgrade) +[Lua Snapshot Data Migration](#snapshot-data-migration) | **No** | `upgrade.snapshot(lua_version, MigrationType.LUA)` | [Upgrade](/References/Workflows.md#upgrade) + + +## Lua Repository Data Migration + +A Lua Repository [Data Migration](/References/Glossary.md#data-migration) migrates repository data from an older [schema](/References/Glossary.md#schema) format defined originally from a Lua toolkit to an updated schema format defined in the Python plugin. + +### Required / Optional +**Optional.**
+ +!!! warning + You must ensure that all repository data will match your updated repository schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more repository data migrations. + +### Delphix Engine Operations + +* [Upgrade](/References/Workflows.md#upgrade) + +### Signature + +`def migrate_repository(old_repository)` + +### Decorator + +`upgrade.repository(lua_version, MigrationType.LUA)` + +### Decorator Arguments + +Argument | Type | Description +-------- | ---- | ----------- +lua_version | String | The Lua version of the toolkit that this migration would be applicable to. This is the ID of this migration. The version here is actually just the major and minor version of the Lua toolkit. Therefore the `lua_version` for each repository data migration must be unique. +migration_type | String | This field indicates whether the operation is a Lua migration or just a regular data migration. Specify this as LUA to indicate a Lua migration. If not defined, this operation will default to a regular [repository data migration](/References/Plugin_Operations.md#repository-data-migration). + +### Function Arguments +Argument | Type | Description +-------- | ---- | ----------- +old_repository | Dictionary | The plugin-specific data associated with a repository, that conforms to the previous schema defined in Lua. + +!!! warning + The function argument `old_repository` is a Python dictionary, where each property name appears exactly as described in the previous repository schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](/References/Schemas_and_Autogenerated_Classes.md) based on the schema. + + +### Returns +Dictionary
+A migrated version of the `old_repository` input that must conform to the updated repository schema. + +### Example +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.upgrade.repository("1.1", MigrationType.LUA) +def add_new_flag_to_repository(old_repository): + new_repository = dict(old_repository) + new_repository["useNewFeature"] = False + return new_repository +``` + +## Lua Source Config Data Migration + +A Lua Source Config [Data Migration](/References/Glossary.md#data-migration) migrates source config data from an older [schema](/References/Glossary.md#schema) format defined originally from a Lua toolkit to an updated schema format defined in the Python plugin. + +### Required / Optional +**Optional.**
+ +!!! warning + You must ensure that all source config data will match your source config schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more source config data migrations. + +### Delphix Engine Operations + +* [Upgrade](/References/Workflows.md#upgrade) + +### Signature + +`def migrate_source_config(old_source_config)` + +### Decorator + +`upgrade.source_config(lua_version, MigrationType.LUA)` + +### Decorator Arguments + +Argument | Type | Description +-------- | ---- | ----------- +lua_version | String | The Lua version of the toolkit that this migration would be applicable to. This is the ID of this migration. The version here is actually just the major and minor version of the Lua toolkit. Therefore the `lua_version` for each source config data migration must be unique. +migration_type | String | This field indicates whether the operation is a Lua migration or just a regular data migration. Specify this as LUA to indicate a Lua migration. If not defined, this operation will default to a regular [source config data migration](/References/Plugin_Operations.md#source-config-data-migration). + +### Function Arguments +Argument | Type | Description +-------- | ---- | ----------- +old_source_config | Dictionary | The plugin-specific data associated with a source config, that conforms to the previous schema. + +!!! warning + The function argument `old_source_config` is a Python dictionary, where each property name appears exactly as described in the previous source config schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](/References/Schemas_and_Autogenerated_Classes.md) based on the schema. + + +### Returns +Dictionary
+A migrated version of the `old_source_config` input that must conform to the updated source config schema. + +### Example +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.upgrade.source_config("1.1", MigrationType.LUA) +def add_new_flag_to_source_config(old_source_config): + new_source_config = dict(old_source_config) + new_source_config["useNewFeature"] = False + return new_source_config +``` +## Lua Linked Source Data Migration + +A Lua Linked Source [Data Migration](/References/Glossary.md#data-migration) migrates linked source data from an older [schema](/References/Glossary.md#schema) format defined originally from a Lua toolkit to an updated schema format defined in the Python plugin. + +### Required / Optional +**Optional.**
+ +!!! warning + You must ensure that all linked source data will match your linked source schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more linked source data migrations. + +### Delphix Engine Operations + +* [Upgrade](/References/Workflows.md#upgrade) + +### Signature + +`def migrate_linked_source(old_linked_source)` + +### Decorator + +`upgrade.linked_source(lua_version, MigrationType.LUA)` + +### Decorator Arguments + +Argument | Type | Description +-------- | ---- | ----------- +lua_version | String | The Lua version of the toolkit that this migration would be applicable to. This is the ID of this migration. The version here is actually just the major and minor version of the Lua toolkit. Therefore the `lua_version` for each linked source data migration must be unique. +migration_type | String | This field indicates whether the operation is a Lua migration or just a regular data migration. Specify this as LUA to indicate a Lua migration. If not defined, this operation will default to a regular [linked source data migration](/References/Plugin_Operations.md#linked-source-data-migration). + +### Function Arguments +Argument | Type | Description +-------- | ---- | ----------- +old_linked_source | Dictionary | The plugin-specific data associated with a linked source, that conforms to the previous schema. + +!!! warning + The function argument `old_linked_source` is a Python dictionary, where each property name appears exactly as described in the previous linked source schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](/References/Schemas_and_Autogenerated_Classes.md) based on the schema. + + +### Returns +Dictionary
+A migrated version of the `old_linked_source` input that must conform to the updated linked source schema. + +### Example +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.upgrade.linked_source("1.1", MigrationType.LUA) +def add_new_flag_to_linked_source(old_linked_source): + new_linked_source = dict(old_linked_source) + new_linked_source["useNewFeature"] = False + return new_linked_source +``` +## Lua Virtual Source Data Migration + +A Lua Virtual Source [Data Migration](/References/Glossary.md#data-migration) migrates virtual source data from an older [schema](/References/Glossary.md#schema) format defined originally from a Lua toolkit to an updated schema format defined in the Python plugin. + +### Required / Optional +**Optional.**
+ +!!! warning + You must ensure that all virtual source data will match your virtual source schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more virtual source data migrations. + +### Delphix Engine Operations + +* [Upgrade](/References/Workflows.md#upgrade) + +### Signature + +`def migrate_virtual_source(old_virtual_source)` + +### Decorator + +`upgrade.virtual_source(lua_version, MigrationType.LUA)` + +### Decorator Arguments + +Argument | Type | Description +-------- | ---- | ----------- +lua_version | String | The Lua version of the toolkit that this migration would be applicable to. This is the ID of this migration. The version here is actually just the major and minor version of the Lua toolkit. Therefore the `lua_version` for each virtual source data migration must be unique. +migration_type | String | This field indicates whether the operation is a Lua migration or just a regular data migration. Specify this as LUA to indicate a Lua migration. If not defined, this operation will default to a regular [virtual source data migration](/References/Plugin_Operations.md#virtual-source-data-migration). + +### Function Arguments +Argument | Type | Description +-------- | ---- | ----------- +old_virtual_source | Dictionary | The plugin-specific data associated with a virtual source, that conforms to the previous schema. + +!!! warning + The function argument `old_virtual_source` is a Python dictionary, where each property name appears exactly as described in the previous virtual source schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](/References/Schemas_and_Autogenerated_Classes.md) based on the schema. + + +### Returns +Dictionary
+A migrated version of the `old_virtual_source` input that must conform to the updated virtual source schema. + +### Example +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.upgrade.virtual_source("1.1", MigrationType.LUA) +def add_new_flag_to_virtual_source(old_virtual_source): + new_virtual_source = dict(old_virtual_source) + new_virtual_source["useNewFeature"] = False + return new_virtual_source +``` +## Lua Snapshot Data Migration + +A Lua Snapshot [Data Migration](/References/Glossary.md#data-migration) migrates snapshot data from an older [schema](/References/Glossary.md#schema) format defined originally from a Lua toolkit to an updated schema format defined in the Python plugin. + +### Required / Optional +**Optional.**
+ +!!! warning + You must ensure that all snapshot data will match your snapshot schema after an upgrade operation. Depending on how your schema has changed, this might imply that you need to write one or more snapshot migrations. + +### Delphix Engine Operations + +* [Upgrade](/References/Workflows.md#upgrade) + +### Signature + +`def migrate_snapshot(old_snapshot)` + +### Decorator + +`upgrade.snapshot(lua_version, MigrationType.LUA)` + +### Decorator Arguments + +Argument | Type | Description +-------- | ---- | ----------- +lua_version | String | The Lua version of the toolkit that this migration would be applicable to. This is the ID of this migration. The version here is actually just the major and minor version of the Lua toolkit. Therefore the `lua_version` for each snapshot data migration must be unique. +migration_type | String | This field indicates whether the operation is a Lua migration or just a regular data migration. Specify this as LUA to indicate a Lua migration. If not defined, this operation will default to a regular [snapshot data migration](/References/Plugin_Operations.md#snapshot-data-migration). + +### Function Arguments +Argument | Type | Description +-------- | ---- | ----------- +old_snapshot | Dictionary | The plugin-specific data associated with a snapshot, that conforms to the previous schema. + +!!! warning + The function argument `old_snapshot` is a Python dictionary, where each property name appears exactly as described in the previous snapshot schema. This differs from non-upgrade-related operations, where the function arguments are [autogenerated classes](/References/Schemas_and_Autogenerated_Classes.md) based on the schema. + + +### Returns +Dictionary
+A migrated version of the `old_snapshot` input that must conform to the updated snapshot schema. + +### Example +```python +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + +@plugin.upgrade.snapshot("1.1", MigrationType.LUA) +def add_new_flag_to_snapshot(old_snapshot): + new_snapshot = dict(old_snapshot) + new_snapshot["useNewFeature"] = False + return new_snapshot +``` diff --git a/docs/docs/Versioning_And_Upgrade/Upgrade.md b/docs/docs/Versioning_And_Upgrade/Upgrade.md index 18cdc9c2..88b58e7c 100644 --- a/docs/docs/Versioning_And_Upgrade/Upgrade.md +++ b/docs/docs/Versioning_And_Upgrade/Upgrade.md @@ -162,8 +162,7 @@ During the process of upgrading to a new version, the Delphix Engine will run al #### Security Concerns Prevent Detailed Error Messages One problem here is that the Delphix Engine is limited in the information that it can provide in the error message. Ideally, the engine would say exactly what was wrong with the object (e.g.: "The field `port` has the value `15`, but the schema says it has to have a value between `256` and `1024`"). -But, the Delphix Engine cannot do this for security reasons. Ordinarily, the Delphix Engine knows which fields contain sensitive information, and can redact such fields from error messages. But, the only reason the Delphix Engine has that knowledge is because the schema provides that information. If an object does -**not** conform to the schema, then the Delphix Engine can't know what is sensitive and what isn't. +But, the Delphix Engine cannot do this for security reasons. Ordinarily, the Delphix Engine knows which fields contain sensitive information, and can redact such fields from error messages. But, the only reason the Delphix Engine has that knowledge is because the schema provides that information. If an object does **not** conform to the schema, then the Delphix Engine can't know what is sensitive and what isn't. 
Therefore, the error message here might lack the detail necessary to debug the problem. diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index e7caaffc..75fd7ad4 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Delphix Virtualization SDK 2.0.0 +site_name: Delphix Virtualization SDK 2.1.0 theme: name: material custom_dir: 'material/' diff --git a/docs/readme.md b/docs/readme.md index 6049ebc3..ece74f2f 100644 --- a/docs/readme.md +++ b/docs/readme.md @@ -3,18 +3,14 @@ This is the Markdown-based documentation for the Virtualization SDK. ## Local Testing -Create a `virtualenv` using Python 3 and run `pipenv run mkdocs serve` +Install dependencies for building documentation and run `pipenv run mkdocs serve` ``` -$ virtualenv -p /usr/local/bin/python3 . -Running virtualenv with interpreter /usr/local/bin/python3 -Using base prefix '/usr/local/Cellar/python/3.7.2_1/Frameworks/Python.framework/Versions/3.7' -New python executable in /Users/asarin/Documents/repos/virt-sdk-docs/env/bin/python3.7 -Also creating executable in /Users/asarin/Documents/repos/virt-sdk-docs/env/bin/python -Installing setuptools, pip, wheel... -done. - -$ source bin/activate +$ pipenv install +Installing dependencies from Pipfile.lock (cf5b7c)... + 🐍 ▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉ 16/16 — 00:00:02 +To activate this project's virtualenv, run pipenv shell. +Alternatively, run a command inside the virtualenv with pipenv run. $ pipenv run mkdocs serve INFO - Building documentation... @@ -75,12 +71,29 @@ Installing dependencies from Pipfile.lock (65135d)… 🐍 ▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉▉ 14/14 — 00:00:03 ``` -## Live Testing and Reviews -The command `git docsdev-review` will handle publishing reviews, and putting your changes on a live docs server. For example, you can clone the `docsdev-server` image on DCOA, and then run `git docsdev-review -m `. This will: +## Live Testing via Github Pages +To publish doc change to your individual fork for review, we use github pages. 
To set this up follow these steps. + +1. Create a new local branch named `gh-pages`. +2. Using the same virtual environment above run: +``` +pipenv run mkdocs build --clean +``` +This will generate the `site` directory which will contain all the generated docs. +3. Copy all these files to the root directory of the virtualization-sdk repo and delete all other files. +4. Commit and push these changes to your individual fork. +5. Go to your individual virtualization-sdk repo's settings, scroll to the bottom and verify under the GitHub Pages section the `Source` is set to `gh-pages branch`. +6. Right above this will be a link explaining where your docs are published. -- Push your doc changes to your VM -Give you a link to the docdev server so you can test your changes live in a browser -Publish a review +You can also utilize the GitHub workflow for publishing docs (`.github/workflows/publish-docs.yml`) associated with a pull request. +The workflow is present on the `develop` branch. Create a branch called `docs/x.y.z` off `develop` on your fork of the repository +to ensure that your docs branch triggers the workflow. If you have more than one `docs/x.y.z` branch in your fork, +you have to push your doc changes to the docs branch with the latest `x.y.z` version. Otherwise, the workflow won't run. +You also have to make sure to choose `gh-pages` branch on your fork as the [publishing source](https://help.github.com/en/github/working-with-github-pages/configuring-a-publishing-source-for-your-github-pages-site#choosing-a-publishing-source). +Once you push doc changes to the `docs/x.y.z` branch, the docs site should be available under +`.github.io/virtualization-sdk` shortly after. You can see the status of publishing under +`https://github.com//virtualization-sdk/actions`. This is a fast way to give a preview of your +changes in a pull request. 
## Workflow diagrams We create workflow diagrams using a tool called `draw.io` which allows us to import/export diagrams in html format. If you want to add a diagram or edit an existing one, simply create or import the html file in `docs/References/html` into `draw.io` and make your desired changes. When you are done, select your diagram and export it as a png file. You can think of the html files as source code, and the png files as build artifacts. After this step, you will be prompted to crop what was selected. You'll want this box checked to trim the whitespace around the diagram. After the diagrams are exported, check in the updated html file to `docs/References/html` and png file to `docs/References/images`. diff --git a/dvp/.python-version b/dvp/.python-version new file mode 100644 index 00000000..43c4dbe6 --- /dev/null +++ b/dvp/.python-version @@ -0,0 +1 @@ +2.7.17 diff --git a/dvp/requirements.txt b/dvp/requirements.txt index e584d23d..d742b1d5 100644 --- a/dvp/requirements.txt +++ b/dvp/requirements.txt @@ -8,8 +8,7 @@ pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 -pytest==4.6.9 +pytest==4.6.11 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.8 zipp==0.6.0 diff --git a/dvp/src/main/python/dlpx/virtualization/VERSION b/dvp/src/main/python/dlpx/virtualization/VERSION index 359a5b95..50aea0e7 100644 --- a/dvp/src/main/python/dlpx/virtualization/VERSION +++ b/dvp/src/main/python/dlpx/virtualization/VERSION @@ -1 +1 @@ -2.0.0 \ No newline at end of file +2.1.0 \ No newline at end of file diff --git a/libs/.python-version b/libs/.python-version new file mode 100644 index 00000000..43c4dbe6 --- /dev/null +++ b/libs/.python-version @@ -0,0 +1 @@ +2.7.17 diff --git a/libs/requirements.txt b/libs/requirements.txt index 10fbe011..275195fe 100644 --- a/libs/requirements.txt +++ b/libs/requirements.txt @@ -10,8 +10,7 @@ pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 
-pytest==4.6.8 +pytest==4.6.11 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.8 zipp==0.6.0 diff --git a/libs/setup.py b/libs/setup.py index 83597b14..d3cff739 100644 --- a/libs/setup.py +++ b/libs/setup.py @@ -7,7 +7,7 @@ version = version_file.read().strip() install_requires = [ - "dvp-api == 1.1.0", + "dvp-api == 1.3.0", "dvp-common == {}".format(version) ] diff --git a/libs/src/main/python/dlpx/virtualization/libs/VERSION b/libs/src/main/python/dlpx/virtualization/libs/VERSION index 359a5b95..50aea0e7 100644 --- a/libs/src/main/python/dlpx/virtualization/libs/VERSION +++ b/libs/src/main/python/dlpx/virtualization/libs/VERSION @@ -1 +1 @@ -2.0.0 \ No newline at end of file +2.1.0 \ No newline at end of file diff --git a/platform/.python-version b/platform/.python-version new file mode 100644 index 00000000..43c4dbe6 --- /dev/null +++ b/platform/.python-version @@ -0,0 +1 @@ +2.7.17 diff --git a/platform/requirements.txt b/platform/requirements.txt index 9875b2d2..fd57ccb7 100644 --- a/platform/requirements.txt +++ b/platform/requirements.txt @@ -10,8 +10,7 @@ pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 -pytest==4.6.9 +pytest==4.6.10 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.8 zipp==0.6.0 diff --git a/platform/setup.py b/platform/setup.py index e8e6eb5b..b4a46018 100644 --- a/platform/setup.py +++ b/platform/setup.py @@ -7,7 +7,7 @@ version = version_file.read().strip() install_requires = [ - "dvp-api == 1.1.0", + "dvp-api == 1.3.0", "dvp-common == {}".format(version), "enum34;python_version < '3.4'", ] diff --git a/platform/src/main/python/dlpx/virtualization/platform/VERSION b/platform/src/main/python/dlpx/virtualization/platform/VERSION index 359a5b95..50aea0e7 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/VERSION +++ b/platform/src/main/python/dlpx/virtualization/platform/VERSION @@ -1 +1 @@ -2.0.0 \ No newline at end of file +2.1.0 \ No newline at end of 
file diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index fe3b144f..589f76e4 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -4,9 +4,8 @@ __path__ = __import__('pkgutil').extend_path(__path__, __name__) - -from dlpx.virtualization.platform.migration_id_set import * from dlpx.virtualization.platform.validation_util import * +from dlpx.virtualization.platform.migration_helper import * from dlpx.virtualization.platform._plugin_classes import * from dlpx.virtualization.platform._discovery import * from dlpx.virtualization.platform._linked import * diff --git a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py index 0bcbd0f6..ae6bdbce 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py @@ -3,26 +3,23 @@ # # -*- coding: utf-8 -*- - """DiscoveryOperations for the Virtualization Platform """ import json + +from dlpx.virtualization.api import common_pb2, platform_pb2 from dlpx.virtualization.common import RemoteConnection -from dlpx.virtualization.api import common_pb2 -from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.platform import validation_util as v -from dlpx.virtualization.platform.operation import Operation as Op from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationNotDefinedError, - OperationAlreadyDefinedError) - + IncorrectReturnTypeError, OperationAlreadyDefinedError, + OperationNotDefinedError) +from dlpx.virtualization.platform.operation import Operation as Op __all__ = ['DiscoveryOperations'] class DiscoveryOperations(object): - def __init__(self): self.repository_impl = None self.source_config_impl = None 
@@ -35,6 +32,7 @@ def repository_decorator(repository_impl): self.repository_impl = v.check_function(repository_impl, Op.DISCOVERY_REPOSITORY) return repository_impl + return repository_decorator def source_config(self): @@ -44,6 +42,7 @@ def source_config_decorator(source_config_impl): self.source_config_impl = v.check_function( source_config_impl, Op.DISCOVERY_SOURCE_CONFIG) return source_config_impl + return source_config_decorator def _internal_repository(self, request): @@ -76,20 +75,20 @@ def to_protobuf(repository): raise OperationNotDefinedError(Op.DISCOVERY_REPOSITORY) repositories = self.repository_impl( - source_connection=RemoteConnection.from_proto(request.source_connection)) + source_connection=RemoteConnection.from_proto( + request.source_connection)) # Validate that this is a list of Repository objects if not isinstance(repositories, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - type(repositories), - [RepositoryDefinition]) + raise IncorrectReturnTypeError(Op.DISCOVERY_REPOSITORY, + type(repositories), + [RepositoryDefinition]) - if not all(isinstance(repo, RepositoryDefinition) - for repo in repositories): + if not all( + isinstance(repo, RepositoryDefinition) + for repo in repositories): raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - [type(repo) for repo in repositories], + Op.DISCOVERY_REPOSITORY, [type(repo) for repo in repositories], [RepositoryDefinition]) repository_discovery_response = ( @@ -137,27 +136,29 @@ def to_protobuf(source_config): json.loads(request.repository.parameters.json)) source_configs = self.source_config_impl( - source_connection=RemoteConnection.from_proto(request.source_connection), + source_connection=RemoteConnection.from_proto( + request.source_connection), repository=repository_definition) # Validate that this is a list of SourceConfigDefinition objects if not isinstance(source_configs, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_SOURCE_CONFIG, - type(source_configs), 
- [SourceConfigDefinition]) + raise IncorrectReturnTypeError(Op.DISCOVERY_SOURCE_CONFIG, + type(source_configs), + [SourceConfigDefinition]) - if not all(isinstance(config, SourceConfigDefinition) - for config in source_configs): + if not all( + isinstance(config, SourceConfigDefinition) + for config in source_configs): raise IncorrectReturnTypeError( Op.DISCOVERY_SOURCE_CONFIG, - [type(config) for config in source_configs], - [SourceConfigDefinition]) + [type(config) + for config in source_configs], [SourceConfigDefinition]) source_config_discovery_response = ( platform_pb2.SourceConfigDiscoveryResponse()) - source_config_protobuf_list = [to_protobuf(config) - for config in source_configs] + source_config_protobuf_list = [ + to_protobuf(config) for config in source_configs + ] source_config_discovery_response.return_value.source_configs.extend( source_config_protobuf_list) return source_config_discovery_response diff --git a/platform/src/main/python/dlpx/virtualization/platform/_linked.py b/platform/src/main/python/dlpx/virtualization/platform/_linked.py index e06094cc..d6e8c656 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_linked.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_linked.py @@ -3,32 +3,27 @@ # # -*- coding: utf-8 -*- - """LinkedOperations for the Virtualization Platform """ import json + +from dlpx.virtualization.api import common_pb2, platform_pb2 from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment -from dlpx.virtualization.api import common_pb2 -from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.common.exceptions import PluginRuntimeError -from dlpx.virtualization.platform import Status -from dlpx.virtualization.platform import DirectSource -from dlpx.virtualization.platform import StagedSource -from dlpx.virtualization.platform import Mount -from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import (DirectSource, Mount, + 
MountSpecification, StagedSource, + Status) from dlpx.virtualization.platform import validation_util as v -from dlpx.virtualization.platform.operation import Operation as Op from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationNotDefinedError, - OperationAlreadyDefinedError) - + IncorrectReturnTypeError, OperationAlreadyDefinedError, + OperationNotDefinedError) +from dlpx.virtualization.platform.operation import Operation as Op __all__ = ['LinkedOperations'] class LinkedOperations(object): - def __init__(self): self.pre_snapshot_impl = None self.post_snapshot_impl = None @@ -45,6 +40,7 @@ def pre_snapshot_decorator(pre_snapshot_impl): self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, Op.LINKED_PRE_SNAPSHOT) return pre_snapshot_impl + return pre_snapshot_decorator def post_snapshot(self): @@ -54,6 +50,7 @@ def post_snapshot_decorator(post_snapshot_impl): self.post_snapshot_impl = v.check_function(post_snapshot_impl, Op.LINKED_POST_SNAPSHOT) return post_snapshot_impl + return post_snapshot_decorator def start_staging(self): @@ -63,6 +60,7 @@ def start_staging_decorator(start_staging_impl): self.start_staging_impl = v.check_function(start_staging_impl, Op.LINKED_START_STAGING) return start_staging_impl + return start_staging_decorator def stop_staging(self): @@ -72,6 +70,7 @@ def stop_staging_decorator(stop_staging_impl): self.stop_staging_impl = v.check_function(stop_staging_impl, Op.LINKED_STOP_STAGING) return stop_staging_impl + return stop_staging_decorator def status(self): @@ -80,6 +79,7 @@ def status_decorator(status_impl): raise OperationAlreadyDefinedError(Op.LINKED_STATUS) self.status_impl = v.check_function(status_impl, Op.LINKED_STATUS) return status_impl + return status_decorator def worker(self): @@ -88,16 +88,17 @@ def worker_decorator(worker_impl): raise OperationAlreadyDefinedError(Op.LINKED_WORKER) self.worker_impl = v.check_function(worker_impl, Op.LINKED_WORKER) return worker_impl + return 
worker_decorator def mount_specification(self): def mount_specification_decorator(mount_specification_impl): if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.LINKED_MOUNT_SPEC) + raise OperationAlreadyDefinedError(Op.LINKED_MOUNT_SPEC) self.mount_specification_impl = v.check_function( mount_specification_impl, Op.LINKED_MOUNT_SPEC) return mount_specification_impl + return mount_specification_decorator def _internal_direct_pre_snapshot(self, request): @@ -132,7 +133,8 @@ def _internal_direct_pre_snapshot(self, request): json.loads(request.direct_source.linked_source.parameters.json)) direct_source = DirectSource( guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), + connection=RemoteConnection.from_proto( + request.direct_source.connection), parameters=direct_source_definition) repository = RepositoryDefinition.from_dict( @@ -140,10 +142,9 @@ def _internal_direct_pre_snapshot(self, request): source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.pre_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) + self.pre_snapshot_impl(direct_source=direct_source, + repository=repository, + source_config=source_config) direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse() direct_pre_snapshot_response.return_value.CopyFrom( @@ -187,7 +188,8 @@ def to_protobuf(snapshot): json.loads(request.direct_source.linked_source.parameters.json)) direct_source = DirectSource( guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), + connection=RemoteConnection.from_proto( + request.direct_source.connection), parameters=direct_source_definition) repository = RepositoryDefinition.from_dict( @@ -195,15 +197,14 @@ def to_protobuf(snapshot): source_config = SourceConfigDefinition.from_dict( 
json.loads(request.source_config.parameters.json)) - snapshot = self.post_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) + snapshot = self.post_snapshot_impl(direct_source=direct_source, + repository=repository, + source_config=source_config) # Validate that this is a SnapshotDefinition object if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + raise IncorrectReturnTypeError(Op.LINKED_POST_SNAPSHOT, + type(snapshot), SnapshotDefinition) direct_post_snapshot_response = ( platform_pb2.DirectPostSnapshotResponse()) @@ -245,16 +246,18 @@ def _internal_staged_pre_snapshot(self, request): staged_source_definition = (LinkedSourceDefinition.from_dict( json.loads(linked_source.parameters.json))) staged_mount = request.staged_source.staged_mount - mount = Mount( - remote_environment=RemoteEnvironment.from_proto(staged_mount.remote_environment), - mount_path=staged_mount.mount_path, - shared_path=staged_mount.shared_path) + mount = Mount(remote_environment=RemoteEnvironment.from_proto( + staged_mount.remote_environment), + mount_path=staged_mount.mount_path, + shared_path=staged_mount.shared_path) staged_source = StagedSource( guid=linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + source_connection=RemoteConnection.from_proto( + request.staged_source.source_connection), parameters=staged_source_definition, mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + staged_connection=RemoteConnection.from_proto( + request.staged_source.staged_connection)) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) @@ -263,11 +266,10 @@ def _internal_staged_pre_snapshot(self, request): snapshot_parameters = SnapshotParametersDefinition.from_dict( 
json.loads(request.snapshot_parameters.parameters.json)) - self.pre_snapshot_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config, - snapshot_parameters=snapshot_parameters) + self.pre_snapshot_impl(staged_source=staged_source, + repository=repository, + source_config=source_config, + snapshot_parameters=snapshot_parameters) response = platform_pb2.StagedPreSnapshotResponse() response.return_value.CopyFrom(platform_pb2.StagedPreSnapshotResult()) @@ -309,19 +311,20 @@ def to_protobuf(snapshot): raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) + json.loads(request.staged_source.linked_source.parameters.json)) mount = Mount( - remote_environment= - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment), + remote_environment=RemoteEnvironment.from_proto( + request.staged_source.staged_mount.remote_environment), mount_path=request.staged_source.staged_mount.mount_path, shared_path=request.staged_source.staged_mount.shared_path) staged_source = StagedSource( guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + source_connection=RemoteConnection.from_proto( + request.staged_source.source_connection), parameters=staged_source_definition, mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + staged_connection=RemoteConnection.from_proto( + request.staged_source.staged_connection)) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) @@ -338,8 +341,8 @@ def to_protobuf(snapshot): # Validate that this is a SnapshotDefinition object if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + raise 
IncorrectReturnTypeError(Op.LINKED_POST_SNAPSHOT, + type(snapshot), SnapshotDefinition) response = platform_pb2.StagedPostSnapshotResponse() response.return_value.snapshot.CopyFrom(to_protobuf(snapshot)) @@ -375,29 +378,29 @@ def _internal_start_staging(self, request): raise OperationNotDefinedError(Op.LINKED_START_STAGING) staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) + json.loads(request.staged_source.linked_source.parameters.json)) mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + remote_environment=(RemoteEnvironment.from_proto( + request.staged_source.staged_mount.remote_environment)), mount_path=request.staged_source.staged_mount.mount_path, shared_path=request.staged_source.staged_mount.shared_path) staged_source = StagedSource( guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + source_connection=RemoteConnection.from_proto( + request.staged_source.source_connection), parameters=staged_source_definition, mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + staged_connection=RemoteConnection.from_proto( + request.staged_source.staged_connection)) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.start_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) + self.start_staging_impl(staged_source=staged_source, + repository=repository, + source_config=source_config) start_staging_response = platform_pb2.StartStagingResponse() start_staging_response.return_value.CopyFrom( @@ -434,29 +437,29 @@ def _internal_stop_staging(self, request): raise 
OperationNotDefinedError(Op.LINKED_STOP_STAGING) staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) + json.loads(request.staged_source.linked_source.parameters.json)) mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + remote_environment=(RemoteEnvironment.from_proto( + request.staged_source.staged_mount.remote_environment)), mount_path=request.staged_source.staged_mount.mount_path, shared_path=request.staged_source.staged_mount.shared_path) staged_source = StagedSource( guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + source_connection=RemoteConnection.from_proto( + request.staged_source.source_connection), parameters=staged_source_definition, mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + staged_connection=RemoteConnection.from_proto( + request.staged_source.staged_connection)) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.stop_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) + self.stop_staging_impl(staged_source=staged_source, + repository=repository, + source_config=source_config) stop_staging_response = platform_pb2.StopStagingResponse() stop_staging_response.return_value.CopyFrom( @@ -495,31 +498,32 @@ def _internal_status(self, request): staged_source_definition = LinkedSourceDefinition.from_dict( json.loads(request.staged_source.linked_source.parameters.json)) mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + remote_environment=(RemoteEnvironment.from_proto( + 
request.staged_source.staged_mount.remote_environment)), mount_path=request.staged_source.staged_mount.mount_path, shared_path=request.staged_source.staged_mount.shared_path) staged_source = StagedSource( guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + source_connection=RemoteConnection.from_proto( + request.staged_source.source_connection), parameters=staged_source_definition, mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + staged_connection=RemoteConnection.from_proto( + request.staged_source.staged_connection)) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - status = self.status_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) + status = self.status_impl(staged_source=staged_source, + repository=repository, + source_config=source_config) # Validate that this is a Status object. 
if not isinstance(status, Status): - raise IncorrectReturnTypeError( - Op.LINKED_STATUS, type(status), Status) + raise IncorrectReturnTypeError(Op.LINKED_STATUS, type(status), + Status) staged_status_response = platform_pb2.StagedStatusResponse() staged_status_response.return_value.status = status.value @@ -555,29 +559,29 @@ def _internal_worker(self, request): raise OperationNotDefinedError(Op.LINKED_WORKER) staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) + json.loads(request.staged_source.linked_source.parameters.json)) mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + remote_environment=(RemoteEnvironment.from_proto( + request.staged_source.staged_mount.remote_environment)), mount_path=request.staged_source.staged_mount.mount_path, shared_path=request.staged_source.staged_mount.shared_path) staged_source = StagedSource( guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + source_connection=RemoteConnection.from_proto( + request.staged_source.source_connection), parameters=staged_source_definition, mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + staged_connection=RemoteConnection.from_proto( + request.staged_source.staged_connection)) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.worker_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) + self.worker_impl(staged_source=staged_source, + repository=repository, + source_config=source_config) staged_worker_response = platform_pb2.StagedWorkerResponse() staged_worker_response.return_value.CopyFrom( @@ -630,30 +634,30 @@ def 
to_protobuf_ownership_spec(ownership_spec): staged_source_definition = LinkedSourceDefinition.from_dict( json.loads(request.staged_source.linked_source.parameters.json)) mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + remote_environment=(RemoteEnvironment.from_proto( + request.staged_source.staged_mount.remote_environment)), mount_path=request.staged_source.staged_mount.mount_path, shared_path=request.staged_source.staged_mount.shared_path) staged_source = StagedSource( guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + source_connection=RemoteConnection.from_proto( + request.staged_source.source_connection), parameters=staged_source_definition, mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + staged_connection=RemoteConnection.from_proto( + request.staged_source.staged_connection)) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) - mount_spec = self.mount_specification_impl( - staged_source=staged_source, - repository=repository) + mount_spec = self.mount_specification_impl(staged_source=staged_source, + repository=repository) # Validate that this is a MountSpecification object. if not isinstance(mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.LINKED_MOUNT_SPEC, - type(mount_spec), - MountSpecification) + raise IncorrectReturnTypeError(Op.LINKED_MOUNT_SPEC, + type(mount_spec), + MountSpecification) # Only one mount is supported for linked sources. 
mount_len = len(mount_spec.mounts) @@ -675,4 +679,4 @@ def to_protobuf_ownership_spec(ownership_spec): staged_mount_spec_response.return_value.ownership_spec.CopyFrom( ownership_spec) - return staged_mount_spec_response \ No newline at end of file + return staged_mount_spec_response diff --git a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py index 2758f34b..36f3922f 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py @@ -3,7 +3,6 @@ # # -*- coding: utf-8 -*- - """Plugin for the Virtualization Platform This module contains a skeleton of a plugin that allows users to extend the @@ -81,10 +80,8 @@ def my_configure_implementation(source, repository, snapshot): to have the import in the methods as the objects will exist at runtime. """ from dlpx.virtualization.platform import (DiscoveryOperations, - LinkedOperations, - VirtualOperations, - UpgradeOperations) - + LinkedOperations, UpgradeOperations, + VirtualOperations) __all__ = ['Plugin'] diff --git a/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py b/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py index 5768acde..4d2da24f 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_plugin_classes.py @@ -2,15 +2,14 @@ # Copyright (c) 2019 by Delphix. All rights reserved. 
# import re +from enum import Enum -import enum import six -from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment, \ - RemoteHost +from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, + RemoteHost) from dlpx.virtualization.common.exceptions import IncorrectTypeError -from dlpx.virtualization.platform.exceptions import \ - IncorrectReferenceFormatError - +from dlpx.virtualization.platform.exceptions import ( + IncorrectReferenceFormatError) """Classes used for Plugin Operations This module defines the non autogenerated classes used as input/output for @@ -21,25 +20,17 @@ of the autogenerated classes from the schemas (e.g. VirtualSourceDefinition) """ __all__ = [ - "VirtualSource", - "StagedSource", - "DirectSource", - "Status", - "Mount", - "OwnershipSpecification", - "MountSpecification"] + "VirtualSource", "StagedSource", "DirectSource", "Status", "MigrationType", + "Mount", "OwnershipSpecification", "MountSpecification" +] class VirtualSource(object): - def __init__(self, guid, connection, parameters, mounts): self._guid = guid if not isinstance(connection, RemoteConnection): - raise IncorrectTypeError( - RemoteConnection, - 'connection', - type(connection), - RemoteConnection) + raise IncorrectTypeError(RemoteConnection, 'connection', + type(connection), RemoteConnection) self._connection = connection self._parameters = parameters self._mounts = mounts @@ -67,26 +58,19 @@ def mounts(self): class StagedSource(object): - def __init__(self, guid, source_connection, parameters, mount, staged_connection): self._guid = guid if not isinstance(source_connection, RemoteConnection): - raise IncorrectTypeError( - RemoteConnection, - 'source_connection', - type(source_connection), - RemoteConnection) + raise IncorrectTypeError(RemoteConnection, 'source_connection', + type(source_connection), RemoteConnection) self._source_connection = source_connection self._parameters = parameters self._mount = mount if not 
isinstance(staged_connection, RemoteConnection): - raise IncorrectTypeError( - RemoteConnection, - 'staged_connection', - type(staged_connection), - RemoteConnection) + raise IncorrectTypeError(RemoteConnection, 'staged_connection', + type(staged_connection), RemoteConnection) self._staged_connection = staged_connection @property @@ -120,15 +104,11 @@ def staged_connection(self): class DirectSource(object): - def __init__(self, guid, connection, parameters): self._guid = guid if not isinstance(connection, RemoteConnection): - raise IncorrectTypeError( - RemoteConnection, - 'connection', - type(connection), - RemoteConnection) + raise IncorrectTypeError(RemoteConnection, 'connection', + type(connection), RemoteConnection) self._connection = connection self._parameters = parameters @@ -151,10 +131,16 @@ def parameters(self): return self._parameters -class Status(enum.Enum): +class Status(Enum): ACTIVE = 0 INACTIVE = 1 + +class MigrationType(Enum): + PLATFORM = 0 + LUA = 1 + + # # Only the next 3 classes need to have validation as the plugin writer actually # creates objects of these types unlike any other defined classes. @@ -162,10 +148,7 @@ class Status(enum.Enum): class Mount(object): - - def __init__(self, remote_environment, mount_path, shared_path=None): - """A Mount object asks for multiple Python objects (RemoteEnvironment, RemoteHost), which have parameters (such as name, binary_path and scratch_path) that require the plugin writer to provide values for. 
@@ -179,43 +162,46 @@ def __init__(self, remote_environment, mount_path, shared_path=None): def __is_correct_reference_format(reference): unix_format = re.compile("^UNIX_HOST_ENVIRONMENT-\d+$") win_format = re.compile("^WINDOWS_HOST_ENVIRONMENT-\d+$") - return bool(unix_format.match(reference)) or \ - bool(win_format.match(reference)) - + return (bool(unix_format.match(reference)) + or bool(win_format.match(reference))) def __make_remote_environment_from_reference(reference): - dummy_host = RemoteHost("dummy host", "dummy reference", "dummy binary path", "dummy scratch path") - if not isinstance(remote_environment, RemoteEnvironment) and not \ - __is_correct_reference_format(remote_environment): - raise RuntimeError("Reference '{}' is not a valid host environment reference.".format(reference)) + dummy_host = RemoteHost("dummy host", "dummy reference", + "dummy binary path", "dummy scratch path") + if (not isinstance(remote_environment, RemoteEnvironment) + and not __is_correct_reference_format(remote_environment)): + raise RuntimeError( + "Reference '{}' is not a valid host environment reference." 
+ .format(reference)) return RemoteEnvironment("dummy name", reference, dummy_host) # if reference is not a RemoteEnvironment nor a string - if not isinstance(remote_environment, RemoteEnvironment) and not \ - isinstance(remote_environment, six.string_types): - raise IncorrectTypeError( - Mount, - 'remote_environment', - type(remote_environment), - [RemoteEnvironment, six.string_types[0]]) + if (not isinstance(remote_environment, RemoteEnvironment) + and not isinstance(remote_environment, six.string_types)): + raise IncorrectTypeError(Mount, 'remote_environment', + type(remote_environment), + [RemoteEnvironment, six.string_types[0]]) # if reference is a string, but incorrectly formatted - if isinstance(remote_environment, six.string_types) and not __is_correct_reference_format(remote_environment): + if isinstance( + remote_environment, six.string_types + ) and not __is_correct_reference_format(remote_environment): raise IncorrectReferenceFormatError(remote_environment) # If the plugin has provided us with just a valid reference string, # convert to a real Python object if isinstance(remote_environment, six.string_types): - self._remote_environment = __make_remote_environment_from_reference(remote_environment) + self._remote_environment = ( + __make_remote_environment_from_reference(remote_environment)) else: self._remote_environment = remote_environment if not isinstance(mount_path, six.string_types): - raise IncorrectTypeError( - Mount, 'mount_path', type(mount_path), six.string_types[0]) + raise IncorrectTypeError(Mount, 'mount_path', type(mount_path), + six.string_types[0]) self._mount_path = mount_path if shared_path and not isinstance(shared_path, six.string_types[0]): - raise IncorrectTypeError( - Mount, 'shared_path', type(shared_path), six.string_types[0], False) + raise IncorrectTypeError(Mount, 'shared_path', type(shared_path), + six.string_types[0], False) self._shared_path = shared_path @property @@ -237,12 +223,12 @@ def shared_path(self): class 
OwnershipSpecification(object): def __init__(self, uid, gid): if not isinstance(uid, int): - raise IncorrectTypeError( - OwnershipSpecification, 'uid', type(uid), int) + raise IncorrectTypeError(OwnershipSpecification, 'uid', type(uid), + int) self._uid = uid if not isinstance(gid, int): - raise IncorrectTypeError( - OwnershipSpecification, 'gid', type(gid), int) + raise IncorrectTypeError(OwnershipSpecification, 'gid', type(gid), + int) self._gid = gid @property @@ -259,24 +245,20 @@ def gid(self): class MountSpecification(object): def __init__(self, mounts, ownership_specification=None): if not isinstance(mounts, list): - raise IncorrectTypeError( - MountSpecification, 'mounts', type(mounts), [Mount]) + raise IncorrectTypeError(MountSpecification, 'mounts', + type(mounts), [Mount]) if not all(isinstance(mount, Mount) for mount in mounts): - raise IncorrectTypeError( - MountSpecification, - 'mounts', - [type(mount) for mount in mounts], - [Mount]) + raise IncorrectTypeError(MountSpecification, 'mounts', + [type(mount) for mount in mounts], + [Mount]) self._mounts = mounts if (ownership_specification and not isinstance( ownership_specification, OwnershipSpecification)): - raise IncorrectTypeError( - MountSpecification, - 'ownership_specification', - type(ownership_specification), - OwnershipSpecification, - False) + raise IncorrectTypeError(MountSpecification, + 'ownership_specification', + type(ownership_specification), + OwnershipSpecification, False) self._ownership_specification = ownership_specification diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py index db13d731..fc9a5ae9 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -3,7 +3,6 @@ # # -*- coding: utf-8 -*- - """UpgradeOperations for the Virtualization Platform There are 5 different objects that we can 
upgrade. All migration ids must be @@ -15,12 +14,12 @@ """ import json import logging + from dlpx.virtualization.api import platform_pb2 -from dlpx.virtualization.platform import MigrationIdSet -from dlpx.virtualization.platform import validation_util as v -from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform import (LuaUpgradeMigrations, MigrationType, + PlatformUpgradeMigrations) from dlpx.virtualization.platform.exceptions import ( - IncorrectUpgradeObjectTypeError) + IncorrectUpgradeObjectTypeError, UnknownMigrationTypeError) logger = logging.getLogger(__name__) @@ -28,64 +27,88 @@ class UpgradeOperations(object): - def __init__(self): - self.__migration_id_set = MigrationIdSet() + self.platform_migrations = PlatformUpgradeMigrations() + self.lua_migrations = LuaUpgradeMigrations() - self.repository_id_to_impl = {} - self.source_config_id_to_impl = {} - self.linked_source_id_to_impl = {} - self.virtual_source_id_to_impl = {} - self.snapshot_id_to_impl = {} - - def repository(self, migration_id): + def repository(self, migration_id, migration_type=MigrationType.PLATFORM): def repository_decorator(repository_impl): - std_mig_id = self.__migration_id_set.add( - migration_id, repository_impl.__name__) - self.repository_id_to_impl[std_mig_id] = v.check_function( - repository_impl, Op.UPGRADE_REPOSITORY) + if migration_type == MigrationType.PLATFORM: + self.platform_migrations.add_repository( + migration_id, repository_impl) + elif migration_type == MigrationType.LUA: + self.lua_migrations.add_repository(migration_id, + repository_impl) + else: + raise UnknownMigrationTypeError(migration_type) return repository_impl + return repository_decorator - def source_config(self, migration_id): + def source_config(self, + migration_id, + migration_type=MigrationType.PLATFORM): def source_config_decorator(source_config_impl): - std_mig_id = self.__migration_id_set.add( - migration_id, source_config_impl.__name__) - 
self.source_config_id_to_impl[std_mig_id] = v.check_function( - source_config_impl, Op.UPGRADE_SOURCE_CONFIG) + if migration_type == MigrationType.PLATFORM: + self.platform_migrations.add_source_config( + migration_id, source_config_impl) + elif migration_type == MigrationType.LUA: + self.lua_migrations.add_source_config(migration_id, + source_config_impl) + else: + raise UnknownMigrationTypeError(migration_type) return source_config_impl + return source_config_decorator - def linked_source(self, migration_id): + def linked_source(self, + migration_id, + migration_type=MigrationType.PLATFORM): def linked_source_decorator(linked_source_impl): - std_mig_id = self.__migration_id_set.add( - migration_id, linked_source_impl.__name__) - self.linked_source_id_to_impl[std_mig_id] = v.check_function( - linked_source_impl, Op.UPGRADE_LINKED_SOURCE) + if migration_type == MigrationType.PLATFORM: + self.platform_migrations.add_linked_source( + migration_id, linked_source_impl) + elif migration_type == MigrationType.LUA: + self.lua_migrations.add_linked_source(migration_id, + linked_source_impl) + else: + raise UnknownMigrationTypeError(migration_type) return linked_source_impl + return linked_source_decorator - def virtual_source(self, migration_id): + def virtual_source(self, + migration_id, + migration_type=MigrationType.PLATFORM): def virtual_source_decorator(virtual_source_impl): - std_mig_id = self.__migration_id_set.add( - migration_id, virtual_source_impl.__name__) - self.virtual_source_id_to_impl[std_mig_id] = v.check_function( - virtual_source_impl, Op.UPGRADE_VIRTUAL_SOURCE) + if migration_type == MigrationType.PLATFORM: + self.platform_migrations.add_virtual_source( + migration_id, virtual_source_impl) + elif migration_type == MigrationType.LUA: + self.lua_migrations.add_virtual_source(migration_id, + virtual_source_impl) + else: + raise UnknownMigrationTypeError(migration_type) return virtual_source_impl + return virtual_source_decorator - def snapshot(self, 
migration_id): + def snapshot(self, migration_id, migration_type=MigrationType.PLATFORM): def snapshot_decorator(snapshot_impl): - std_mig_id = self.__migration_id_set.add( - migration_id, snapshot_impl.__name__) - self.snapshot_id_to_impl[std_mig_id] = v.check_function( - snapshot_impl, Op.UPGRADE_SNAPSHOT) + if migration_type == MigrationType.PLATFORM: + self.platform_migrations.add_snapshot(migration_id, + snapshot_impl) + elif migration_type == MigrationType.LUA: + self.lua_migrations.add_snapshot(migration_id, snapshot_impl) + else: + raise UnknownMigrationTypeError(migration_type) return snapshot_impl + return snapshot_decorator @property def migration_id_list(self): - return self.__migration_id_set.get_sorted_ids() + return self.platform_migrations.get_sorted_ids() @staticmethod def _success_upgrade_response(upgraded_dict): @@ -95,28 +118,31 @@ def _success_upgrade_response(upgraded_dict): return_value=upgrade_result) return upgrade_response - def __process_upgrade_request(self, request, id_to_impl): - """Iterate through all objects in the pre_upgrade_parameters map, - invoke all available migrations on each object and its metadata, - and return a map containing the updated metadata for each object. + @staticmethod + def _run_migration_upgrades(request, lua_impls_getter, + platform_impls_getter): + """ + Given the list of lua and platform migration to run, iterate and + invoke these migrations on each object and its metadata, and return a + dict containing the upgraded parameters. 
""" post_upgrade_parameters = {} + # + # For the request.migration_ids list, protobuf will preserve the + # ordering of repeated elements, so we can rely on the backend to + # give us the already sorted list of migrations + # + impls_list = lua_impls_getter( + request.lua_upgrade_version) + platform_impls_getter( + request.migration_ids) for (object_ref, metadata) in request.pre_upgrade_parameters.items(): # Load the object metadata into a dictionary current_metadata = json.loads(metadata) - # - # Loop through all migrations that were passed into the upgrade - # request. Protobuf will preserve the ordering of repeated - # elements, so we can rely on the backend to sort the migration - # ids before packing them into the request. - # - for migration_id in request.migration_ids: - # Only try to execute the function if the id exists in the map. - if migration_id in id_to_impl: - current_metadata = id_to_impl[migration_id](current_metadata) + for migration_function in impls_list: + current_metadata = migration_function(current_metadata) post_upgrade_parameters[object_ref] = json.dumps(current_metadata) - return self._success_upgrade_response(post_upgrade_parameters) + return post_upgrade_parameters def _internal_repository(self, request): """Upgrade repositories for plugins. 
@@ -125,10 +151,13 @@ def _internal_repository(self, request): raise IncorrectUpgradeObjectTypeError( request.type, platform_pb2.UpgradeRequest.REPOSITORY) - logger.debug('Upgrade repositories [{}]'.format( - ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + logger.debug('Upgrade repositories [{}]'.format(', '.join( + sorted(request.pre_upgrade_parameters.keys())))) - return self.__process_upgrade_request(request, self.repository_id_to_impl) + post_upgrade_parameters = self._run_migration_upgrades( + request, self.lua_migrations.get_repository_impls_to_exec, + self.platform_migrations.get_repository_impls_to_exec) + return self._success_upgrade_response(post_upgrade_parameters) def _internal_source_config(self, request): """Upgrade source configs for plugins. @@ -137,10 +166,13 @@ def _internal_source_config(self, request): raise IncorrectUpgradeObjectTypeError( request.type, platform_pb2.UpgradeRequest.SOURCECONFIG) - logger.debug('Upgrade source configs [{}]'.format( - ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + logger.debug('Upgrade source configs [{}]'.format(', '.join( + sorted(request.pre_upgrade_parameters.keys())))) - return self.__process_upgrade_request(request, self.source_config_id_to_impl) + post_upgrade_parameters = self._run_migration_upgrades( + request, self.lua_migrations.get_source_config_impls_to_exec, + self.platform_migrations.get_source_config_impls_to_exec) + return self._success_upgrade_response(post_upgrade_parameters) def _internal_linked_source(self, request): """Upgrade linked source for plugins. 
@@ -149,10 +181,13 @@ def _internal_linked_source(self, request): raise IncorrectUpgradeObjectTypeError( request.type, platform_pb2.UpgradeRequest.LINKEDSOURCE) - logger.debug('Upgrade linked sources [{}]'.format( - ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + logger.debug('Upgrade linked sources [{}]'.format(', '.join( + sorted(request.pre_upgrade_parameters.keys())))) - return self.__process_upgrade_request(request, self.linked_source_id_to_impl) + post_upgrade_parameters = self._run_migration_upgrades( + request, self.lua_migrations.get_linked_source_impls_to_exec, + self.platform_migrations.get_linked_source_impls_to_exec) + return self._success_upgrade_response(post_upgrade_parameters) def _internal_virtual_source(self, request): """Upgrade virtual sources for plugins. @@ -161,10 +196,13 @@ def _internal_virtual_source(self, request): raise IncorrectUpgradeObjectTypeError( request.type, platform_pb2.UpgradeRequest.VIRTUALSOURCE) - logger.debug('Upgrade virtual sources [{}]'.format( - ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + logger.debug('Upgrade virtual sources [{}]'.format(', '.join( + sorted(request.pre_upgrade_parameters.keys())))) - return self.__process_upgrade_request(request, self.virtual_source_id_to_impl) + post_upgrade_parameters = self._run_migration_upgrades( + request, self.lua_migrations.get_virtual_source_impls_to_exec, + self.platform_migrations.get_virtual_source_impls_to_exec) + return self._success_upgrade_response(post_upgrade_parameters) def _internal_snapshot(self, request): """Upgrade snapshots for plugins. 
@@ -173,7 +211,10 @@ def _internal_snapshot(self, request): raise IncorrectUpgradeObjectTypeError( request.type, platform_pb2.UpgradeRequest.SNAPSHOT) - logger.debug('Upgrade snapshots [{}]'.format( - ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + logger.debug('Upgrade snapshots [{}]'.format(', '.join( + sorted(request.pre_upgrade_parameters.keys())))) - return self.__process_upgrade_request(request, self.snapshot_id_to_impl) + post_upgrade_parameters = self._run_migration_upgrades( + request, self.lua_migrations.get_snapshot_impls_to_exec, + self.platform_migrations.get_snapshot_impls_to_exec) + return self._success_upgrade_response(post_upgrade_parameters) diff --git a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py index 76976c2a..6c048c46 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py @@ -3,30 +3,25 @@ # # -*- coding: utf-8 -*- - """VirtualOperations for the Virtualization Platform """ import json + +from dlpx.virtualization.api import common_pb2, platform_pb2 from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment -from dlpx.virtualization.api import common_pb2 -from dlpx.virtualization.api import platform_pb2 -from dlpx.virtualization.platform import VirtualSource -from dlpx.virtualization.platform import Status -from dlpx.virtualization.platform import Mount -from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import (Mount, MountSpecification, Status, + VirtualSource) from dlpx.virtualization.platform import validation_util as v -from dlpx.virtualization.platform.operation import Operation as Op from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationNotDefinedError, - OperationAlreadyDefinedError) - + IncorrectReturnTypeError, OperationAlreadyDefinedError, + 
OperationNotDefinedError) +from dlpx.virtualization.platform.operation import Operation as Op __all__ = ['VirtualOperations'] class VirtualOperations(object): - def __init__(self): self.configure_impl = None self.unconfigure_impl = None @@ -46,6 +41,7 @@ def configure_decorator(configure_impl): self.configure_impl = v.check_function(configure_impl, Op.VIRTUAL_CONFIGURE) return configure_impl + return configure_decorator def unconfigure(self): @@ -55,6 +51,7 @@ def unconfigure_decorator(unconfigure_impl): self.unconfigure_impl = v.check_function(unconfigure_impl, Op.VIRTUAL_UNCONFIGURE) return unconfigure_impl + return unconfigure_decorator def reconfigure(self): @@ -64,6 +61,7 @@ def reconfigure_decorator(reconfigure_impl): self.reconfigure_impl = v.check_function(reconfigure_impl, Op.VIRTUAL_RECONFIGURE) return reconfigure_impl + return reconfigure_decorator def start(self): @@ -72,6 +70,7 @@ def start_decorator(start_impl): raise OperationAlreadyDefinedError(Op.VIRTUAL_START) self.start_impl = v.check_function(start_impl, Op.VIRTUAL_START) return start_impl + return start_decorator def stop(self): @@ -80,6 +79,7 @@ def stop_decorator(stop_impl): raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) self.stop_impl = v.check_function(stop_impl, Op.VIRTUAL_STOP) return stop_impl + return stop_decorator def pre_snapshot(self): @@ -89,6 +89,7 @@ def pre_snapshot_decorator(pre_snapshot_impl): self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, Op.VIRTUAL_PRE_SNAPSHOT) return pre_snapshot_impl + return pre_snapshot_decorator def post_snapshot(self): @@ -98,6 +99,7 @@ def post_snapshot_decorator(post_snapshot_impl): self.post_snapshot_impl = v.check_function( post_snapshot_impl, Op.VIRTUAL_POST_SNAPSHOT) return post_snapshot_impl + return post_snapshot_decorator def status(self): @@ -106,6 +108,7 @@ def status_decorator(status_impl): raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) self.status_impl = v.check_function(status_impl, Op.VIRTUAL_STATUS) return 
status_impl + return status_decorator def initialize(self): @@ -115,24 +118,25 @@ def initialize_decorator(initialize_impl): self.initialize_impl = v.check_function(initialize_impl, Op.VIRTUAL_INITIALIZE) return initialize_impl + return initialize_decorator def mount_specification(self): def mount_specification_decorator(mount_specification_impl): if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.VIRTUAL_MOUNT_SPEC) + raise OperationAlreadyDefinedError(Op.VIRTUAL_MOUNT_SPEC) self.mount_specification_impl = v.check_function( mount_specification_impl, Op.VIRTUAL_MOUNT_SPEC) return mount_specification_impl + return mount_specification_decorator @staticmethod def _from_protobuf_single_subset_mount(single_subset_mount): - return Mount( - remote_environment=RemoteEnvironment.from_proto(single_subset_mount.remote_environment), - mount_path=single_subset_mount.mount_path, - shared_path=single_subset_mount.shared_path) + return Mount(remote_environment=RemoteEnvironment.from_proto( + single_subset_mount.remote_environment), + mount_path=single_subset_mount.mount_path, + shared_path=single_subset_mount.shared_path) def _internal_configure(self, request): """Configure operation wrapper. 
@@ -167,29 +171,30 @@ def _internal_configure(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) snapshot = SnapshotDefinition.from_dict( json.loads(request.snapshot.parameters.json)) - config = self.configure_impl( - virtual_source=virtual_source, - repository=repository, - snapshot=snapshot) + config = self.configure_impl(virtual_source=virtual_source, + repository=repository, + snapshot=snapshot) # Validate that this is a SourceConfigDefinition object. 
if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_CONFIGURE, type(config), SourceConfigDefinition) + raise IncorrectReturnTypeError(Op.VIRTUAL_CONFIGURE, type(config), + SourceConfigDefinition) configure_response = platform_pb2.ConfigureResponse() configure_response.return_value.source_config.parameters.json = ( @@ -225,24 +230,25 @@ def _internal_unconfigure(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.unconfigure_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + self.unconfigure_impl(repository=repository, + source_config=source_config, + virtual_source=virtual_source) unconfigure_response = platform_pb2.UnconfigureResponse() unconfigure_response.return_value.CopyFrom( @@ -273,13 +279,15 @@ def _internal_reconfigure(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - 
virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) snapshot = SnapshotDefinition.from_dict( json.loads(request.snapshot.parameters.json)) @@ -288,16 +296,16 @@ def _internal_reconfigure(self, request): repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) - config = self.reconfigure_impl( - snapshot=snapshot, - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + config = self.reconfigure_impl(snapshot=snapshot, + repository=repository, + source_config=source_config, + virtual_source=virtual_source) # Validate that this is a SourceConfigDefinition object. 
if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_RECONFIGURE, type(config), SourceConfigDefinition) + raise IncorrectReturnTypeError(Op.VIRTUAL_RECONFIGURE, + type(config), + SourceConfigDefinition) reconfigure_response = platform_pb2.ReconfigureResponse() reconfigure_response.return_value.source_config.parameters.json = ( @@ -331,23 +339,24 @@ def _internal_start(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.start_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + self.start_impl(repository=repository, + source_config=source_config, + virtual_source=virtual_source) start_response = platform_pb2.StartResponse() start_response.return_value.CopyFrom(platform_pb2.StartResult()) @@ -379,23 +388,24 @@ def _internal_stop(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = 
VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.stop_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + self.stop_impl(repository=repository, + source_config=source_config, + virtual_source=virtual_source) stop_response = platform_pb2.StopResponse() stop_response.return_value.CopyFrom(platform_pb2.StopResult()) @@ -432,23 +442,24 @@ def _internal_pre_snapshot(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( 
json.loads(request.source_config.parameters.json)) - self.pre_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + self.pre_snapshot_impl(repository=repository, + source_config=source_config, + virtual_source=virtual_source) virtual_pre_snapshot_response = ( platform_pb2.VirtualPreSnapshotResponse()) @@ -491,28 +502,29 @@ def to_protobuf(snapshot): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - snapshot = self.post_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + snapshot = self.post_snapshot_impl(repository=repository, + source_config=source_config, + virtual_source=virtual_source) # Validate that this is a SnapshotDefinition object if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + raise IncorrectReturnTypeError(Op.VIRTUAL_POST_SNAPSHOT, + type(snapshot), SnapshotDefinition) virtual_post_snapshot_response = ( platform_pb2.VirtualPostSnapshotResponse()) @@ -549,28 +561,29 @@ def 
_internal_status(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - virtual_status = self.status_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + virtual_status = self.status_impl(repository=repository, + source_config=source_config, + virtual_source=virtual_source) # Validate that this is a Status object. 
if not isinstance(virtual_status, Status): - raise IncorrectReturnTypeError( - Op.VIRTUAL_STATUS, type(virtual_status), Status) + raise IncorrectReturnTypeError(Op.VIRTUAL_STATUS, + type(virtual_status), Status) virtual_status_response = platform_pb2.VirtualStatusResponse() virtual_status_response.return_value.status = virtual_status.value @@ -601,23 +614,24 @@ def _internal_initialize(self, request): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) source_config = SourceConfigDefinition.from_dict( json.loads(request.source_config.parameters.json)) - self.initialize_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) + self.initialize_impl(repository=repository, + source_config=source_config, + virtual_source=virtual_source) initialize_response = platform_pb2.InitializeResponse() initialize_response.return_value.CopyFrom( platform_pb2.InitializeResult()) @@ -668,27 +682,27 @@ def to_protobuf_ownership_spec(ownership_spec): virtual_source_definition = VirtualSourceDefinition.from_dict( json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - 
virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) + mounts = [ + VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts + ] + virtual_source = VirtualSource(guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto( + request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) repository = RepositoryDefinition.from_dict( json.loads(request.repository.parameters.json)) virtual_mount_spec = self.mount_specification_impl( - repository=repository, - virtual_source=virtual_source) + repository=repository, virtual_source=virtual_source) # Validate that this is a MountSpecification object if not isinstance(virtual_mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.VIRTUAL_MOUNT_SPEC, - type(virtual_mount_spec), - MountSpecification) + raise IncorrectReturnTypeError(Op.VIRTUAL_MOUNT_SPEC, + type(virtual_mount_spec), + MountSpecification) virtual_mount_spec_response = platform_pb2.VirtualMountSpecResponse() @@ -698,7 +712,8 @@ def to_protobuf_ownership_spec(ownership_spec): virtual_mount_spec_response.return_value.ownership_spec.CopyFrom( ownership_spec) - mounts_list = [to_protobuf_single_mount(m) - for m in virtual_mount_spec.mounts] + mounts_list = [ + to_protobuf_single_mount(m) for m in virtual_mount_spec.mounts + ] virtual_mount_spec_response.return_value.mounts.extend(mounts_list) - return virtual_mount_spec_response \ No newline at end of file + return virtual_mount_spec_response diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py index d800120f..7b28d855 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py +++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py 
@@ -1,8 +1,8 @@ # # Copyright (c) 2019 by Delphix. All rights reserved. # -from dlpx.virtualization.common.exceptions import ( - PlatformError, PluginRuntimeError) +from dlpx.virtualization.common.exceptions import (PlatformError, + PluginRuntimeError) class UserError(Exception): @@ -24,7 +24,6 @@ class UserError(Exception): action (str): Suggested action to be taken. output (str): Output to be shown. """ - @property def message(self): return self.args[0] @@ -39,7 +38,7 @@ class IncorrectReturnTypeError(PluginRuntimeError): Args: operation (Operation): The Operation enum of the operation being run - actual type (Type or List[Type]): type(s) returned from the operation + actual_type (Type or List[Type]): type(s) returned from the operation expected_type (Type): The type of the parameter that was expected. Attributes: @@ -47,7 +46,6 @@ class IncorrectReturnTypeError(PluginRuntimeError): should be returning what type. """ - def __init__(self, operation, actual_type, expected_type): actual, expected = self.get_actual_and_expected_type( actual_type, expected_type) @@ -62,7 +60,7 @@ class IncorrectUpgradeObjectTypeError(PluginRuntimeError): called with the incorrect object type to upgrade. Args: - actual type (platform_pb2.UpgradeRequest.Type): type that was passed in + actual_type (platform_pb2.UpgradeRequest.Type): type that was passed in expected_type (platform_pb2.UpgradeRequest.Type): expected type Attributes: @@ -70,7 +68,6 @@ class IncorrectUpgradeObjectTypeError(PluginRuntimeError): should be returning what type. """ - def __init__(self, actual_type, expected_type): message = ( 'The upgrade operation received objects with {} type but should' @@ -78,6 +75,25 @@ def __init__(self, actual_type, expected_type): super(IncorrectUpgradeObjectTypeError, self).__init__(message) +class UnknownMigrationTypeError(PlatformError): + """UnknownMigrationTypeError gets thrown when the migration type that is + set on an upgrade migration decorator is not one of PLATFORM or LUA. 
+ + Args: + actual_type (MigrationType): type that was passed in + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + + """ + def __init__(self, actual_type, expected_type): + message = ( + 'The upgrade migrationType received was {} type which is not' + ' supported.'.format(actual_type)) + super(UnknownMigrationTypeError, self).__init__(message) + + class OperationAlreadyDefinedError(PlatformError): """OperationAlreadyDefinedError gets thrown when the plugin writer tries to define an operation more than ones. @@ -152,9 +168,8 @@ def __init__(self, message): @classmethod def from_fields(cls, migration_id, function_name, format): message = ("The migration id '{}' used in the function '{}' does not" - " follow the correct format '{}'.".format(migration_id, - function_name, - format)) + " follow the correct format '{}'.".format( + migration_id, function_name, format)) return cls(message) @@ -171,11 +186,23 @@ class MigrationIdAlreadyUsedError(PlatformError): message (str): A localized user-readable message about what operation should be returning what type. 
""" - def __init__(self, migration_id, std_migration_id, function_name): + def __init__(self, message): + super(MigrationIdAlreadyUsedError, self).__init__(message) + + @classmethod + def fromMigrationId(cls, migration_id, std_migration_id, function_name): message = ("The migration id '{}' used in the function '{}' has the" " same canonical form '{}' as another migration.".format( - migration_id, function_name, std_migration_id)) - super(MigrationIdAlreadyUsedError, self).__init__(message) + migration_id, function_name, std_migration_id)) + return cls(message) + + @classmethod + def fromLuaVersion(cls, migration_id, function_name, decorator_name): + message = ("The lua major minor version '{}' used in the function" + " '{}' decorated by '{}' has already been used.".format( + migration_id, function_name, decorator_name)) + return cls(message) + class DecoratorNotFunctionError(PlatformError): """DecoratorNotFunctionError gets thrown when the decorated variable is @@ -214,6 +241,7 @@ def __init__(self, reference): " environment reference.".format(reference)) super(IncorrectReferenceFormatError, self).__init__(message) + class IncorrectPluginCodeError(PluginRuntimeError): """ This gets thrown if the import validations come across invalid plugin @@ -230,4 +258,4 @@ def message(self): return self.args[0] def __init__(self, message): - super(IncorrectPluginCodeError, self).__init__(message) \ No newline at end of file + super(IncorrectPluginCodeError, self).__init__(message) diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_util.py b/platform/src/main/python/dlpx/virtualization/platform/import_util.py index a8e8f807..2a9ef82e 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/import_util.py +++ b/platform/src/main/python/dlpx/virtualization/platform/import_util.py @@ -5,7 +5,6 @@ from dlpx.virtualization.platform import exceptions - _IMPORT_CHECKS = {} _POST_IMPORT_CHECKS = {} @@ -135,4 +134,4 @@ def 
validate_post_import(plugin_module): # for key in sorted(_POST_IMPORT_CHECKS.keys()): warnings.extend(_POST_IMPORT_CHECKS[key](plugin_module)) - return warnings \ No newline at end of file + return warnings diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_validations.py b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py index 1a7ef3da..12960511 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/import_validations.py +++ b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py @@ -3,10 +3,9 @@ # import inspect -from dlpx.virtualization.platform.import_util import (import_check, - post_import_check, - PluginModule) from dlpx.virtualization.platform import exceptions +from dlpx.virtualization.platform.import_util import (import_check, + post_import_check) @import_check(ordinal=1) @@ -39,13 +38,12 @@ def validate_entry_point(plugin_module): @import_check(ordinal=3) def validate_plugin_object(plugin_module): plugin_object = getattr(plugin_module.module_content, - plugin_module.entry_point, - None) + plugin_module.entry_point, None) if plugin_object is None: raise exceptions.UserError('Plugin object retrieved from the entry' - ' point {} is None'.format - (plugin_module.entry_point)) + ' point {} is None'.format( + plugin_module.entry_point)) @post_import_check(ordinal=1) @@ -129,16 +127,18 @@ def check_upgrade_operations(plugin_module): if plugin_op_type != 'UpgradeOperations': continue - warnings.extend(_check_upgrade_args( - plugin_attrib, plugin_module.expected_upgrade_args)) + warnings.extend( + _check_upgrade_args(plugin_attrib, + plugin_module.expected_upgrade_args)) return warnings def _check_upgrade_args(upgrade_operations, expected_upgrade_args): """ - Does named argument validation of all functions in dictionaries by looping - first through all the attributes in the UpgradeOperations for this plugin. 
+ This function does named argument validation of all migration functions by + first looping through each of the migration helpers (platform_migrations + and lua_migrations) then looping through all those attributes. Any attributes that are not dictionaries that map migration_id -> upgrade_function are skipped. We then loop through every key/value pair of each of the dictionaries and validate that the argument in the defined @@ -146,22 +146,24 @@ def _check_upgrade_args(upgrade_operations, expected_upgrade_args): """ warnings = [] - for attribute_name, attribute in vars(upgrade_operations).items(): - if attribute_name not in expected_upgrade_args.keys(): - # Skip if not in one of the operation dicts we store functions in. - continue - # - # If the attribute_name was in the expected upgrade dicts then we know - # it is a dict containing migration id -> upgrade function that we can - # iterate on. - # - for migration_id, migration_func in attribute.items(): - actual = inspect.getargspec(migration_func).args - expected = expected_upgrade_args[attribute_name] - warnings.extend( - _check_args(method_name=migration_func.__name__, - expected_args=expected, - actual_args=actual)) + for migration_helper in vars(upgrade_operations).values(): + # Next we must loop through each of the attributes (Should be just two) + for attribute_name, attribute in vars(migration_helper).items(): + if attribute_name not in expected_upgrade_args.keys(): + # Skip if not in one of the operation dictionaries. + continue + # + # If the attribute_name was in the expected upgrade dicts then we + # know it is a dict containing migration id -> upgrade function + # that we can iterate on. 
+ # + for migration_func in attribute.values(): + actual = inspect.getargspec(migration_func).args + expected = expected_upgrade_args[attribute_name] + warnings.extend( + _check_args(method_name=migration_func.__name__, + expected_args=expected, + actual_args=actual)) return warnings @@ -188,4 +190,4 @@ def _lookup_expected_args(plugin_module, plugin_op_type, plugin_op_name): plugin_op_name] else: return plugin_module.expected_staged_args_by_op[plugin_op_type][ - plugin_op_name] \ No newline at end of file + plugin_op_name] diff --git a/platform/src/main/python/dlpx/virtualization/platform/migration_helper.py b/platform/src/main/python/dlpx/virtualization/platform/migration_helper.py new file mode 100644 index 00000000..b53e7def --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/migration_helper.py @@ -0,0 +1,327 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +import re + +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.exceptions import ( + MigrationIdAlreadyUsedError, MigrationIdIncorrectFormatError, + MigrationIdIncorrectTypeError) +from dlpx.virtualization.platform.operation import Operation as Op + + +class UpgradeMigrations(object): + def __init__(self): + self._repository_id_to_impl = {} + self._source_config_id_to_impl = {} + self._linked_source_id_to_impl = {} + self._virtual_source_id_to_impl = {} + self._snapshot_id_to_impl = {} + + def add_repository(self, migration_id, repository_impl): + self._repository_id_to_impl[migration_id] = v.check_function( + repository_impl, Op.UPGRADE_REPOSITORY) + + def add_source_config(self, migration_id, source_config_impl): + self._source_config_id_to_impl[migration_id] = v.check_function( + source_config_impl, Op.UPGRADE_SOURCE_CONFIG) + + def add_linked_source(self, migration_id, linked_source_impl): + self._linked_source_id_to_impl[migration_id] = v.check_function( + linked_source_impl, Op.UPGRADE_LINKED_SOURCE) + + def 
add_virtual_source(self, migration_id, virtual_source_impl): + self._virtual_source_id_to_impl[migration_id] = v.check_function( + virtual_source_impl, Op.UPGRADE_VIRTUAL_SOURCE) + + def add_snapshot(self, migration_id, snapshot_impl): + self._snapshot_id_to_impl[migration_id] = v.check_function( + snapshot_impl, Op.UPGRADE_SNAPSHOT) + + def get_repository_dict(self): + """dict: The migration id to implementation for repository migrations. + """ + return self._repository_id_to_impl + + def get_source_config_dict(self): + """dict: The migration id to implementation for source config + migrations. + """ + return self._source_config_id_to_impl + + def get_linked_source_dict(self): + """dict: The migration id to implementation for linked source + migrations. + """ + return self._linked_source_id_to_impl + + def get_virtual_source_dict(self): + """dict: The migration id to implementation for virtual source + migrations. + """ + return self._virtual_source_id_to_impl + + def get_snapshot_dict(self): + """dict: The migration id to implementation for snapshot migrations. + """ + return self._snapshot_id_to_impl + + +class PlatformUpgradeMigrations(UpgradeMigrations): + """ + Keeps track of all migrations and validites/standardizes them as they are + added / parsed. + + Exceptions can be thrown when trying to add a new migration id. Otherwise + at the end of reading in all migration functions can be gotten in the + correct order. + """ + MIGRATION_ID_REGEX = re.compile(r'^\d+(\.\d+)*$') + + def __init__(self): + """ + The list of migration ids will store migrations as an array of ids + where the id is represented by the standardized array of positive + integers. 
For example if there were these ids: 1.0.0, 1.2.03, and + 2.0.1.0, __migration_ids would be [ [1], [1, 2, 3], [2, 0, 1]] + """ + self.__migration_ids = [] + super(PlatformUpgradeMigrations, self).__init__() + + def add_repository(self, migration_id, repository_impl): + std_mig_id = self.__add(migration_id, repository_impl.__name__) + super(PlatformUpgradeMigrations, + self).add_repository(std_mig_id, repository_impl) + + def add_source_config(self, migration_id, source_config_impl): + std_mig_id = self.__add(migration_id, source_config_impl.__name__) + super(PlatformUpgradeMigrations, + self).add_source_config(std_mig_id, source_config_impl) + + def add_linked_source(self, migration_id, linked_source_impl): + std_mig_id = self.__add(migration_id, linked_source_impl.__name__) + super(PlatformUpgradeMigrations, + self).add_linked_source(std_mig_id, linked_source_impl) + + def add_virtual_source(self, migration_id, virtual_source_impl): + std_mig_id = self.__add(migration_id, virtual_source_impl.__name__) + super(PlatformUpgradeMigrations, + self).add_virtual_source(std_mig_id, virtual_source_impl) + + def add_snapshot(self, migration_id, snapshot_impl): + std_mig_id = self.__add(migration_id, snapshot_impl.__name__) + super(PlatformUpgradeMigrations, + self).add_snapshot(std_mig_id, snapshot_impl) + + def __add(self, migration_id, impl_name): + """ + Validates that the migration id is the correct type/format and then + return the canonical format of the id. Add the id as an array of + integers into the list of migration ids. + """ + # First validate that the migration_id is the correct type/format. + self.__validate_migration_id(migration_id, impl_name) + + # Then we must standardize the migration_id. 
+ std_migration_id = self.__standardize_migration_id_to_array( + migration_id, impl_name) + std_string = '.'.join(str(i) for i in std_migration_id) + + # Then we should check if this migration_id has already been used + if std_migration_id in self.__migration_ids: + raise MigrationIdAlreadyUsedError.fromMigrationId( + migration_id, std_string, impl_name) + + # Lastly we should add this new array into the internal migration list. + self.__migration_ids.append(std_migration_id) + + # Return back the standardized format of the migration id + return std_string + + @staticmethod + def __validate_migration_id(migration_id, impl_name): + # First validate that the id is a string + if not isinstance(migration_id, basestring): + raise MigrationIdIncorrectTypeError(migration_id, impl_name) + + # Next check if the id is the right format + if not PlatformUpgradeMigrations.MIGRATION_ID_REGEX.match( + migration_id): + raise MigrationIdIncorrectFormatError.from_fields( + migration_id, impl_name, + PlatformUpgradeMigrations.MIGRATION_ID_REGEX.pattern) + + @staticmethod + def __standardize_migration_id_to_array(migration_id, impl_name): + # Split on the period and convert to integer + array = [int(i) for i in migration_id.split('.')] + + # + # We cannot allow a migration id of essentially '0' because otherwise + # there would be no way to add a migration that goes before this. + # + if not any(array): + raise MigrationIdIncorrectFormatError( + "The migration id '{}' used in the function '{}' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id, impl_name)) + + # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 + while array: + if not array[-1]: + # Remove the last element which is a zero from array + array.pop() + else: + break + + return array + + def get_sorted_ids(self): + # First sort the migration ids + self.__migration_ids.sort() + + # Then convert all these arrays to the usual string format. 
+ return [ + '.'.join(str(i) for i in migration_id) + for migration_id in self.__migration_ids + ] + + def get_repository_impls_to_exec(self, migration_id_list): + return self.__get_impls(migration_id_list, self.get_repository_dict()) + + def get_source_config_impls_to_exec(self, migration_id_list): + return self.__get_impls(migration_id_list, + self.get_source_config_dict()) + + def get_linked_source_impls_to_exec(self, migration_id_list): + return self.__get_impls(migration_id_list, + self.get_linked_source_dict()) + + def get_virtual_source_impls_to_exec(self, migration_id_list): + return self.__get_impls(migration_id_list, + self.get_virtual_source_dict()) + + def get_snapshot_impls_to_exec(self, migration_id_list): + return self.__get_impls(migration_id_list, self.get_snapshot_dict()) + + @staticmethod + def __get_impls(migration_id_list, impl_dict): + return_list = [] + for migration_id in migration_id_list: + # Should only add the function if the id exists in the map. + if migration_id in impl_dict: + return_list.append(impl_dict[migration_id]) + return return_list + + +class LuaUpgradeMigrations(UpgradeMigrations): + LUA_VERSION_REGEX = re.compile(r'^\d+\.\d+$') + + def __init__(self): + super(LuaUpgradeMigrations, self).__init__() + + def add_repository(self, migration_id, repository_impl): + std_mig_id = self.__validate_lua_major_minor_version( + migration_id, repository_impl.__name__, + Op.UPGRADE_REPOSITORY.value, self.get_repository_dict) + super(LuaUpgradeMigrations, + self).add_repository(std_mig_id, repository_impl) + + def add_source_config(self, migration_id, source_config_impl): + std_mig_id = self.__validate_lua_major_minor_version( + migration_id, source_config_impl.__name__, + Op.UPGRADE_SOURCE_CONFIG.value, self.get_source_config_dict) + super(LuaUpgradeMigrations, + self).add_source_config(std_mig_id, source_config_impl) + + def add_linked_source(self, migration_id, linked_source_impl): + std_mig_id = self.__validate_lua_major_minor_version( 
+ migration_id, linked_source_impl.__name__, + Op.UPGRADE_LINKED_SOURCE.value, self.get_linked_source_dict) + super(LuaUpgradeMigrations, + self).add_linked_source(std_mig_id, linked_source_impl) + + def add_virtual_source(self, migration_id, virtual_source_impl): + std_mig_id = self.__validate_lua_major_minor_version( + migration_id, virtual_source_impl.__name__, + Op.UPGRADE_VIRTUAL_SOURCE.value, self.get_virtual_source_dict) + super(LuaUpgradeMigrations, + self).add_virtual_source(std_mig_id, virtual_source_impl) + + def add_snapshot(self, migration_id, snapshot_impl): + std_mig_id = self.__validate_lua_major_minor_version( + migration_id, snapshot_impl.__name__, Op.UPGRADE_SNAPSHOT.value, + self.get_snapshot_dict) + super(LuaUpgradeMigrations, self).add_snapshot(std_mig_id, + snapshot_impl) + + @staticmethod + def __validate_lua_major_minor_version(migration_id, impl_name, + decorator_name, impl_getter): + # First validate that the major minor version is a string + if not isinstance(migration_id, basestring): + raise MigrationIdIncorrectTypeError(migration_id, impl_name) + + # Next check if the id already exists in this particular dictionary + if migration_id in impl_getter(): + raise MigrationIdAlreadyUsedError.fromLuaVersion( + migration_id, impl_name, decorator_name) + + # Lastly check if the id is the right format for a lua version + if not LuaUpgradeMigrations.LUA_VERSION_REGEX.match(migration_id): + raise MigrationIdIncorrectFormatError.from_fields( + migration_id, impl_name, + LuaUpgradeMigrations.LUA_VERSION_REGEX.pattern) + + # + # Now we want to decompose the version string to get rid of any + # leading zeros (such as 1.01 -> 1.1) + # + return '.'.join( + str(i) for i in [int(i) for i in migration_id.split('.')]) + + def get_repository_impls_to_exec(self, migration_id): + return self.__get_sorted_impls(migration_id, + self.get_repository_dict()) + + def get_source_config_impls_to_exec(self, migration_id): + return self.__get_sorted_impls(migration_id, 
+ self.get_source_config_dict()) + + def get_linked_source_impls_to_exec(self, migration_id): + return self.__get_sorted_impls(migration_id, + self.get_linked_source_dict()) + + def get_virtual_source_impls_to_exec(self, migration_id): + return self.__get_sorted_impls(migration_id, + self.get_virtual_source_dict()) + + def get_snapshot_impls_to_exec(self, migration_id): + return self.__get_sorted_impls(migration_id, self.get_snapshot_dict()) + + @staticmethod + def __get_sorted_impls(migration_id, impl_dict): + # + # If there is no migration id, this means no lua version was provided + # so just return an empty list. + # + if not migration_id: + return [] + # + # First filter out all ids less than the migration id. We need to do + # this because even after sorting, we wouldn't know where in the list + # to start iterating over. + # + def filter_lower(current): + return (sorted([current, migration_id], + key=float)[0] == migration_id) + + # Filter and sort the list. + id_list = sorted(filter(filter_lower, impl_dict.keys()), key=float) + + # + # Loop through ids after filtering out lower ids and sorting to add + # the impl to the resulting list in the correct order. + # + return [impl_dict[found_id] for found_id in id_list] diff --git a/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py b/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py deleted file mode 100644 index 9dc6c142..00000000 --- a/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py +++ /dev/null @@ -1,102 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. 
-# - -import logging -import re - -from dlpx.virtualization.platform.exceptions import ( - MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError, - MigrationIdIncorrectFormatError) - -MIGRATION_ID_REGEX = re.compile(r'^\d+(\.\d+)*$') -logger = logging.getLogger(__name__) - - -class MigrationIdSet: - """ - Keeps track of all migrations and validites/standardizes them as they are - added / parsed. - - Exceptions can be thrown when trying to add a new migration id. Otherwise - at the end of reading in all migration functions can be gotten in the - correct order. - """ - def __init__(self): - """ - The list of migration ids will store migrations as an array of ids - where the id is represented by the standardized array of positive - integers. For example if there were these ids: 1.0.0, 1.2.03, and - 2.0.1.0, __migration_ids would be [ [1], [1, 2, 3], [2, 0, 1]] - """ - self.__migration_ids = [] - - def add(self, migration_id, impl_name): - """ - Validates that the migration id is the correct type/format and then - return the canonical format of the id. Add the id as an array of - integers into the list of migration ids. - """ - # First validate that the migration_id is the correct type/format. - self.validate_migration_id(migration_id, impl_name) - - # Then we must standardize the migration_id. - std_migration_id = self.standardize_migration_id_to_array( - migration_id, impl_name) - std_string = '.'.join(str(i) for i in std_migration_id) - - # Then we should check if this migration_id has already been used - if std_migration_id in self.__migration_ids: - raise MigrationIdAlreadyUsedError(migration_id, - std_string, - impl_name) - - # Lastly we should add this new array into the internal migration list. 
- self.__migration_ids.append(std_migration_id) - - # Return back the standardized format of the migration id - return std_string - - @staticmethod - def validate_migration_id(migration_id, impl_name): - # First validate that the id is a string - if not isinstance(migration_id, basestring): - raise MigrationIdIncorrectTypeError(migration_id, impl_name) - - # Next check if the id is the right format - if not MIGRATION_ID_REGEX.match(migration_id): - raise MigrationIdIncorrectFormatError.from_fields( - migration_id, impl_name, MIGRATION_ID_REGEX.pattern) - - @staticmethod - def standardize_migration_id_to_array(migration_id, impl_name): - # Split on the period and convert to integer - array = [int(i) for i in migration_id.split('.')] - - # - # We cannot allow a migration id of essentially '0' because otherwise - # there would be no way to add a migration that goes before this. - # - if not any(array): - raise MigrationIdIncorrectFormatError( - "The migration id '{}' used in the function '{}' cannot be" - " used because a 0 migration id is not allowed.".format( - migration_id, impl_name)) - - # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 - while array: - if not array[-1]: - # Remove the last element which is a zero from array - array.pop() - else: - break - - return array - - def get_sorted_ids(self): - # First sort the migration ids - self.__migration_ids.sort() - - # Then convert all these arrays to the usual string format. 
- return ['.'.join(str(i) for i in migration_id) - for migration_id in self.__migration_ids] diff --git a/platform/src/main/python/dlpx/virtualization/platform/operation.py b/platform/src/main/python/dlpx/virtualization/platform/operation.py index b938c270..12f1fb6d 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/operation.py +++ b/platform/src/main/python/dlpx/virtualization/platform/operation.py @@ -4,6 +4,7 @@ from enum import Enum, unique + @unique class Operation(Enum): DISCOVERY_REPOSITORY = 'discovery.repository()' diff --git a/platform/src/main/python/dlpx/virtualization/platform/util.py b/platform/src/main/python/dlpx/virtualization/platform/util.py index 5e0a15c3..d80a8718 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/util.py +++ b/platform/src/main/python/dlpx/virtualization/platform/util.py @@ -3,9 +3,10 @@ # import dlpx.virtualization.api + def get_virtualization_api_version(): """Returns the Virutalization API version string. :return: version string """ - return dlpx.virtualization.api.__version__ \ No newline at end of file + return dlpx.virtualization.api.__version__ diff --git a/platform/src/main/python/dlpx/virtualization/platform/validation_util.py b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py index bc39d098..f227a52b 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/validation_util.py +++ b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py @@ -2,6 +2,7 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # import inspect + from dlpx.virtualization.platform.exceptions import DecoratorNotFunctionError diff --git a/platform/src/test/python/dlpx/virtualization/conftest.py b/platform/src/test/python/dlpx/virtualization/conftest.py new file mode 100644 index 00000000..c0802237 --- /dev/null +++ b/platform/src/test/python/dlpx/virtualization/conftest.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
+# + +import pytest + +# +# conftest.py is used to share fixtures among multiple tests files. pytest will +# automatically get discovered in the test class if the figure name is used +# as the input variable. The idea of fixtures is to define certain object +# configs and allow them to get used in different tests but also being allowed +# to set certain parts defined in other fixtures. Read more at: +# https://docs.pytest.org/en/latest/fixture.html +# + +OBJECT_TYPES = [ + 'repository', 'source_config', 'linked_source', 'virtual_source', + 'snapshot' +] + + +@pytest.fixture +def method_name(object_op): + return 'add_{}'.format(object_op) + + +@pytest.fixture +def get_impls_to_exec(object_op): + return 'get_{}_impls_to_exec'.format(object_op) diff --git a/platform/src/test/python/dlpx/virtualization/fake_generated_definitions.py b/platform/src/test/python/dlpx/virtualization/fake_generated_definitions.py index 99df6157..481c04a5 100644 --- a/platform/src/test/python/dlpx/virtualization/fake_generated_definitions.py +++ b/platform/src/test/python/dlpx/virtualization/fake_generated_definitions.py @@ -9,119 +9,99 @@ class Model(object): class RepositoryDefinition(Model): - def __init__(self, name): - self.swagger_types = { - 'name': str - } + def __init__(self, name): + self.swagger_types = {'name': str} - self.attribute_map = { - 'name': 'name' - } - self._name = name + self.attribute_map = {'name': 'name'} + self._name = name - @property - def name(self): - return self._name + @property + def name(self): + return self._name - @staticmethod - def from_dict(input_dict): - return RepositoryDefinition(input_dict['name']) + @staticmethod + def from_dict(input_dict): + return RepositoryDefinition(input_dict['name']) - def to_dict(self): - return { "name": self._name } + def to_dict(self): + return {"name": self._name} class SourceConfigDefinition(Model): - def __init__(self, name): - self.swagger_types = { - 'name': str - } + def __init__(self, name): + self.swagger_types 
= {'name': str} - self.attribute_map = { - 'name': 'name' - } - self._name = name + self.attribute_map = {'name': 'name'} + self._name = name - @property - def name(self): - return self._name + @property + def name(self): + return self._name - @staticmethod - def from_dict(input_dict): - return SourceConfigDefinition(input_dict['name']) + @staticmethod + def from_dict(input_dict): + return SourceConfigDefinition(input_dict['name']) - def to_dict(self): - return { "name": self._name } + def to_dict(self): + return {"name": self._name} class LinkedSourceDefinition(Model): - def __init__(self, name): - self.swagger_types = { - 'name': str - } + def __init__(self, name): + self.swagger_types = {'name': str} - self.attribute_map = { - 'name': 'name' - } - self._name = name + self.attribute_map = {'name': 'name'} + self._name = name - @property - def name(self): - return self._name + @property + def name(self): + return self._name - @staticmethod - def from_dict(input_dict): - return LinkedSourceDefinition(input_dict['name']) + @staticmethod + def from_dict(input_dict): + return LinkedSourceDefinition(input_dict['name']) class VirtualSourceDefinition(Model): - def __init__(self, name): - self.swagger_types = { - 'name': str - } + def __init__(self, name): + self.swagger_types = {'name': str} - self.attribute_map = { - 'name': 'name' - } - self._name = name + self.attribute_map = {'name': 'name'} + self._name = name - @property - def name(self): - return self._name + @property + def name(self): + return self._name - @staticmethod - def from_dict(input_dict): - return VirtualSourceDefinition(input_dict['name']) + @staticmethod + def from_dict(input_dict): + return VirtualSourceDefinition(input_dict['name']) - def to_dict(self): - return { "name": self._name } + def to_dict(self): + return {"name": self._name} class SnapshotDefinition(Model): - def __init__(self, name): - self.swagger_types = { - 'name': str - } + def __init__(self, name): + self.swagger_types = {'name': 
str} - self.attribute_map = { - 'name': 'name' - } - self._name = name + self.attribute_map = {'name': 'name'} + self._name = name - @property - def name(self): - return self._name + @property + def name(self): + return self._name - @staticmethod - def from_dict(input_dict): - return SnapshotDefinition(input_dict['name']) + @staticmethod + def from_dict(input_dict): + return SnapshotDefinition(input_dict['name']) - def to_dict(self): - return { "name": self._name } + def to_dict(self): + return {"name": self._name} class SnapshotParametersDefinition(Model): - """ + """ The appdata snapshot parameter will eventually be customizable but for now this just follows the old appdata parameter where the delphix user can decide if resync is true or not. This will now go into pre and post snapshot @@ -129,23 +109,19 @@ class SnapshotParametersDefinition(Model): set this to be true is that this means the operation is a "hard" resync and that all data should be refreshed. """ - def __init__(self, resync): - self.swagger_types = { - 'resync': bool - } - - self.attribute_map = { - 'resync': 'resync' - } - self._resync = resync - - @property - def resync(self): - return self._resync - - @staticmethod - def from_dict(input_dict): - return SnapshotParametersDefinition(input_dict['resync']) - - def to_dict(self): - return { "resync": self._resync } + def __init__(self, resync): + self.swagger_types = {'resync': bool} + + self.attribute_map = {'resync': 'resync'} + self._resync = resync + + @property + def resync(self): + return self._resync + + @staticmethod + def from_dict(input_dict): + return SnapshotParametersDefinition(input_dict['resync']) + + def to_dict(self): + return {"resync": self._resync} diff --git a/platform/src/test/python/dlpx/virtualization/test_migration_helper.py b/platform/src/test/python/dlpx/virtualization/test_migration_helper.py new file mode 100644 index 00000000..de90d246 --- /dev/null +++ 
b/platform/src/test/python/dlpx/virtualization/test_migration_helper.py @@ -0,0 +1,326 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +import pytest +import conftest +from dlpx.virtualization.platform import migration_helper as m +from dlpx.virtualization.platform.exceptions import ( + MigrationIdAlreadyUsedError, MigrationIdIncorrectFormatError, + MigrationIdIncorrectTypeError) + + +class TestPlatformUpgradeMigrations: + @staticmethod + @pytest.fixture + def platform_migrations(): + yield m.PlatformUpgradeMigrations() + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + @pytest.mark.parametrize('migration_id,expected_std_id', + [('5.3.2.1', '5.3.2.1'), ('1000', '1000'), + ('50.0.0', '50'), ('50.0.0000.1', '50.0.0.1'), + ('2019.10.04', '2019.10.4')]) + def test_basic_add(platform_migrations, method_name, migration_id, + expected_std_id): + def function(): + pass + + # Add the migration id using the specific method passed in. + getattr(platform_migrations, method_name)(migration_id, function) + assert len(platform_migrations.get_sorted_ids()) == 1 + assert expected_std_id in platform_migrations.get_sorted_ids() + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + @pytest.mark.parametrize('id_one,id_two', + [('5.3.2.1', '5.3.2.1'), ('1000', '1000.0.0'), + ('50.0.0', '50'), + ('50.0.0000.1', '50.0.0.1.0000'), + ('2019.0010.0004', '2019.10.4')]) + def test_same_migration_id_used(platform_migrations, method_name, id_one, + id_two): + def function(): + pass + + def function_two(): + pass + + # Add the id into the correct dict/set first. 
+ getattr(platform_migrations, method_name)(id_one, function) + + assert len(platform_migrations.get_sorted_ids()) == 1 + std_id = platform_migrations.get_sorted_ids()[0] + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info: + getattr(platform_migrations, method_name)(id_two, function_two) + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'function_two' has the" + " same canonical form '{}' as another migration.".format( + id_two, std_id)) + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + @pytest.mark.parametrize('migration_id', + [True, 1000, {'random set'}, ['random', 'list']]) + def test_migration_incorrect_type(platform_migrations, method_name, + migration_id): + def upgrade(): + pass + + with pytest.raises(MigrationIdIncorrectTypeError) as err_info: + getattr(platform_migrations, method_name)(migration_id, upgrade) + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' should" + " be a string.".format(migration_id)) + + @staticmethod + @pytest.mark.parametrize('method_name', [ + 'add_repository', 'add_source_config', 'add_linked_source', + 'add_virtual_source', 'add_snapshot' + ]) + @pytest.mark.parametrize('migration_id', + ['Not integers', '1000.', '2019 10 20']) + def test_migration_incorrect_format(platform_migrations, method_name, + migration_id): + def upgrade(): + pass + + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + getattr(platform_migrations, method_name)(migration_id, upgrade) + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' does not" + " follow the correct format '{}'.".format( + migration_id, + m.PlatformUpgradeMigrations.MIGRATION_ID_REGEX.pattern)) + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + @pytest.mark.parametrize('migration_id', ['0.0', '0', '0.000.000.00.0']) + 
def test_migration_id_is_zero(platform_migrations, method_name, + migration_id): + def upgrade(): + pass + + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + getattr(platform_migrations, method_name)(migration_id, upgrade) + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id)) + + @staticmethod + def test_get_sorted_ids(platform_migrations): + def function(): + pass + + platform_migrations.add_repository('2019.04.01', function) + platform_migrations.add_virtual_source('4.10.04', function) + platform_migrations.add_linked_source('20190.10.006', function) + platform_migrations.add_snapshot('1.2.3.4', function) + platform_migrations.add_source_config('5.4.3.2.1.0', function) + platform_migrations.add_linked_source('1', function) + platform_migrations.add_snapshot('10.01.10.00.1.0.0', function) + + assert platform_migrations.get_sorted_ids() == [ + '1', '1.2.3.4', '4.10.4', '5.4.3.2.1', '10.1.10.0.1', '2019.4.1', + '20190.10.6' + ] + + +class TestLuaUpgradeMigrations: + @staticmethod + @pytest.fixture + def lua_migrations(): + yield m.LuaUpgradeMigrations() + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + def test_basic_add(lua_migrations, object_op): + migration_id_list = ['0.0', '00.30', '1.00', '1.04', '2.5', '5.50'] + expected = ['0.0', '0.30', '1.0', '1.4', '2.5', '5.50'] + + def function(): + pass + + # Add all the migration ids using the specific method passed in. 
+ for migration_id in migration_id_list: + getattr(lua_migrations, 'add_{}'.format(object_op))(migration_id, + function) + + impl_dict = getattr(lua_migrations, 'get_{}_dict'.format(object_op))() + assert len(impl_dict) == 6 + assert all(migration_id in impl_dict for migration_id in expected) + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + def test_same_migration_id_used(lua_migrations, object_op): + def function(): + pass + + def function_two(): + pass + + # Add the id into the correct dict/set first. + getattr(lua_migrations, 'add_{}'.format(object_op))('5.3', function) + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info: + getattr(lua_migrations, 'add_{}'.format(object_op))('5.3', + function_two) + + message = err_info.value.message + assert message == ( + "The lua major minor version '5.3' used in the function " + "'function_two' decorated by 'upgrade.{}()' has " + "already been used.".format(object_op)) + + @staticmethod + @pytest.mark.parametrize('method_name', [ + 'add_repository', 'add_source_config', 'add_linked_source', + 'add_virtual_source', 'add_snapshot' + ]) + @pytest.mark.parametrize('migration_id', + [True, 1000, {'random set'}, ['random', 'list']]) + def test_migration_incorrect_type(lua_migrations, method_name, + migration_id): + def upgrade(): + pass + + with pytest.raises(MigrationIdIncorrectTypeError) as err_info: + getattr(lua_migrations, method_name)(migration_id, upgrade) + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' should" + " be a string.".format(migration_id)) + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + @pytest.mark.parametrize( + 'migration_id', ['Not integers', '1000.', '2019 10 20', '5.4.testver']) + def test_migration_incorrect_format(lua_migrations, method_name, + migration_id): + def upgrade(): + pass + + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + 
getattr(lua_migrations, method_name)(migration_id, upgrade) + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' does not" + " follow the correct format '{}'.".format( + migration_id, + m.LuaUpgradeMigrations.LUA_VERSION_REGEX.pattern)) + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + def test_get_correct_impls(lua_migrations, method_name, get_impls_to_exec): + def f_one(): + pass + + def f_two(): + pass + + def f_three(): + pass + + def f_four(): + pass + + # Add the id/function in a random order + getattr(lua_migrations, method_name)('3.6', f_three) + getattr(lua_migrations, method_name)('1.02', f_one) + getattr(lua_migrations, method_name)('4.0', f_four) + getattr(lua_migrations, method_name)('2.01', f_two) + + ordered_impl_list = getattr(lua_migrations, get_impls_to_exec)('2.1') + + assert ordered_impl_list == [f_two, f_three, f_four] + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + def test_get_correct_impls_low_versions(lua_migrations, method_name, + get_impls_to_exec): + def f_one(): + pass + + def f_two(): + pass + + def f_three(): + pass + + def f_four(): + pass + + # Add the id/function in a random order + getattr(lua_migrations, method_name)('3.6', f_three) + getattr(lua_migrations, method_name)('1.02', f_one) + getattr(lua_migrations, method_name)('4.0', f_four) + getattr(lua_migrations, method_name)('2.01', f_two) + + ordered_impl_list = getattr(lua_migrations, get_impls_to_exec)('5.1') + + assert not ordered_impl_list + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + def test_get_correct_impls_high_versions(lua_migrations, method_name, + get_impls_to_exec): + def f_one(): + pass + + def f_two(): + pass + + def f_three(): + pass + + def f_four(): + pass + + # Add the id/function in a random order + getattr(lua_migrations, method_name)('3.6', f_three) + getattr(lua_migrations, 
method_name)('1.02', f_one) + getattr(lua_migrations, method_name)('4.0', f_four) + getattr(lua_migrations, method_name)('2.01', f_two) + + ordered_impl_list = getattr(lua_migrations, get_impls_to_exec)('0.0') + + assert ordered_impl_list == [f_one, f_two, f_three, f_four] + + @staticmethod + @pytest.mark.parametrize('object_op', conftest.OBJECT_TYPES) + def test_get_correct_impls_version_missing(lua_migrations, method_name, + get_impls_to_exec): + def f_one(): + pass + + def f_two(): + pass + + def f_three(): + pass + + def f_four(): + pass + + # Add the id/function in a random order + getattr(lua_migrations, method_name)('3.6', f_three) + getattr(lua_migrations, method_name)('1.02', f_one) + getattr(lua_migrations, method_name)('4.0', f_four) + getattr(lua_migrations, method_name)('2.01', f_two) + + ordered_impl_list = getattr(lua_migrations, get_impls_to_exec)('2.9') + + assert ordered_impl_list == [f_three, f_four] diff --git a/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py b/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py deleted file mode 100644 index 42db7f92..00000000 --- a/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py +++ /dev/null @@ -1,107 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. 
-# - -import pytest -from dlpx.virtualization.platform.exceptions import ( - MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError, - MigrationIdIncorrectFormatError) -from dlpx.virtualization.platform import migration_id_set as m - - -class TestMigrationIdSet: - @staticmethod - @pytest.fixture - def migration_set(): - yield m.MigrationIdSet() - - @staticmethod - @pytest.mark.parametrize('migration_id,expected_std_id', [ - ('5.3.2.1', '5.3.2.1'), - ('1000', '1000'), - ('50.0.0', '50'), - ('50.0.0000.1', '50.0.0.1'), - ('2019.10.04', '2019.10.4')]) - def test_basic_add(migration_set, migration_id, expected_std_id): - actual_std_id = migration_set.add(migration_id, 'function') - - assert actual_std_id == expected_std_id - - @staticmethod - @pytest.mark.parametrize('id_one,id_two', [ - ('5.3.2.1', '5.3.2.1'), - ('1000', '1000.0.0'), - ('50.0.0', '50'), - ('50.0.0000.1', '50.0.0.1.0000'), - ('2019.0010.0004', '2019.10.4')]) - def test_same_migration_id_used(migration_set, id_one, id_two): - std_id = migration_set.add(id_one, 'function') - - with pytest.raises(MigrationIdAlreadyUsedError) as err_info: - migration_set.add(id_two, 'function2') - - message = err_info.value.message - assert message == ( - "The migration id '{}' used in the function 'function2' has the" - " same canonical form '{}' as another migration.".format(id_two, - std_id)) - - @staticmethod - @pytest.mark.parametrize('migration_id', [True, - 1000, - {'random set'}, - ['random', 'list']]) - def test_migration_incorrect_type(migration_set, migration_id): - with pytest.raises(MigrationIdIncorrectTypeError) as err_info: - migration_set.add(migration_id, 'upgrade') - - message = err_info.value.message - assert message == ( - "The migration id '{}' used in the function 'upgrade' should" - " be a string.".format(migration_id)) - - @staticmethod - @pytest.mark.parametrize('migration_id', ['Not integers', - '1000.', - '2019 10 20']) - def test_migration_incorrect_format(migration_set, migration_id): - 
with pytest.raises(MigrationIdIncorrectFormatError) as err_info: - migration_set.add(migration_id, 'upgrade') - - message = err_info.value.message - assert message == ( - "The migration id '{}' used in the function 'upgrade' does not" - " follow the correct format '{}'.".format( - migration_id, m.MIGRATION_ID_REGEX.pattern)) - - @staticmethod - @pytest.mark.parametrize('migration_id', ['0.0', - '0', - '0.000.000.00.0']) - def test_migration_id_is_zero(migration_set, migration_id): - with pytest.raises(MigrationIdIncorrectFormatError) as err_info: - migration_set.add(migration_id, 'upgrade') - - message = err_info.value.message - assert message == ( - "The migration id '{}' used in the function 'upgrade' cannot be" - " used because a 0 migration id is not allowed.".format( - migration_id)) - - @staticmethod - def test_get_sorted_ids(migration_set): - migration_set.add('2019.04.01', 'one') - migration_set.add('4.10.04', 'two') - migration_set.add('20190.10.006', 'three') - migration_set.add('1.2.3.4', 'four') - migration_set.add('5.4.3.2.1.0', 'five') - migration_set.add('1', 'six') - migration_set.add('10.01.10.00.1.0.0', 'seven') - - assert migration_set.get_sorted_ids() == ['1', - '1.2.3.4', - '4.10.4', - '5.4.3.2.1', - '10.1.10.0.1', - '2019.4.1', - '20190.10.6'] diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin.py b/platform/src/test/python/dlpx/virtualization/test_plugin.py index 90f97e7a..7a2060f8 100755 --- a/platform/src/test/python/dlpx/virtualization/test_plugin.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin.py @@ -3,24 +3,26 @@ # import json + import pytest -from dlpx.virtualization.api import (platform_pb2, common_pb2) -from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) +from dlpx.virtualization.api import common_pb2, platform_pb2 +from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, + RemoteHost, RemoteUser) from 
dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, IncorrectUpgradeObjectTypeError, OperationAlreadyDefinedError, PluginRuntimeError) - + IncorrectReturnTypeError, IncorrectUpgradeObjectTypeError, + OperationAlreadyDefinedError, PluginRuntimeError) from mock import MagicMock, patch -import fake_generated_definitions -from fake_generated_definitions import SnapshotDefinition -from fake_generated_definitions import SourceConfigDefinition -from fake_generated_definitions import RepositoryDefinition +import fake_generated_definitions +from fake_generated_definitions import (RepositoryDefinition, + SnapshotDefinition, + SourceConfigDefinition) TEST_BINARY_PATH = '/binary/path' TEST_SCRATCH_PATH = '/scratch/path' TEST_MOUNT_PATH = '/mnt/path' TEST_SHARED_PATH = '/shared/path' -TEST_GUID='8e1442c2-64ce-48cf-848c-ce4deacca579' +TEST_GUID = '8e1442c2-64ce-48cf-848c-ce4deacca579' TEST_HOST_NAME = 'TestHost' TEST_HOST_REFERENCE = 'UNIX_HOST-1' TEST_ENVIRONMENT_NAME = 'TestEnvironment' @@ -36,7 +38,6 @@ TEST_STAGED_SOURCE = 'TestStagedSource' TEST_VIRTUAL_SOURCE = 'TestVirtualSource' - # This is a simple JSON object that has only "name" property defined. 
SIMPLE_JSON = '{{"name": "{0}"}}' @@ -48,15 +49,23 @@ TEST_VIRTUAL_SOURCE_JSON = SIMPLE_JSON.format(TEST_VIRTUAL_SOURCE) TEST_SNAPSHOT_PARAMS_JSON = '{"resync": false}' TEST_PRE_UPGRADE_PARAMS = {'obj': json.dumps({'name': 'upgrade'})} -TEST_POST_MIGRATION_METADATA_1 = ( - json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade'}})) -TEST_POST_MIGRATION_METADATA_2 = ( - json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade', - 'metadata': 'metadata'}})) -TEST_POST_UPGRADE_PARAMS = ( - {u'obj': '"{\\"obj\\": {\\"prettyName\\": \\"prettyUpgrade\\", ' - '\\"name\\": \\"upgrade\\", \\"metadata\\": \\"metadata\\"}}"'} -) +TEST_POST_MIGRATION_METADATA_1 = (json.dumps( + {'obj': { + 'name': 'upgrade', + 'prettyName': 'prettyUpgrade' + }})) +TEST_POST_MIGRATION_METADATA_2 = (json.dumps({ + 'obj': { + 'name': 'upgrade', + 'prettyName': 'prettyUpgrade', + 'metadata': 'metadata' + } +})) +TEST_POST_UPGRADE_PARAMS = ({ + u'obj': + '"{\\"obj\\": {\\"prettyName\\": \\"prettyUpgrade\\", ' + '\\"name\\": \\"upgrade\\", \\"metadata\\": \\"metadata\\"}}"' +}) MIGRATION_IDS = ('2020.1.1', '2020.2.2') @@ -77,12 +86,12 @@ def my_plugin(): @staticmethod def test_disallow_multiple_decorator_invocations(my_plugin): - @my_plugin.virtual.configure() def configure_impl(): pass with pytest.raises(OperationAlreadyDefinedError): + @my_plugin.virtual.configure() def configure_impl(): pass @@ -116,17 +125,14 @@ def attribute_map(self): return self._attribute_map @staticmethod - @pytest.fixture( - autouse=True, - params=[ - NotModel1(), - NotModel2(), - NotModel3(), - NotModel4(), - NotModel5(), - 'string', - 1 - ]) + @pytest.fixture(autouse=True, + params=[ + NotModel1(), + NotModel2(), + NotModel3(), + NotModel4(), + NotModel5(), 'string', 1 + ]) def not_model(request): return request.param @@ -417,9 +423,8 @@ def assert_plugin_args(**kwargs): kwargs['snapshot_parameters']) @staticmethod - def test_virtual_configure( - my_plugin, virtual_source, repository, snapshot): - 
+ def test_virtual_configure(my_plugin, virtual_source, repository, + snapshot): @my_plugin.virtual.configure() def virtual_configure_impl(virtual_source, repository, snapshot): TestPlugin.assert_plugin_args(virtual_source=virtual_source, @@ -441,9 +446,8 @@ def virtual_configure_impl(virtual_source, repository, snapshot): assert config.parameters.json == expected_source_config @staticmethod - def test_virtual_configure_return_incorrect_type( - my_plugin, virtual_source, repository, snapshot): - + def test_virtual_configure_return_incorrect_type(my_plugin, virtual_source, + repository, snapshot): @my_plugin.virtual.configure() def virtual_configure_impl(virtual_source, repository, snapshot): TestPlugin.assert_plugin_args(virtual_source=virtual_source, @@ -460,8 +464,7 @@ def virtual_configure_impl(virtual_source, repository, snapshot): snapshot=snapshot) with pytest.raises(IncorrectReturnTypeError) as err_info: - my_plugin.virtual._internal_configure( - configure_request) + my_plugin.virtual._internal_configure(configure_request) message = err_info.value.message assert message == ( @@ -470,12 +473,11 @@ def virtual_configure_impl(virtual_source, repository, snapshot): "fake_generated_definitions.SourceConfigDefinition'.") @staticmethod - def test_virtual_unconfigure( - my_plugin, virtual_source, repository, source_config): - + def test_virtual_unconfigure(my_plugin, virtual_source, repository, + source_config): @my_plugin.virtual.unconfigure() - def virtual_unconfigure_impl( - virtual_source, repository, source_config): + def virtual_unconfigure_impl(virtual_source, repository, + source_config): TestPlugin.assert_plugin_args(virtual_source=virtual_source, repository=repository, source_config=source_config) @@ -497,12 +499,11 @@ def virtual_unconfigure_impl( assert unconfigure_response.return_value == expected_result @staticmethod - def test_virtual_reconfigure( - my_plugin, virtual_source, repository, source_config, snapshot): - + def 
test_virtual_reconfigure(my_plugin, virtual_source, repository, + source_config, snapshot): @my_plugin.virtual.reconfigure() - def virtual_reconfigure_impl( - virtual_source, repository, source_config, snapshot): + def virtual_reconfigure_impl(virtual_source, repository, source_config, + snapshot): TestPlugin.assert_plugin_args(virtual_source=virtual_source, source_config=source_config, repository=repository, @@ -525,12 +526,14 @@ def virtual_reconfigure_impl( assert config.parameters.json == expected_source_config @staticmethod - def test_virtual_reconfigure_return_incorrect_type( - my_plugin, virtual_source, repository, source_config, snapshot): - + def test_virtual_reconfigure_return_incorrect_type(my_plugin, + virtual_source, + repository, + source_config, + snapshot): @my_plugin.virtual.reconfigure() - def virtual_reconfigure_impl( - virtual_source, repository, source_config, snapshot): + def virtual_reconfigure_impl(virtual_source, repository, source_config, + snapshot): TestPlugin.assert_plugin_args(virtual_source=virtual_source, source_config=source_config, repository=repository, @@ -547,8 +550,7 @@ def virtual_reconfigure_impl( snapshot=snapshot) with pytest.raises(IncorrectReturnTypeError) as err_info: - my_plugin.virtual._internal_reconfigure( - reconfigure_request) + my_plugin.virtual._internal_reconfigure(reconfigure_request) message = err_info.value.message assert message == ( @@ -557,9 +559,8 @@ def virtual_reconfigure_impl( "fake_generated_definitions.SourceConfigDefinition'.") @staticmethod - def test_virtual_start( - my_plugin, virtual_source, repository, source_config): - + def test_virtual_start(my_plugin, virtual_source, repository, + source_config): @my_plugin.virtual.start() def virtual_start_impl(virtual_source, repository, source_config): TestPlugin.assert_plugin_args(virtual_source=virtual_source, @@ -581,9 +582,8 @@ def virtual_start_impl(virtual_source, repository, source_config): assert start_response.return_value == expected_result 
@staticmethod - def test_virtual_stop( - my_plugin, virtual_source, repository, source_config): - + def test_virtual_stop(my_plugin, virtual_source, repository, + source_config): @my_plugin.virtual.stop() def start_impl(virtual_source, repository, source_config): TestPlugin.assert_plugin_args(virtual_source=virtual_source, @@ -605,11 +605,11 @@ def start_impl(virtual_source, repository, source_config): assert stop_response.return_value == expected_result @staticmethod - def test_virtual_pre_snapshot( - my_plugin, virtual_source, repository, source_config): - + def test_virtual_pre_snapshot(my_plugin, virtual_source, repository, + source_config): @my_plugin.virtual.pre_snapshot() - def virtual_pre_snapshot_impl(virtual_source, repository, source_config): + def virtual_pre_snapshot_impl(virtual_source, repository, + source_config): TestPlugin.assert_plugin_args(virtual_source=virtual_source, repository=repository, source_config=source_config) @@ -632,12 +632,11 @@ def virtual_pre_snapshot_impl(virtual_source, repository, source_config): assert virtual_pre_snapshot_response.return_value == expected_result @staticmethod - def test_virtual_post_snapshot( - my_plugin, virtual_source, repository, source_config): - + def test_virtual_post_snapshot(my_plugin, virtual_source, repository, + source_config): @my_plugin.virtual.post_snapshot() - def virtual_post_snapshot_impl( - virtual_source, repository, source_config): + def virtual_post_snapshot_impl(virtual_source, repository, + source_config): TestPlugin.assert_plugin_args(virtual_source=virtual_source, repository=repository, source_config=source_config) @@ -655,12 +654,12 @@ def virtual_post_snapshot_impl( virtual_post_snapshot_request)) expected_snapshot = TEST_SNAPSHOT_JSON - assert (virtual_post_snapshot_response - .return_value.snapshot.parameters.json == expected_snapshot) + assert (virtual_post_snapshot_response.return_value.snapshot. 
+ parameters.json == expected_snapshot) @staticmethod - def test_virtual_status( - my_plugin, virtual_source, repository, source_config): + def test_virtual_status(my_plugin, virtual_source, repository, + source_config): from dlpx.virtualization.platform import Status @@ -684,9 +683,8 @@ def virtual_status_impl(virtual_source, repository, source_config): assert virtual_status_response.return_value.status == expected_status @staticmethod - def test_virtual_initialize( - my_plugin, virtual_source, repository, source_config): - + def test_virtual_initialize(my_plugin, virtual_source, repository, + source_config): @my_plugin.virtual.initialize() def virtual_initialize_impl(virtual_source, repository, source_config): TestPlugin.assert_plugin_args(virtual_source=virtual_source, @@ -711,24 +709,22 @@ def virtual_initialize_impl(virtual_source, repository, source_config): @staticmethod def test_virtual_mount_spec(my_plugin, virtual_source, repository): - from dlpx.virtualization.platform import ( - Mount, MountSpecification, OwnershipSpecification) + from dlpx.virtualization.platform import (Mount, MountSpecification, + OwnershipSpecification) @my_plugin.virtual.mount_specification() def virtual_mount_spec_impl(virtual_source, repository): TestPlugin.assert_plugin_args(virtual_source=virtual_source, - repository=repository) + repository=repository) primary_mount = Mount(virtual_source.connection.environment, - TEST_MOUNT_PATH, - TEST_SHARED_PATH) + TEST_MOUNT_PATH, TEST_SHARED_PATH) another_mount = Mount(virtual_source.connection.environment, - TEST_MOUNT_PATH, - TEST_SHARED_PATH) + TEST_MOUNT_PATH, TEST_SHARED_PATH) ownership_spec = OwnershipSpecification(TEST_UID, TEST_GID) - return MountSpecification( - [primary_mount, another_mount], ownership_spec) + return MountSpecification([primary_mount, another_mount], + ownership_spec) virtual_mount_spec_request = platform_pb2.VirtualMountSpecRequest() TestPlugin.setup_request(request=virtual_mount_spec_request, @@ -750,12 
+746,13 @@ def virtual_mount_spec_impl(virtual_source, repository): @staticmethod def test_repository_discovery(my_plugin, connection): - @my_plugin.discovery.repository() def repository_discovery_impl(source_connection): TestPlugin.assert_connection(source_connection) - return [RepositoryDefinition(TEST_REPOSITORY), - RepositoryDefinition(TEST_REPOSITORY)] + return [ + RepositoryDefinition(TEST_REPOSITORY), + RepositoryDefinition(TEST_REPOSITORY) + ] repository_discovery_request = ( platform_pb2.RepositoryDiscoveryRequest()) @@ -768,10 +765,8 @@ def repository_discovery_impl(source_connection): for repository in repositories: assert repository.parameters.json == TEST_REPOSITORY_JSON - @staticmethod def test_repository_discovery_bad_return_type(my_plugin, connection): - @my_plugin.discovery.repository() def repository_discovery_impl(source_connection): TestPlugin.assert_connection(source_connection) @@ -793,16 +788,16 @@ def repository_discovery_impl(source_connection): " be of type 'list of dlpx.virtualization" ".fake_generated_definitions.RepositoryDefinition'.") - @staticmethod def test_source_config_discovery(my_plugin, connection, repository): - @my_plugin.discovery.source_config() def source_config_discovery_impl(source_connection, repository): TestPlugin.assert_connection(source_connection) TestPlugin.assert_repository(repository) - return [SourceConfigDefinition(TEST_REPOSITORY), - SourceConfigDefinition(TEST_REPOSITORY)] + return [ + SourceConfigDefinition(TEST_REPOSITORY), + SourceConfigDefinition(TEST_REPOSITORY) + ] source_config_discovery_request = ( platform_pb2.SourceConfigDiscoveryRequest()) @@ -818,14 +813,13 @@ def source_config_discovery_impl(source_connection, repository): assert source_config.parameters.json == TEST_REPOSITORY_JSON @staticmethod - def test_direct_pre_snapshot( - my_plugin, direct_source, repository, source_config): - + def test_direct_pre_snapshot(my_plugin, direct_source, repository, + source_config): 
@my_plugin.linked.pre_snapshot() def mock_direct_pre_snapshot(direct_source, repository, source_config): TestPlugin.assert_plugin_args(direct_source=direct_source, - repository=repository, - source_config=source_config) + repository=repository, + source_config=source_config) return direct_pre_snapshot_request = platform_pb2.DirectPreSnapshotRequest() @@ -845,12 +839,11 @@ def mock_direct_pre_snapshot(direct_source, repository, source_config): assert direct_pre_snapshot_response.return_value == expected_result @staticmethod - def test_direct_post_snapshot( - my_plugin, direct_source, repository, source_config): - + def test_direct_post_snapshot(my_plugin, direct_source, repository, + source_config): @my_plugin.linked.post_snapshot() - def direct_post_snapshot_impl( - direct_source, repository, source_config): + def direct_post_snapshot_impl(direct_source, repository, + source_config): TestPlugin.assert_plugin_args(direct_source=direct_source, repository=repository, source_config=source_config) @@ -870,16 +863,11 @@ def direct_post_snapshot_impl( assert snapshot.parameters.json == expected_snapshot @staticmethod - def test_staged_pre_snapshot( - my_plugin, - staged_source, - repository, - source_config, - snapshot_parameters): - + def test_staged_pre_snapshot(my_plugin, staged_source, repository, + source_config, snapshot_parameters): @my_plugin.linked.pre_snapshot() - def staged_pre_snapshot_impl( - staged_source, repository, source_config, snapshot_parameters): + def staged_pre_snapshot_impl(staged_source, repository, source_config, + snapshot_parameters): TestPlugin.assert_plugin_args( staged_source=staged_source, repository=repository, @@ -900,19 +888,14 @@ def staged_pre_snapshot_impl( # Check that the response's oneof is set to return_value and not error assert response.WhichOneof('result') == 'return_value' - assert(response.return_value == expected_result) + assert (response.return_value == expected_result) @staticmethod - def test_staged_post_snapshot( - 
my_plugin, - staged_source, - repository, - source_config, - snapshot_parameters): - + def test_staged_post_snapshot(my_plugin, staged_source, repository, + source_config, snapshot_parameters): @my_plugin.linked.post_snapshot() - def staged_post_snapshot_impl( - staged_source, repository, source_config, snapshot_parameters): + def staged_post_snapshot_impl(staged_source, repository, source_config, + snapshot_parameters): TestPlugin.assert_plugin_args( staged_source=staged_source, repository=repository, @@ -934,9 +917,8 @@ def staged_post_snapshot_impl( assert response.return_value.snapshot.parameters.json == expected @staticmethod - def test_start_staging( - my_plugin, staged_source, repository, source_config): - + def test_start_staging(my_plugin, staged_source, repository, + source_config): @my_plugin.linked.start_staging() def start_staging_impl(staged_source, repository, source_config): TestPlugin.assert_plugin_args(staged_source=staged_source, @@ -960,7 +942,6 @@ def start_staging_impl(staged_source, repository, source_config): @staticmethod def test_stop_staging(my_plugin, staged_source, repository, source_config): - @my_plugin.linked.stop_staging() def stop_staging_impl(staged_source, repository, source_config): TestPlugin.assert_plugin_args(staged_source=staged_source, @@ -983,8 +964,8 @@ def stop_staging_impl(staged_source, repository, source_config): assert stop_staging_response.return_value == expected_result @staticmethod - def test_staged_status( - my_plugin, staged_source, repository, source_config): + def test_staged_status(my_plugin, staged_source, repository, + source_config): from dlpx.virtualization.platform import Status @@ -1008,9 +989,8 @@ def staged_status_impl(staged_source, repository, source_config): assert staged_status_response.return_value.status == expected_status @staticmethod - def test_staged_worker( - my_plugin, staged_source, repository, source_config): - + def test_staged_worker(my_plugin, staged_source, repository, + 
source_config): @my_plugin.linked.worker() def staged_worker_impl(staged_source, repository, source_config): TestPlugin.assert_plugin_args(staged_source=staged_source, @@ -1035,24 +1015,24 @@ def staged_worker_impl(staged_source, repository, source_config): @staticmethod def test_staged_mount_spec(my_plugin, staged_source, repository): - from dlpx.virtualization.platform import ( - Mount, MountSpecification, OwnershipSpecification) + from dlpx.virtualization.platform import (Mount, MountSpecification, + OwnershipSpecification) @my_plugin.linked.mount_specification() def staged_mount_spec_impl(staged_source, repository): TestPlugin.assert_plugin_args(staged_source=staged_source, - repository=repository) + repository=repository) - mount = Mount( - staged_source.source_connection.environment, TEST_MOUNT_PATH) + mount = Mount(staged_source.source_connection.environment, + TEST_MOUNT_PATH) ownership_spec = OwnershipSpecification(TEST_UID, TEST_GID) return MountSpecification([mount], ownership_spec) staged_mount_spec_request = platform_pb2.StagedMountSpecRequest() TestPlugin.setup_request(request=staged_mount_spec_request, - staged_source=staged_source, - repository=repository) + staged_source=staged_source, + repository=repository) staged_mount_spec_response = ( my_plugin.linked._internal_mount_specification( @@ -1071,18 +1051,16 @@ def staged_mount_spec_impl(staged_source, repository): @staticmethod def test_staged_mount_spec_fail(my_plugin, staged_source, repository): - from dlpx.virtualization.platform import ( - Mount, MountSpecification, OwnershipSpecification) + from dlpx.virtualization.platform import (Mount, MountSpecification, + OwnershipSpecification) @my_plugin.linked.mount_specification() def staged_mount_spec_impl(staged_source, repository): TestPlugin.assert_plugin_args(staged_source=staged_source, - repository=repository) + repository=repository) # setting the shared_path should fail in the wrapper - mount = Mount( - 
staged_source.source_connection.environment, - TEST_MOUNT_PATH, - TEST_SHARED_PATH) + mount = Mount(staged_source.source_connection.environment, + TEST_MOUNT_PATH, TEST_SHARED_PATH) ownership_spec = OwnershipSpecification(TEST_UID, TEST_GID) return MountSpecification([mount], ownership_spec) @@ -1100,7 +1078,6 @@ def staged_mount_spec_impl(staged_source, repository): @staticmethod def test_upgrade_repository_success(my_plugin): - @my_plugin.upgrade.repository('2020.1.1') def upgrade_repository(old_repository): return TEST_POST_MIGRATION_METADATA_1 @@ -1125,7 +1102,6 @@ def upgrade_repository(old_repository): @staticmethod def test_upgrade_source_config_success(my_plugin): - @my_plugin.upgrade.source_config('2020.1.1') def upgrade_source_config(old_source_config): return TEST_POST_MIGRATION_METADATA_1 @@ -1150,7 +1126,6 @@ def upgrade_source_config(old_source_config): @staticmethod def test_upgrade_linked_source_success(my_plugin): - @my_plugin.upgrade.linked_source('2020.1.1') def upgrade_linked_source(old_linked_source): return TEST_POST_MIGRATION_METADATA_1 @@ -1175,7 +1150,6 @@ def upgrade_linked_source(old_linked_source): @staticmethod def test_upgrade_virtual_source_success(my_plugin): - @my_plugin.upgrade.virtual_source('2020.1.1') def upgrade_virtual_source(old_virtual_source): return TEST_POST_MIGRATION_METADATA_1 @@ -1200,7 +1174,6 @@ def upgrade_virtual_source(old_virtual_source): @staticmethod def test_upgrade_snapshot_success(my_plugin): - @my_plugin.upgrade.snapshot('2020.1.1') def upgrade_snapshot(old_snapshot): return TEST_POST_MIGRATION_METADATA_1 @@ -1285,7 +1258,6 @@ def test_upgrade_snapshot_incorrect_upgrade_object_type(my_plugin): @staticmethod def test_upgrade_snapshot_fail_with_runtime_error(my_plugin): - @my_plugin.upgrade.snapshot('2020.1.1') def upgrade_snapshot(old_snapshot): raise RuntimeError('RuntimeError in snapshot migration') diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py 
b/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py index 4c36927e..c1e5215a 100644 --- a/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin_classes.py @@ -3,11 +3,11 @@ # import pytest -from dlpx.virtualization.common._common_classes import RemoteEnvironment, RemoteHost +from dlpx.virtualization.common._common_classes import (RemoteEnvironment, + RemoteHost) from dlpx.virtualization.common.exceptions import IncorrectTypeError -from dlpx.virtualization.platform import Mount -from dlpx.virtualization.platform import OwnershipSpecification -from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import (Mount, MountSpecification, + OwnershipSpecification) from dlpx.virtualization.platform.exceptions import \ IncorrectReferenceFormatError @@ -19,8 +19,7 @@ def remote_host(): @pytest.fixture def remote_environment(remote_host): - return RemoteEnvironment("environment", - "environment-reference", + return RemoteEnvironment("environment", "environment-reference", remote_host) @@ -34,8 +33,8 @@ def test_init_mount_bad_remote_env(): with pytest.raises(IncorrectReferenceFormatError) as err_info: Mount('bad string', 'mount_path', 'shared_path') assert err_info.value.message == ( - "Reference 'bad string' is not a correctly formatted host environment " - "reference.") + "Reference 'bad string' is not a correctly formatted host " + "environment reference.") @staticmethod def test_init_mount_bad_mount_path(remote_environment): @@ -78,22 +77,31 @@ def test_init_mount_spec(remote_environment): mount = Mount(remote_environment, 'mount_path', 'shared_path') MountSpecification([mount], OwnershipSpecification(10, 10)) - # Test for passing in a reference string instead of a remote_environment object, - # which a plugin author would want to do when creating an additional mount + # + # Test for passing in a reference string instead of a 
remote_environment + # object, which a plugin author would want to do when creating an + # additional mount. + # @staticmethod - @pytest.mark.parametrize("reference_string", ['UNIX_HOST_ENVIRONMENT-10', 'WINDOWS_HOST_ENVIRONMENT-24']) + @pytest.mark.parametrize( + "reference_string", + ['UNIX_HOST_ENVIRONMENT-10', 'WINDOWS_HOST_ENVIRONMENT-24']) def test_init_mount_reference_string_success(reference_string): mount = Mount(reference_string, 'mount_path', 'shared_path') - assert mount.remote_environment.reference == reference_string and mount.remote_environment.host.reference == 'dummy reference' + assert (mount.remote_environment.reference == reference_string and + mount.remote_environment.host.reference == 'dummy reference') @staticmethod - @pytest.mark.parametrize("reference_string", ['UNIX_HOST-ENVIRONMENT-15', 'UNIX-10', 'USER-9', 'ALERT-17', 'HOST-24', 'random string']) + @pytest.mark.parametrize("reference_string", [ + 'UNIX_HOST-ENVIRONMENT-15', 'UNIX-10', 'USER-9', 'ALERT-17', 'HOST-24', + 'random string' + ]) def test_init_mount_incorrect_format_reference_string(reference_string): with pytest.raises(IncorrectReferenceFormatError) as err_info: Mount(reference_string, 'mount_path', 'shared_path') assert err_info.value.message == ( - "Reference '{}' is not a correctly formatted host environment reference.".format(reference_string) - ) + "Reference '{}' is not a correctly formatted host" + " environment reference.".format(reference_string)) @staticmethod @pytest.mark.parametrize("reference", [False, None, 1010]) @@ -102,8 +110,9 @@ def test_init_mount_invalid_reference_type(reference): Mount(reference, 'mount_path', 'shared_path') assert err_info.value.message == ( "Mount's parameter 'remote_environment' was type '{}' but " - "should be of any one of the following types: '['dlpx.virtualization.common._common_classes.RemoteEnvironment', 'basestring']'.".format(type(reference).__name__) - ) + "should be of any one of the following types: " + 
"'['dlpx.virtualization.common._common_classes.RemoteEnvironment'," + " 'basestring']'.".format(type(reference).__name__)) @staticmethod def test_init_mount_spec_mounts_not_list(): diff --git a/platform/src/test/python/dlpx/virtualization/test_upgrade.py b/platform/src/test/python/dlpx/virtualization/test_upgrade.py index 1ede1e5e..f5801e28 100755 --- a/platform/src/test/python/dlpx/virtualization/test_upgrade.py +++ b/platform/src/test/python/dlpx/virtualization/test_upgrade.py @@ -2,9 +2,13 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # -import pytest +import copy +import json import logging + +import pytest from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform import MigrationType from dlpx.virtualization.platform.exceptions import ( DecoratorNotFunctionError, MigrationIdAlreadyUsedError) from dlpx.virtualization.platform.operation import Operation as Op @@ -13,12 +17,38 @@ class TestUpgrade: @staticmethod @pytest.fixture - def my_plugin(): + def my_upgrade(): from dlpx.virtualization.platform import Plugin - yield Plugin() + yield Plugin().upgrade + + @staticmethod + @pytest.fixture + def caplog(caplog): + caplog.set_level(logging.DEBUG) + return caplog + + @staticmethod + @pytest.fixture + def upgrade_request(fake_map_param, upgrade_type, lua_version, + migration_ids): + return platform_pb2.UpgradeRequest( + pre_upgrade_parameters=fake_map_param, + type=upgrade_type, + lua_upgrade_version=lua_version, + migration_ids=migration_ids) @staticmethod - def basic_upgrade_helper(decorator, id_to_impl, upgrade_operation): + @pytest.fixture + def lua_version(): + return None + + @staticmethod + @pytest.fixture + def migration_ids(): + return [] + + @staticmethod + def basic_upgrade_helper(decorator, dict_getter, my_upgrade): @decorator('2019.10.01') def repo_upgrade_one(input_dict): output_dict = {'in': input_dict['in'], 'out': 'first'} @@ -29,152 +59,152 @@ def repo_upgrade_two(input_dict): output_dict = {'in': 
input_dict['in'], 'out': 'second'} return output_dict + id_to_impl = dict_getter() migration_one = id_to_impl['2019.10.1'] migration_two = id_to_impl['2019.10.2'] assert migration_one == repo_upgrade_one assert migration_two == repo_upgrade_two - assert migration_one({'in':'in_one'}) == {'in': 'in_one', - 'out': 'first'} - assert migration_two({'in':'in_two'}) == {'in': 'in_two', - 'out': 'second'} + assert migration_one({'in': 'in_one'}) == { + 'in': 'in_one', + 'out': 'first' + } + assert migration_two({'in': 'in_two'}) == { + 'in': 'in_two', + 'out': 'second' + } - assert upgrade_operation.migration_id_list == ['2019.10.1', - '2019.10.2'] + assert my_upgrade.migration_id_list == ['2019.10.1', '2019.10.2'] @staticmethod def decorator_not_function_helper(decorator, op): with pytest.raises(DecoratorNotFunctionError) as err_info: + @decorator('2019.10.03') class RandomClass(object): pass message = err_info.value.message assert message == ("The object '{}' decorated by '{}' is" - " not a function.".format('RandomClass', - op.value)) + " not a function.".format('RandomClass', op.value)) @staticmethod - def test_upgrade_repository(my_plugin): + def test_upgrade_repository(my_upgrade): TestUpgrade.basic_upgrade_helper( - my_plugin.upgrade.repository, - my_plugin.upgrade.repository_id_to_impl, - my_plugin.upgrade) + my_upgrade.repository, + my_upgrade.platform_migrations.get_repository_dict, my_upgrade) - TestUpgrade.decorator_not_function_helper( - my_plugin.upgrade.repository, Op.UPGRADE_REPOSITORY) + TestUpgrade.decorator_not_function_helper(my_upgrade.repository, + Op.UPGRADE_REPOSITORY) @staticmethod - def test_upgrade_source_config(my_plugin): + def test_upgrade_source_config(my_upgrade): TestUpgrade.basic_upgrade_helper( - my_plugin.upgrade.source_config, - my_plugin.upgrade.source_config_id_to_impl, - my_plugin.upgrade) + my_upgrade.source_config, + my_upgrade.platform_migrations.get_source_config_dict, my_upgrade) - TestUpgrade.decorator_not_function_helper( - 
my_plugin.upgrade.source_config, Op.UPGRADE_SOURCE_CONFIG) + TestUpgrade.decorator_not_function_helper(my_upgrade.source_config, + Op.UPGRADE_SOURCE_CONFIG) @staticmethod - def test_upgrade_linked_source(my_plugin): + def test_upgrade_linked_source(my_upgrade): TestUpgrade.basic_upgrade_helper( - my_plugin.upgrade.linked_source, - my_plugin.upgrade.linked_source_id_to_impl, - my_plugin.upgrade) + my_upgrade.linked_source, + my_upgrade.platform_migrations.get_linked_source_dict, my_upgrade) - TestUpgrade.decorator_not_function_helper( - my_plugin.upgrade.linked_source, Op.UPGRADE_LINKED_SOURCE) + TestUpgrade.decorator_not_function_helper(my_upgrade.linked_source, + Op.UPGRADE_LINKED_SOURCE) @staticmethod - def test_upgrade_virtual_source(my_plugin): + def test_upgrade_virtual_source(my_upgrade): TestUpgrade.basic_upgrade_helper( - my_plugin.upgrade.virtual_source, - my_plugin.upgrade.virtual_source_id_to_impl, - my_plugin.upgrade) + my_upgrade.virtual_source, + my_upgrade.platform_migrations.get_virtual_source_dict, my_upgrade) - TestUpgrade.decorator_not_function_helper( - my_plugin.upgrade.virtual_source, Op.UPGRADE_VIRTUAL_SOURCE) + TestUpgrade.decorator_not_function_helper(my_upgrade.virtual_source, + Op.UPGRADE_VIRTUAL_SOURCE) @staticmethod - def test_upgrade_snapshot(my_plugin): + def test_upgrade_snapshot(my_upgrade): TestUpgrade.basic_upgrade_helper( - my_plugin.upgrade.snapshot, - my_plugin.upgrade.snapshot_id_to_impl, - my_plugin.upgrade) + my_upgrade.snapshot, + my_upgrade.platform_migrations.get_snapshot_dict, my_upgrade) - TestUpgrade.decorator_not_function_helper( - my_plugin.upgrade.snapshot, Op.UPGRADE_SNAPSHOT) + TestUpgrade.decorator_not_function_helper(my_upgrade.snapshot, + Op.UPGRADE_SNAPSHOT) @staticmethod - def test_upgrade_same_migration_id_used(my_plugin): - @my_plugin.upgrade.repository('2019.10.01') + def test_upgrade_same_migration_id_used(my_upgrade): + @my_upgrade.repository('2019.10.01') def repo_upgrade_one(): return 'repo_one' - 
@my_plugin.upgrade.repository('2019.10.04') + @my_upgrade.repository('2019.10.04') def repo_upgrade_two(): return 'repo_two' - @my_plugin.upgrade.repository('2019.10.006') + @my_upgrade.repository('2019.10.006') def repo_upgrade_three(): return 'repo_three' - @my_plugin.upgrade.source_config('2019.10.02') + @my_upgrade.source_config('2019.10.02') def sc_upgrade_one(): return 'sc_one' with pytest.raises(MigrationIdAlreadyUsedError) as err_info_one: - @my_plugin.upgrade.source_config('2019.10.0004') + + @my_upgrade.source_config('2019.10.0004') def sc_upgrade_two(): return 'sc_two' - @my_plugin.upgrade.linked_source('2019.10.3.000.0') + @my_upgrade.linked_source('2019.10.3.000.0') def ls_upgrade_one(): return 'ls_one' with pytest.raises(MigrationIdAlreadyUsedError) as err_info_two: - @my_plugin.upgrade.virtual_source('2019.10.03') + + @my_upgrade.virtual_source('2019.10.03') def vs_upgrade_one(): return 'vs_one' - @my_plugin.upgrade.virtual_source('2019.10.05') + @my_upgrade.virtual_source('2019.10.05') def vs_upgrade_two(): return 'vs_two' with pytest.raises(MigrationIdAlreadyUsedError) as err_info_three: - @my_plugin.upgrade.snapshot('2019.010.001') + + @my_upgrade.snapshot('2019.010.001') def snap_upgrade_one(): return 'snap_one' - @my_plugin.upgrade.snapshot('2019.10.12') + @my_upgrade.snapshot('2019.10.12') def snap_upgrade_two(): return 'snap_two' - assert my_plugin.upgrade.migration_id_list == ['2019.10.1', - '2019.10.2', - '2019.10.3', - '2019.10.4', - '2019.10.5', - '2019.10.6', - '2019.10.12'] - - repo_one = my_plugin.upgrade.repository_id_to_impl['2019.10.1'] - repo_two = my_plugin.upgrade.repository_id_to_impl['2019.10.4'] - repo_three = my_plugin.upgrade.repository_id_to_impl['2019.10.6'] + assert my_upgrade.migration_id_list == [ + '2019.10.1', '2019.10.2', '2019.10.3', '2019.10.4', '2019.10.5', + '2019.10.6', '2019.10.12' + ] + + platform_migrations = my_upgrade.platform_migrations + repo_one = platform_migrations.get_repository_dict()['2019.10.1'] + 
repo_two = platform_migrations.get_repository_dict()['2019.10.4'] + repo_three = platform_migrations.get_repository_dict()['2019.10.6'] assert repo_one == repo_upgrade_one assert repo_two == repo_upgrade_two assert repo_three == repo_upgrade_three - sc_one = my_plugin.upgrade.source_config_id_to_impl['2019.10.2'] + sc_one = platform_migrations.get_source_config_dict()['2019.10.2'] assert sc_one == sc_upgrade_one - ls_one = my_plugin.upgrade.linked_source_id_to_impl['2019.10.3'] + ls_one = platform_migrations.get_linked_source_dict()['2019.10.3'] assert ls_one == ls_upgrade_one - vs_two = my_plugin.upgrade.virtual_source_id_to_impl['2019.10.5'] + vs_two = platform_migrations.get_virtual_source_dict()['2019.10.5'] assert vs_two == vs_upgrade_two - snap_two = my_plugin.upgrade.snapshot_id_to_impl['2019.10.12'] + snap_two = platform_migrations.get_snapshot_dict()['2019.10.12'] assert snap_two == snap_upgrade_two assert err_info_one.value.message == ( @@ -193,115 +223,118 @@ def snap_upgrade_two(): " as another migration.") @staticmethod - @pytest.fixture - def caplog(caplog): - caplog.set_level(logging.DEBUG) - return caplog - - @staticmethod - @pytest.fixture - def upgrade_request(fake_map_param, upgrade_type): - return platform_pb2.UpgradeRequest( - pre_upgrade_parameters=fake_map_param, - type=upgrade_type, - migration_ids=[] - ) + @pytest.mark.parametrize('fake_map_param,upgrade_type,object_op', [ + ({ + 'APPDATA_REPOSITORY-1': '{"name": "repo", "migrations": []}' + }, platform_pb2.UpgradeRequest.REPOSITORY, 'repository'), + ({ + 'APPDATA_SOURCE_CONFIG-1': '{"name": "sc", "migrations": []}' + }, platform_pb2.UpgradeRequest.SOURCECONFIG, 'source_config'), + ({ + 'APPDATA_STAGED_SOURCE-1': '{"name": "ls", "migrations": []}' + }, platform_pb2.UpgradeRequest.LINKEDSOURCE, 'linked_source'), + ({ + 'APPDATA_VIRTUAL_SOURCE-1': '{"name": "vs", "migrations": []}' + }, platform_pb2.UpgradeRequest.VIRTUALSOURCE, 'virtual_source'), + ({ + 'APPDATA_SNAPSHOT-1': '{"name": 
"snap", "migrations": []}' + }, platform_pb2.UpgradeRequest.SNAPSHOT, 'snapshot'), + ]) + @pytest.mark.parametrize('lua_version,migration_ids', [( + '1.2', + ['2020.4.2', '2020.4.4'], + )]) + def test_lua_upgrade(my_upgrade, upgrade_request, object_op, + get_impls_to_exec): + upgrade_type_decorator = getattr(my_upgrade, object_op) + + @upgrade_type_decorator('1.1', MigrationType.LUA) + def repo_upgrade_one(input_dict): + output_dict = copy.deepcopy(input_dict) + output_dict['migrations'].append('lua repo 1.1') + return output_dict - @staticmethod - @pytest.mark.parametrize('fake_map_param,upgrade_type', - [({ - 'APPDATA_REPOSITORY-1': '{}', - 'APPDATA_REPOSITORY-2': '{}', - 'APPDATA_REPOSITORY-3': '{}' - }, platform_pb2.UpgradeRequest.REPOSITORY, - )]) - def test_repository(my_plugin, upgrade_request, fake_map_param, caplog): - upgrade_response = my_plugin.upgrade._internal_repository( - upgrade_request) + @upgrade_type_decorator('1.2', MigrationType.LUA) + def repo_upgrade_one(input_dict): + output_dict = copy.deepcopy(input_dict) + output_dict['migrations'].append('lua repo 1.2') + return output_dict - # Check that the response's oneof is set to return_value and not error - assert upgrade_response.WhichOneof('result') == 'return_value' - assert (upgrade_response.return_value.post_upgrade_parameters - == fake_map_param) - assert (caplog.records[0].message == - 'Upgrade repositories [APPDATA_REPOSITORY-1,' - ' APPDATA_REPOSITORY-2, APPDATA_REPOSITORY-3]') + @upgrade_type_decorator('2020.4.2') + def repo_upgrade_two(input_dict): + output_dict = copy.deepcopy(input_dict) + output_dict['migrations'].append('platform repo 2020.4.2') + return output_dict - @staticmethod - @pytest.mark.parametrize('fake_map_param,upgrade_type', - [({ - 'APPDATA_SOURCE_CONFIG-1': '{}', - 'APPDATA_SOURCE_CONFIG-2': '{}', - 'APPDATA_SOURCE_CONFIG-3': '{}', - 'APPDATA_SOURCE_CONFIG-4': '{}' - }, platform_pb2.UpgradeRequest.SOURCECONFIG, - )]) - def test_source_config(my_plugin, 
upgrade_request, fake_map_param, caplog): - upgrade_response = my_plugin.upgrade._internal_source_config( - upgrade_request) + @upgrade_type_decorator('2020.4.3') + def repo_upgrade_two(input_dict): + output_dict = copy.deepcopy(input_dict) + output_dict['migrations'].append('platform repo 2020.4.3') + return output_dict - # Check that the response's oneof is set to return_value and not error - assert upgrade_response.WhichOneof('result') == 'return_value' - assert (upgrade_response.return_value.post_upgrade_parameters - == fake_map_param) - assert (caplog.records[0].message == - 'Upgrade source configs [APPDATA_SOURCE_CONFIG-1,' - ' APPDATA_SOURCE_CONFIG-2, APPDATA_SOURCE_CONFIG-3,' - ' APPDATA_SOURCE_CONFIG-4]') + @upgrade_type_decorator('2020.4.4') + def repo_upgrade_two(input_dict): + output_dict = copy.deepcopy(input_dict) + output_dict['migrations'].append('platform repo 2020.4.4') + return output_dict - @staticmethod - @pytest.mark.parametrize('fake_map_param,upgrade_type', - [({ - 'APPDATA_STAGED_SOURCE-1': '{}', - 'APPDATA_STAGED_SOURCE-2': '{}', - 'APPDATA_STAGED_SOURCE-3': '{}' - }, platform_pb2.UpgradeRequest.LINKEDSOURCE, - )]) - def test_linked_source(my_plugin, upgrade_request, fake_map_param, caplog): - upgrade_response = my_plugin.upgrade._internal_linked_source( - upgrade_request) + lua_getter = getattr(my_upgrade.lua_migrations, get_impls_to_exec) - # Check that the response's oneof is set to return_value and not error - assert upgrade_response.WhichOneof('result') == 'return_value' - assert (upgrade_response.return_value.post_upgrade_parameters - == fake_map_param) - assert (caplog.records[0].message == - 'Upgrade linked sources [APPDATA_STAGED_SOURCE-1,' - ' APPDATA_STAGED_SOURCE-2, APPDATA_STAGED_SOURCE-3]') + platform_getter = getattr(my_upgrade.platform_migrations, + get_impls_to_exec) - @staticmethod - @pytest.mark.parametrize('fake_map_param,upgrade_type', - [({ - 'APPDATA_VIRTUAL_SOURCE-1': '{}', - 'APPDATA_VIRTUAL_SOURCE-2': '{}' - }, 
platform_pb2.UpgradeRequest.VIRTUALSOURCE, - )]) - def test_virtual_source( - my_plugin, upgrade_request, fake_map_param, caplog): - upgrade_response = my_plugin.upgrade._internal_virtual_source( - upgrade_request) + post_upgrade_parameters = my_upgrade._run_migration_upgrades( + upgrade_request, lua_getter, platform_getter) - # Check that the response's oneof is set to return_value and not error - assert upgrade_response.WhichOneof('result') == 'return_value' - assert (upgrade_response.return_value.post_upgrade_parameters - == fake_map_param) - assert (caplog.records[0].message == - 'Upgrade virtual sources [APPDATA_VIRTUAL_SOURCE-1,' - ' APPDATA_VIRTUAL_SOURCE-2]') + expected = [ + "lua repo 1.2", "platform repo 2020.4.2", "platform repo 2020.4.4" + ] + for metadata in post_upgrade_parameters.values(): + current_metadata = json.loads(metadata) + assert current_metadata['migrations'] == expected @staticmethod - @pytest.mark.parametrize('fake_map_param,upgrade_type', - [({ - 'APPDATA_SNAPSHOT-1': '{}' - }, platform_pb2.UpgradeRequest.SNAPSHOT, - )]) - def test_snapshot(my_plugin, upgrade_request, fake_map_param, caplog): - upgrade_response = my_plugin.upgrade._internal_snapshot( - upgrade_request) + @pytest.mark.parametrize( + 'func_name,fake_map_param,upgrade_type,expected_logs', + [('_internal_repository', { + 'APPDATA_REPOSITORY-1': '{}', + 'APPDATA_REPOSITORY-2': '{}', + 'APPDATA_REPOSITORY-3': '{}' + }, platform_pb2.UpgradeRequest.REPOSITORY, + 'Upgrade repositories [APPDATA_REPOSITORY-1,' + ' APPDATA_REPOSITORY-2, APPDATA_REPOSITORY-3]'), + ('_internal_source_config', { + 'APPDATA_SOURCE_CONFIG-1': '{}', + 'APPDATA_SOURCE_CONFIG-2': '{}', + 'APPDATA_SOURCE_CONFIG-3': '{}', + 'APPDATA_SOURCE_CONFIG-4': '{}' + }, platform_pb2.UpgradeRequest.SOURCECONFIG, + 'Upgrade source configs [APPDATA_SOURCE_CONFIG-1,' + ' APPDATA_SOURCE_CONFIG-2, APPDATA_SOURCE_CONFIG-3,' + ' APPDATA_SOURCE_CONFIG-4]'), + ('_internal_linked_source', { + 'APPDATA_STAGED_SOURCE-1': '{}', + 
'APPDATA_STAGED_SOURCE-2': '{}', + 'APPDATA_STAGED_SOURCE-3': '{}' + }, platform_pb2.UpgradeRequest.LINKEDSOURCE, + 'Upgrade linked sources [APPDATA_STAGED_SOURCE-1,' + ' APPDATA_STAGED_SOURCE-2, APPDATA_STAGED_SOURCE-3]'), + ('_internal_virtual_source', { + 'APPDATA_VIRTUAL_SOURCE-1': '{}', + 'APPDATA_VIRTUAL_SOURCE-2': '{}' + }, platform_pb2.UpgradeRequest.VIRTUALSOURCE, + 'Upgrade virtual sources [APPDATA_VIRTUAL_SOURCE-1,' + ' APPDATA_VIRTUAL_SOURCE-2]'), + ('_internal_snapshot', { + 'APPDATA_SNAPSHOT-1': '{}' + }, platform_pb2.UpgradeRequest.SNAPSHOT, + 'Upgrade snapshots [APPDATA_SNAPSHOT-1]')]) + def test_upgrade_requests(my_upgrade, func_name, fake_map_param, + expected_logs, upgrade_request, caplog): + upgrade_response = getattr(my_upgrade, func_name)(upgrade_request) # Check that the response's oneof is set to return_value and not error assert upgrade_response.WhichOneof('result') == 'return_value' - assert (upgrade_response.return_value.post_upgrade_parameters - == fake_map_param) - assert (caplog.records[0].message == - 'Upgrade snapshots [APPDATA_SNAPSHOT-1]') + assert (upgrade_response.return_value.post_upgrade_parameters == + fake_map_param) + assert (caplog.records[0].message == expected_logs) diff --git a/tools/.python-version b/tools/.python-version new file mode 100644 index 00000000..43c4dbe6 --- /dev/null +++ b/tools/.python-version @@ -0,0 +1 @@ +2.7.17 diff --git a/tools/requirements.txt b/tools/requirements.txt index bc7f0831..c48971c5 100644 --- a/tools/requirements.txt +++ b/tools/requirements.txt @@ -25,10 +25,9 @@ pycodestyle==2.5.0 pyflakes==2.1.1 pyparsing==2.4.6 pytest-cov==2.8.1 -pytest==4.6.9 +pytest==4.6.11 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' -wcwidth==0.1.8 yapf==0.28 zipp==0.6.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/VERSION b/tools/src/main/python/dlpx/virtualization/_internal/VERSION index 359a5b95..50aea0e7 100644 --- 
a/tools/src/main/python/dlpx/virtualization/_internal/VERSION +++ b/tools/src/main/python/dlpx/virtualization/_internal/VERSION @@ -1 +1 @@ -2.0.0 \ No newline at end of file +2.1.0 \ No newline at end of file diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py index 79c353a5..8db88052 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py @@ -214,6 +214,13 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): if plugin_config_content.get('externalVersion'): artifact['externalVersion'] = plugin_config_content['externalVersion'] + if plugin_config_content.get('luaName'): + artifact['luaName'] = plugin_config_content['luaName'] + + if plugin_config_content.get('minimumLuaVersion'): + artifact['minimumLuaVersion'] = plugin_config_content[ + 'minimumLuaVersion'] + return artifact diff --git a/tools/src/main/python/dlpx/virtualization/_internal/file_util.py b/tools/src/main/python/dlpx/virtualization/_internal/file_util.py index 6c888126..504a9bd1 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/file_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/file_util.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -62,8 +62,8 @@ def standardize_path(path): if standardized_path == '.': standardized_path = os.path.realpath(standardized_path) else: - standardized_path = os.path.normpath(standardized_path) - standardized_path = os.path.normcase(standardized_path) + standardized_path = os.path.abspath(standardized_path) + standardized_path = os.path.abspath(standardized_path) return standardized_path @@ -95,7 +95,6 @@ def get_src_dir_path(config_file_path, src_dir): # absolute for comparison later. 
plugin_root_dir = os.path.dirname(config_file_path) plugin_root_dir = standardize_path(plugin_root_dir) - plugin_root_dir = os.path.abspath(plugin_root_dir) # The plugin's src directory is relative to the plugin root not to the # current working directory. os.path.abspath makes a relative path @@ -109,8 +108,13 @@ def get_src_dir_path(config_file_path, src_dir): if not os.path.isdir(src_dir_absolute): raise exceptions.PathTypeError(src_dir_absolute, 'directory') - if not src_dir_absolute.startswith( - plugin_root_dir) or src_dir_absolute == plugin_root_dir: + normcase_src_dir = os.path.normcase(src_dir_absolute) + normcase_plugin_root = os.path.normcase(plugin_root_dir) + + if ( + not normcase_src_dir.startswith(normcase_plugin_root) + or normcase_src_dir == normcase_plugin_root + ): raise exceptions.UserError( "The src directory {} is not a subdirectory " "of the plugin root at {}".format(src_dir_absolute, diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 9f58feb1..f3ad2119 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -114,7 +114,7 @@ def __import_in_subprocess(src_dir, module, entry_point, plugin_type, in a sub-process and on completion return the output. """ queue = Queue() - process = Process(target=_get_manifest, + process = Process(target=_import_module_and_get_manifest, args=(queue, src_dir, module, entry_point, plugin_type, validate)) process.start() @@ -184,7 +184,8 @@ def __check_for_required_methods(self): return warnings -def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): +def _import_module_and_get_manifest(queue, src_dir, module, entry_point, + plugin_type, validate): """ Imports the plugin module, runs validations and returns the manifest. 
""" @@ -199,6 +200,26 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): # return + manifest = get_manifest(src_dir, module, entry_point, + module_content, plugin_type, + validate, queue) + queue.put({'manifest': manifest}) + + +def get_manifest(src_dir, module, entry_point, module_content, + plugin_type, validate, queue): + """ + Helper method to run validations and prepare the manifest. + + NOTE: + This code is moved out into a separate method to help running + unit tests on windows for validations. Since the behaviour of + multiprocessing.Process module is different for windows and linux, + unit testing validate_plugin_module method using mock has issues. + + More details at : + https://rhodesmill.org/brandon/2010/python-multiprocessing-linux-windows/ + """ # # Create an instance of plugin module with associated state to pass around # to the validation code. @@ -219,8 +240,7 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): warnings = import_util.validate_post_import(plugin_module) _process_warnings(queue, warnings) - manifest = _prepare_manifest(entry_point, module_content) - queue.put({'manifest': manifest}) + return _prepare_manifest(entry_point, module_content) def _import_helper(queue, src_dir, module): diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py index 46c46435..9608a2db 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py @@ -40,7 +40,8 @@ def __init__(self, self.__pre_import_checks = [ self.__validate_plugin_config_content, self.__validate_plugin_entry_point, - self.__check_for_undefined_names + self.__check_for_undefined_names, + self.__check_for_lua_name_and_min_version ] @property @@ -218,6 +219,28 @@ def __check_for_undefined_names(self): if warnings and len(warnings) 
> 0: raise exceptions.ValidationFailedError(warnings) + def __check_for_lua_name_and_min_version(self): + """ + Check if both lua name and minimum lua version are present if either + property is set. + """ + warnings = defaultdict(list) + + if (self.__plugin_config_content.get('luaName') and not + self.__plugin_config_content.get('minimumLuaVersion')): + msg = ('Failed to process property "luaName" without ' + '"minimumLuaVersion" set in the plugin config.') + warnings['exception'].append(exceptions.UserError(msg)) + + if (self.__plugin_config_content.get('minimumLuaVersion') and not + self.__plugin_config_content.get('luaName')): + msg = ('Failed to process property "minimumLuaVersion" without ' + '"luaName" set in the plugin config.') + warnings['exception'].append(exceptions.UserError(msg)) + + if warnings and len(warnings) > 0: + raise exceptions.ValidationFailedError(warnings) + @staticmethod def split_entry_point(entry_point): entry_point_strings = entry_point.split(':') diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 54a8c453..c12e9e3b 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -20,7 +20,7 @@ # versions in those packages until they are shipped out of band. 
# [General] -engine_api_version = 1.11.2 +engine_api_version = 1.11.3 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json index 5d49e478..81241641 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json @@ -50,6 +50,14 @@ "buildNumber": { "type": "string", "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" + }, + "luaName": { + "type": "string", + "pattern": "^[a-z0-9_:-]+$" + }, + "minimumLuaVersion": { + "type": "string", + "pattern": "^([0-9]+)\\.([0-9]+)$" } }, "additionalProperties": false, diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json index 703382fa..175fbedf 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json @@ -49,6 +49,14 @@ "buildNumber": { "type": "string", "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" + }, + "luaName": { + "type": "string", + "pattern": "^[a-z0-9_:-]+$" + }, + "minimumLuaVersion": { + "type": "string", + "pattern": "^([0-9]+)\\.([0-9]+)$" } }, "additionalProperties": false, diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml index 063877ad..bb95c585 100644 --- 
a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml @@ -140,15 +140,15 @@ EXPECTED_DIRECT_ARGS_BY_OP: - repository EXPECTED_UPGRADE_ARGS: - repository_id_to_impl: + _repository_id_to_impl: - old_repository - source_config_id_to_impl: + _source_config_id_to_impl: - old_source_config - linked_source_id_to_impl: + _linked_source_id_to_impl: - old_linked_source - virtual_source_id_to_impl: + _virtual_source_id_to_impl: - old_virtual_source - snapshot_id_to_impl: + _snapshot_id_to_impl: - old_snapshot REQUIRED_METHODS_BY_PLUGIN_TYPE: diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 7e4271b1..4daf8afa 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -320,7 +320,8 @@ def test_zip_and_encode_source_files_invalid_dir(src_dir): @staticmethod @mock.patch('compileall.compile_dir') - def test_zip_and_encode_source_files_compileall_fail(mock_compile, src_dir): + def test_zip_and_encode_source_files_compileall_fail( + mock_compile, src_dir): mock_compile.return_value = 0 with pytest.raises(exceptions.UserError) as err_info: build.zip_and_encode_source_files(src_dir) @@ -432,16 +433,17 @@ def test_plugin_bad_language(mock_generate_python, plugin_config_file, assert not mock_generate_python.called @staticmethod - @pytest.mark.parametrize('src_dir', ['/not/a/real/dir/src']) + @pytest.mark.parametrize('src_dir', [os.path.join('fake', 'dir')]) @mock.patch('os.path.isabs', return_value=False) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') def test_plugin_no_src_dir(mock_generate_python, mock_path_is_relative, - plugin_config_file, artifact_file): + plugin_config_file, artifact_file, 
tmpdir): with pytest.raises(exceptions.UserError) as err_info: build.build(plugin_config_file, artifact_file, False, False) message = err_info.value.message - assert message == "The path '/not/a/real/dir/src' does not exist." + assert message == "The path '{}' does not exist.".format( + tmpdir.join(os.path.join('fake', 'dir')).strpath) assert not mock_generate_python.called @@ -499,7 +501,17 @@ def test_schema_file_bad_permission(mock_generate_python, plugin_config_file, artifact_file, schema_file): # Make it so we can't read the file - os.chmod(schema_file, 0000) + if os.name == 'nt': + pytest.skip( + 'skipping this test on windows as os.chmod has issues removing' + ' permissions on file') + # + # The schema_file can be made unreadable on windows using pypiwin32 but + # since it adds dependency on pypiwin32 for the sdk, skipping this test + # instead of potentially destabilizing the sdk by adding this dependency. + # + else: + os.chmod(schema_file, 0000) with pytest.raises(exceptions.UserError) as err_info: build.build(plugin_config_file, artifact_file, False, False) @@ -651,3 +663,25 @@ def test_build_number_parameter(plugin_config_content, src_dir, plugin_config_content, src_dir, schema_content, {}) assert expected == upload_artifact['buildNumber'] + + @staticmethod + @pytest.mark.parametrize('lua_name, expected', [ + pytest.param('lua-toolkit-1', 'lua-toolkit-1'), + pytest.param('nix_staged_python', 'nix_staged_python') + ]) + def test_lua_name_parameter(plugin_config_content, src_dir, schema_content, + expected): + upload_artifact = build.prepare_upload_artifact( + plugin_config_content, src_dir, schema_content, {}) + assert expected == upload_artifact.get('luaName') + + @staticmethod + @pytest.mark.parametrize( + 'minimum_lua_version, expected', + [pytest.param('2.3', '2.3'), + pytest.param('2.4', '2.4')]) + def test_minimum_lua_version_parameter(plugin_config_content, src_dir, + schema_content, expected): + upload_artifact = build.prepare_upload_artifact( + 
plugin_config_content, src_dir, schema_content, {}) + assert expected == upload_artifact.get('minimumLuaVersion') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py index adca36ce..383e9189 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py @@ -363,12 +363,17 @@ def test_copy_generated_to_dir_success(tmpdir, popen_helper): @staticmethod def test_copy_generated_to_dir_fail(tmpdir): - src_dir = '/not/a/real/dir' + src_dir = os.path.join('fake', 'dir') # dst_dir needs to be real so that making the dir inside it works. dst_dir = tmpdir.strpath with pytest.raises(OSError) as err_info: codegen._copy_generated_to_dir(src_dir, dst_dir) - assert err_info.value.strerror == 'No such file or directory' - assert err_info.value.filename.startswith('/not/a/real/dir') + if os.name == 'nt': + assert err_info.value.strerror == 'The system cannot find the path' \ + ' specified' + else: + assert err_info.value.strerror == 'No such file or directory' + + assert err_info.value.filename.startswith(src_dir) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py index d663e7c3..6a9e158c 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_delphix_client.py @@ -224,27 +224,27 @@ def httpretty_enabled(): JOB_RESP_FAIL = (('{"type": "OKResult", "status": "OK", "result": ' '{"jobState": "FAILED", "events": []}}'), { - 'X-Frame-Options': 'SAMEORIGIN', - 'X-Content-Type-Options': 'nosniff', - 'X-XSS-Protection': '1; mode=block', - 'Cache-Control': 'max-age=0', - 'Expires': 'Mon, 04 Feb 2019 23:12:00 GMT', - 
'Content-Type': 'application/json', - 'Content-Length': '71', - 'Date': 'Mon, 09 Mar 2020 12:09:27 GMT' - }) + 'X-Frame-Options': 'SAMEORIGIN', + 'X-Content-Type-Options': 'nosniff', + 'X-XSS-Protection': '1; mode=block', + 'Cache-Control': 'max-age=0', + 'Expires': 'Mon, 04 Feb 2019 23:12:00 GMT', + 'Content-Type': 'application/json', + 'Content-Length': '71', + 'Date': 'Mon, 09 Mar 2020 12:09:27 GMT' + }) JOB_RESP_TIMED_OUT = (('{"type": "OKResult", "status": "OK", "result": ' '{"jobState": "RUNNING", "events": []}}'), { - 'X-Frame-Options': 'SAMEORIGIN', - 'X-Content-Type-Options': 'nosniff', - 'X-XSS-Protection': '1; mode=block', - 'Cache-Control': 'max-age=0', - 'Expires': 'Mon, 04 Feb 2019 23:12:00 GMT', - 'Content-Type': 'application/json', - 'Content-Length': '71', - 'Date': 'Mon, 09 Mar 2020 12:09:27 GMT' - }) + 'X-Frame-Options': 'SAMEORIGIN', + 'X-Content-Type-Options': 'nosniff', + 'X-XSS-Protection': '1; mode=block', + 'Cache-Control': 'max-age=0', + 'Expires': 'Mon, 04 Feb 2019 23:12:00 GMT', + 'Content-Type': 'application/json', + 'Content-Length': '71', + 'Date': 'Mon, 09 Mar 2020 12:09:27 GMT' + }) PLUGIN_RESP_SUCCESS = ( '{"type": "ListResult", "status": "OK", "result": [' diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 28ccaef1..8c02f01c 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -171,7 +171,8 @@ def artifact_file_created(): @pytest.fixture def plugin_config_content(plugin_id, plugin_name, external_version, language, host_types, plugin_type, entry_point, src_dir, - schema_file, manual_discovery, build_number): + schema_file, manual_discovery, build_number, + lua_name, minimum_lua_version): """ This fixutre creates the dict expected in the properties yaml file the customer must provide for the build and compile commands. 
@@ -216,6 +217,12 @@ def plugin_config_content(plugin_id, plugin_name, external_version, language, if build_number: config['buildNumber'] = build_number + if lua_name: + config['luaName'] = lua_name + + if minimum_lua_version: + config['minimumLuaVersion'] = minimum_lua_version + return config @@ -274,6 +281,16 @@ def build_number(): return '2.0.0' +@pytest.fixture +def lua_name(): + return 'lua-toolkit-1' + + +@pytest.fixture +def minimum_lua_version(): + return "2.3" + + @pytest.fixture def artifact_manual_discovery(): return True @@ -561,6 +578,8 @@ def basic_artifact_content(engine_api, virtual_source_definition, 'engineApi': engine_api, 'rootSquashEnabled': True, 'buildNumber': '2', + 'luaName': 'lua-toolkit-1', + 'minimumLuaVersion': '2.3', 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'manifest': {} } @@ -608,6 +627,8 @@ def artifact_content(engine_api, virtual_source_definition, 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'rootSquashEnabled': True, 'buildNumber': '2', + 'luaName': 'lua-toolkit-1', + 'minimumLuaVersion': '2.3', 'manifest': {} } @@ -637,7 +658,7 @@ def artifact_content(engine_api, virtual_source_definition, @pytest.fixture def engine_api(): - return {'type': 'APIVersion', 'major': 1, 'minor': 11, 'micro': 2} + return {'type': 'APIVersion', 'major': 1, 'minor': 11, 'micro': 3} @pytest.fixture diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_bad_format.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_bad_format.py new file mode 100644 index 00000000..ed4a7c47 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_bad_format.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import MigrationType, Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('1.0.patchversion', MigrationType.LUA) +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_not_string.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_not_string.py new file mode 100644 index 00000000..9171d934 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_not_string.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import MigrationType, Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository(['testing', 'out', 'validation'], MigrationType.LUA) +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_used.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_used.py new file mode 100644 index 00000000..cb6a2aa2 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/lua_id_used.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import MigrationType, Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('5.4', MigrationType.LUA) +def repo_upgrade(old_repository): + return old_repository + + +@plugin.upgrade.snapshot('5.04', MigrationType.LUA) +def snap_upgrade(old_snapshot): + return old_snapshot + + +@plugin.upgrade.repository('5.4', MigrationType.LUA) +def repo_upgrade_two(old_repository): + return old_repository \ No newline at end of file diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py index 010c705a..24d57a34 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py @@ -2,7 +2,7 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # # flake8: noqa -from dlpx.virtualization.platform import Plugin, Status +from dlpx.virtualization.platform import MigrationType, Plugin, Status direct = Plugin() @@ -74,6 +74,16 @@ def unconfigure(repository, source_config, virtual_source): pass +@direct.upgrade.repository('1.3', MigrationType.LUA) +def repo_upgrade(old_repository): + return old_repository + + +@direct.upgrade.snapshot('1.3', MigrationType.LUA) +def snap_upgrade(old_snapshot): + return old_snapshot + + @direct.upgrade.repository('2019.10.30') def repo_upgrade(old_repository): return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py index 0c925bec..117aaa94 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import os @@ -34,13 +34,13 @@ def test_get_src_dir_path_relative(tmp_path): cwd = os.getcwd() try: - os.chdir(tmp_path.as_posix()) - actual = file_util.get_src_dir_path('plugin/plugin_config.yml', - 'src') + os.chdir(str(tmp_path)) + actual = file_util.get_src_dir_path( + os.path.join('plugin', 'plugin_config.yml'), 'src') finally: os.chdir(cwd) - assert actual == src_dir.as_posix() + assert actual == str(src_dir) @staticmethod def test_get_src_dir_path_is_abs_fail(): @@ -78,15 +78,16 @@ def test_get_src_dir_path_is_dir_fail(mock_existing_path, @mock.patch('os.path.isdir', return_value=True) @mock.patch('os.path.exists', return_value=True) @mock.patch('os.path.isabs', return_value=False) - @pytest.mark.parametrize( - 'plugin_config_file_path, src_dir_path', - [(os.path.join(os.getenv('HOME'), 'plugin/file_name'), '.'), - ('/mongo/file_name', '/src'), ('/plugin/mongo/file_name', '/plugin'), - ('/plugin/file_name', '/plugin/src/../..')]) + @pytest.mark.parametrize('plugin_config_file_path, src_dir_path', + [('plugin/file_name', '.'), + ('/mongo/file_name', '/src'), + ('/plugin/mongo/file_name', '/plugin'), + ('/plugin/file_name', '/plugin/src/../..')]) def test_get_src_dir_path_fail(mock_relative_path, mock_existing_path, mock_directory_path, plugin_config_file_path, src_dir_path): - expected_plugin_root_dir = os.path.dirname(plugin_config_file_path) + expected_plugin_root_dir = os.path.join( + os.getcwd(), os.path.dirname(plugin_config_file_path)) expected_plugin_root_dir = file_util.standardize_path( expected_plugin_root_dir) @@ -113,12 +114,14 @@ def test_get_src_dir_path_fail(mock_relative_path, mock_existing_path, 'plugin_config_file_path, src_dir_path', [(os.path.join(os.path.dirname(os.getcwd()), 'plugin/filename'), '../plugin/src'), - (os.path.join(os.getenv('HOME'), 'plugin/file_name'), '~/plugin/src'), - (os.path.join(os.getcwd(), 'plugin/file_name'), './plugin/src'), - ('/UPPERCASE/file_name', '/UPPERCASE/src'), - ('/mongo/file_name', 
'/mongo/src/main/python'), - ('~/plugin/file_name', '~/plugin/src'), - (r'windows\path\some_file', r'windows\path')]) + (os.path.join(os.path.dirname(os.getcwd()), + 'plugin/filename'), './plugin/src'), + (os.path.join(os.path.dirname(os.getcwd()), + '/UPPERCASE/file_name'), '/UPPERCASE/src'), + (os.path.join(os.path.dirname(os.getcwd()), + '/mongo/file_name'), '/mongo/src/main/python'), + (os.path.join(os.path.dirname(os.getcwd()), + r'windows\path\some_file'), r'windows\path')]) def test_get_src_dir_path_success(mock_relative_path, mock_existing_path, mock_directory_path, plugin_config_file_path, src_dir_path): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 3b7c70da..949f96f1 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -1,7 +1,7 @@ # # Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# - +import os from dlpx.virtualization._internal import package_util import pytest @@ -10,22 +10,22 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '2.0.0' + assert package_util.get_version() == '2.1.0' @staticmethod def test_get_virtualization_api_version(): - assert package_util.get_virtualization_api_version() == '1.1.0' + assert package_util.get_virtualization_api_version() == '1.3.0' @staticmethod def test_get_engine_api_version(): - assert package_util.get_engine_api_version_from_settings() == '1.11.2' + assert package_util.get_engine_api_version_from_settings() == '1.11.3' @staticmethod def test_get_build_api_version_json(): build_api_version = { 'type': 'APIVersion', 'major': 1, - 'minor': 1, + 'minor': 3, 'micro': 0 } assert package_util.get_build_api_version() == build_api_version @@ -36,14 +36,14 @@ def test_get_engine_api_version_json(): 'type': 'APIVersion', 'major': 1, 'minor': 11, - 'micro': 2 + 'micro': 3 } assert package_util.get_engine_api_version() == engine_api_version @staticmethod def test_get_internal_package_root(): assert package_util.get_internal_package_root().endswith( - 'dlpx/virtualization/_internal') + os.path.join('dlpx', 'virtualization', '_internal')) @staticmethod @pytest.mark.parametrize('version_string', [ diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py index ccd294d7..817a3ab9 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import os @@ -51,18 +51,17 @@ def build_wheel(package, dir): dist_path.touch() global packages - packages.add(dist_path.as_posix()) + packages.add(str(dist_path)) def clean_up(a, b, c): - file_util.delete_paths(wheel_dir.as_posix()) + file_util.delete_paths(str(wheel_dir)) - mock_tmpdir.return_value.__enter__.return_value = wheel_dir.as_posix() + mock_tmpdir.return_value.__enter__.return_value = str(wheel_dir) mock_tmpdir.return_value.__exit__.side_effect = clean_up mock_build_wheel.side_effect = build_wheel - pdu.install_deps(build_dir.as_posix(), local_vsdk_root='vsdk') - mock_install_to_dir.assert_called_once_with(packages, - build_dir.as_posix()) + pdu.install_deps(str(build_dir), local_vsdk_root='vsdk') + mock_install_to_dir.assert_called_once_with(packages, str(build_dir)) @staticmethod @mock.patch.object(subprocess, 'Popen') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index ab31c65f..7a1ce1ab 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -1,14 +1,15 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import exceptions import os import uuid from collections import OrderedDict +from multiprocessing import Queue -from dlpx.virtualization._internal.plugin_importer import PluginImporter from dlpx.virtualization._internal import (file_util, plugin_util, - plugin_validator) + plugin_validator, plugin_importer) +from dlpx.virtualization._internal.plugin_importer import PluginImporter import mock import pytest @@ -40,92 +41,75 @@ def get_plugin_importer(plugin_config_file): class TestPluginImporter: + """ + This class tests the plugin_importer module of sdk. 
Though some of these tests + used mock initially to mock out the calls to subprocess, it was found + that the behaviour is different between windows and linux causing these tests + to fail on windows. So, some refactoring is done in plugin_importer + to facilitate testing without the mocks. + + The issue is described in detail here: + https://rhodesmill.org/brandon/2010/python-multiprocessing-linux-windows/ + """ @staticmethod - @mock.patch('importlib.import_module') - def test_get_plugin_manifest(mock_import, src_dir, plugin_type, - entry_point_module, entry_point_object, - plugin_module_content, plugin_manifest): - mock_import.return_value = plugin_module_content - - importer = PluginImporter(src_dir, entry_point_module, - entry_point_object, plugin_type, False) - importer.validate_plugin_module() + def test_get_plugin_manifest(src_dir, plugin_type, entry_point_module, + entry_point_object, plugin_module_content, + plugin_manifest): + queue = Queue() + manifest = plugin_importer.get_manifest(src_dir, entry_point_module, + entry_point_object, + plugin_module_content, + plugin_type, False, queue) - assert importer.result.plugin_manifest == plugin_manifest + assert manifest == plugin_manifest @staticmethod - @mock.patch('importlib.import_module') - def test_plugin_module_content_none(mock_import, src_dir, plugin_type, + def test_plugin_module_content_none(src_dir, plugin_type, entry_point_module, entry_point_object): - mock_import.return_value = None - importer = PluginImporter(src_dir, entry_point_module, - entry_point_object, plugin_type, False) - importer.validate_plugin_module() - result = importer.result - - # - # If module_content is None, importer does not perform any validations - # and just does a return. So result should have an empty manifest and - # assert to make sure it is the case. 
- # - assert result.plugin_manifest == {} + queue = Queue() + manifest = plugin_importer.get_manifest(src_dir, entry_point_module, + entry_point_object, None, + plugin_type, False, queue) + assert manifest is None @staticmethod - @mock.patch('importlib.import_module') - def test_plugin_entry_object_none(mock_import, src_dir, plugin_type, - plugin_name, plugin_module_content): - mock_import.return_value = plugin_module_content - result = () - - with pytest.raises(exceptions.UserError) as err_info: - importer = PluginImporter(src_dir, plugin_name, None, plugin_type, - False) - importer.validate_plugin_module() - result = importer.result - - message = str(err_info) - assert result == () + def test_plugin_entry_object_none(src_dir, plugin_type, entry_point_module, + plugin_module_content): + queue = Queue() + plugin_importer.get_manifest(src_dir, entry_point_module, None, + plugin_module_content, plugin_type, False, + queue) + + message = str(queue.get('exception')) assert 'Plugin entry point object is None.' 
in message @staticmethod - @mock.patch('importlib.import_module') - def test_plugin_entry_point_nonexistent(mock_import, src_dir, plugin_type, - plugin_name, + def test_plugin_entry_point_nonexistent(src_dir, plugin_type, + entry_point_module, plugin_name, plugin_module_content): entry_point_name = "nonexistent entry point" - mock_import.return_value = plugin_module_content - result = () + queue = Queue() + plugin_importer.get_manifest(src_dir, entry_point_module, + entry_point_name, plugin_module_content, + plugin_type, False, queue) - with pytest.raises(exceptions.UserError) as err_info: - importer = PluginImporter(src_dir, plugin_name, entry_point_name, - plugin_type, False) - importer.validate_plugin_module() - result = importer.result - - message = err_info.value.message - assert result == () - assert ('\'{}\' is not a symbol in module'.format(entry_point_name) in - message) + message = str(queue.get('exception')) + assert ("'{}' is not a symbol in module".format(entry_point_name) + in message) @staticmethod - @mock.patch('importlib.import_module') - def test_plugin_object_none(mock_import, src_dir, plugin_type, plugin_name, - plugin_module_content): + def test_plugin_object_none(src_dir, plugin_type, entry_point_module, + plugin_name, plugin_module_content): none_entry_point = "none_entry_point" setattr(plugin_module_content, none_entry_point, None) - mock_import.return_value = plugin_module_content - result = () + queue = Queue() + plugin_importer.get_manifest(src_dir, entry_point_module, + none_entry_point, plugin_module_content, + plugin_type, False, queue) - with pytest.raises(exceptions.UserError) as err_info: - importer = PluginImporter(src_dir, plugin_name, none_entry_point, - plugin_type, False) - importer.validate_plugin_module() - result = importer.result - - message = err_info.value.message - assert result == () + message = str(queue.get('exception')) assert ('Plugin object retrieved from the entry point {} is' ' None'.format(none_entry_point)) in 
message @@ -198,12 +182,20 @@ def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, ('dec_not_function:plugin', "decorated by 'linked.pre_snapshot()'" " is not a function"), ('id_not_string:plugin', "The migration id '['testing', 'out'," - " 'validation']' used in the function" - " 'repo_upgrade' should be a string."), + " 'validation']' used in the function 'repo_upgrade' should be a" + " string."), + ('lua_id_not_string:plugin', "The migration id '['testing', 'out'," + " 'validation']' used in the function 'repo_upgrade' should be a" + " string."), ('id_bad_format:plugin', "used in the function 'repo_upgrade' does" " not follow the correct format"), + ('lua_id_bad_format:plugin', "used in the function 'repo_upgrade'" + " does not follow the correct format"), ('id_used:plugin', "'5.04.000.01' used in the function 'snap_upgrade'" - " has the same canonical form '5.4.0.1' as another migration")]) + " has the same canonical form '5.4.0.1' as another migration"), + ('lua_id_used:plugin', "The lua major minor version '5.4' used in the" + " function 'repo_upgrade_two' decorated by 'upgrade.repository()'" + " has already been used.")]) @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, expected_error): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 52722bd4..893e6e41 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -42,7 +42,7 @@ def test_plugin_bad_config_file(plugin_config_file): @staticmethod @mock.patch('os.path.isabs', return_value=False) - def test_plugin_valid_content(src_dir, plugin_config_file, + def test_plugin_valid_content(plugin_config_file, plugin_config_content): validator = 
PluginValidator.from_config_content( plugin_config_file, plugin_config_content, @@ -67,7 +67,7 @@ def test_plugin_missing_field(plugin_config_file, plugin_config_content): (1.0, "1.0 is not of type 'string'"), ('my_version', None), ('1.0.0', None), ('1.0.0_HF', None)]) - def test_plugin_version_format(src_dir, plugin_config_file, + def test_plugin_version_format(plugin_config_file, plugin_config_content, expected): try: validator = PluginValidator.from_config_content( @@ -88,7 +88,7 @@ def test_plugin_version_format(src_dir, plugin_config_file, ('staged_plugin::staged', "'staged_plugin::staged' does not match"), (':staged_plugin:staged:', "':staged_plugin:staged:' does not match"), ('staged_plugin:staged', None)]) - def test_plugin_entry_point(src_dir, plugin_config_file, + def test_plugin_entry_point(plugin_config_file, plugin_config_content, expected): try: validator = PluginValidator.from_config_content( @@ -100,7 +100,7 @@ def test_plugin_entry_point(src_dir, plugin_config_file, assert expected in message @staticmethod - def test_plugin_additional_properties(src_dir, plugin_config_file, + def test_plugin_additional_properties(plugin_config_file, plugin_config_content): # Adding an unknown key plugin_config_content['unknown_key'] = 'unknown_value' @@ -138,7 +138,7 @@ def test_multiple_validation_errors(plugin_config_file, ('STAGED', "'STAGED' does not match"), ('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), ('e3b69c61-4c30-44f7-92c0-504c8388b91e', None)]) - def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, + def test_plugin_id(plugin_config_file, plugin_config_content, expected): try: validator = PluginValidator.from_config_content( @@ -161,7 +161,7 @@ def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, ('0', "'0' does not match"), ('0.0.00', "'0.0.00' does not match"), ('0.1', None)]) - def test_plugin_build_number_format(src_dir, plugin_config_file, + def test_plugin_build_number_format(plugin_config_file, 
plugin_config_content, expected): try: validator = PluginValidator.from_config_content( @@ -171,3 +171,66 @@ def test_plugin_build_number_format(src_dir, plugin_config_file, except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize( + 'lua_name, expected', + [('lua toolkit', "'lua toolkit' does not match"), + ('!lua#toolkit', "'!lua#toolkit' does not match")]) + def test_plugin_lua_name_format(plugin_config_file, + plugin_config_content, expected): + try: + validator = PluginValidator.from_config_content( + plugin_config_file, plugin_config_content, + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() + except exceptions.SchemaValidationError as err_info: + message = err_info.message + assert expected in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize('minimum_lua_version, expected', + [('1-2-3', "'1-2-3' does not match"), + ('version1.0!', "'version1.0!' 
does not match"), + ('2.3.4', "'2.3.4' does not match")]) + def test_plugin_minimum_lua_version_format(plugin_config_file, + plugin_config_content, + expected): + try: + validator = PluginValidator.from_config_content( + plugin_config_file, plugin_config_content, + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() + except exceptions.SchemaValidationError as err_info: + message = err_info.message + assert expected in message + + @staticmethod + @pytest.mark.parametrize('minimum_lua_version', [None]) + def test_plugin_lua_name_without_minimum_lua_version( + plugin_config_file, plugin_config_content): + try: + validator = PluginValidator.from_config_content( + plugin_config_file, plugin_config_content, + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() + except exceptions.ValidationFailedError as err_info: + message = err_info.message + assert ('Failed to process property "luaName" without ' + '"minimumLuaVersion" set in the plugin config.' in message) + + @staticmethod + @pytest.mark.parametrize('lua_name', [None]) + def test_plugin_minimum_lua_version_without_lua_name( + plugin_config_file, plugin_config_content): + try: + validator = PluginValidator.from_config_content( + plugin_config_file, plugin_config_content, + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() + except exceptions.ValidationFailedError as err_info: + message = err_info.message + assert ('Failed to process property "minimumLuaVersion" without ' + '"luaName" set in the plugin config.' 
in message) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py index 2b064b57..2901c0cf 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py @@ -21,9 +21,9 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): validator.validate() message = err_info.value.message - assert ('Failed to load schemas because {!r} is not a valid json file.' - ' Error: Extra data: line 2 column 1 - line 2 column 9' - ' (char 19 - 27)'.format(schema_file)) in message + assert ("Failed to load schemas because '{}' is not a valid json file." + " Error: Extra data: line 2 column 1 - line 2 column 9" + " (char 19 - 27)".format(schema_file)) in message @staticmethod def test_bad_schema_file(schema_file): @@ -354,5 +354,5 @@ def test_bad_type_in_array(schema_file): validator.validate() message = err_info.value.message - assert ( - "'strings' is not valid under any of the given schemas" in message) + assert ("'strings' is not valid under any of the given schemas" + in message)