From 6c4f9f4555969648c3e553c681587dbd003a89f5 Mon Sep 17 00:00:00 2001 From: Lindsey Nguyen Date: Tue, 5 Nov 2019 09:23:01 -0800 Subject: [PATCH 01/25] PYT-811 Add plugin wrapper calls to upgrade decorator Reviewed at: http://reviews.delphix.com/r/53587/ --- Pipfile.lock | 146 +- build.gradle | 2 +- common/build.gradle | 1 - dvp/Pipfile.lock | 75 +- dvp/lock.dev-requirements.txt | 18 +- dvp/lock.requirements.txt | 8 +- libs/Pipfile.lock | 38 +- libs/lock.dev-requirements.txt | 12 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 38 +- platform/lock.dev-requirements.txt | 12 +- platform/lock.requirements.txt | 2 +- .../dlpx/virtualization/platform/__init__.py | 4 + .../virtualization/platform/_discovery.py | 160 ++ .../dlpx/virtualization/platform/_linked.py | 671 ++++++++ .../dlpx/virtualization/platform/_plugin.py | 1482 +---------------- .../dlpx/virtualization/platform/_upgrade.py | 81 + .../dlpx/virtualization/platform/_virtual.py | 696 ++++++++ .../virtualization/platform/exceptions.py | 25 + .../python/dlpx/virtualization/test_plugin.py | 6 +- .../dlpx/virtualization/test_upgrade.py | 206 +++ tools/Pipfile.lock | 161 +- tools/lock.dev-requirements.txt | 30 +- tools/lock.requirements.txt | 22 +- .../_internal/plugin_importer.py | 6 + .../_internal/test_plugin_validator.py | 18 +- 26 files changed, 2253 insertions(+), 1669 deletions(-) create mode 100644 platform/src/main/python/dlpx/virtualization/platform/_discovery.py create mode 100644 platform/src/main/python/dlpx/virtualization/platform/_linked.py create mode 100644 platform/src/main/python/dlpx/virtualization/platform/_upgrade.py create mode 100644 platform/src/main/python/dlpx/virtualization/platform/_virtual.py create mode 100755 platform/src/test/python/dlpx/virtualization/test_upgrade.py diff --git a/Pipfile.lock b/Pipfile.lock index 637b90ff..76131953 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "ae4bdd7d4157baab65ae9d0e8389a6011e6b640995372c45ec81fa5d1ddfae9f" + "sha256": "b418e58b464533e1a126fc8482179cf47d1b3e5f8205e5c794fe8c72b663b52c" }, "pipfile-spec": 6, "requires": { @@ -15,6 +15,148 @@ } ] }, - "default": {}, + "default": { + "attrs": { + "hashes": [ + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + ], + "version": "==19.3.0" + }, + "configparser": { + "hashes": [ + "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", + "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" + ], + "markers": "python_version < '3'", + "version": "==4.0.2" + }, + "contextlib2": { + "hashes": [ + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" + ], + "markers": "python_version < '3'", + "version": "==0.6.0.post1" + }, + "funcsigs": { + "hashes": [ + "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", + "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + ], + "markers": "python_version < '3.3'", + "version": "==1.0.2" + }, + "functools32": { + "hashes": [ + "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", + "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" + ], + "markers": "python_version < '3'", + "version": "==3.2.3.post2" + }, + "importlib-metadata": { + "hashes": [ + 
"sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", + "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" + ], + "version": "==0.23" + }, + "json-extensions": { + "hashes": [ + "sha256:504f90e66bf562aa4d6684766c465156d097f1c9fe5adc661fc8cebce9ef44e7" + ], + "index": "pypi", + "version": "==0.2.1" + }, + "jsonschema": { + "hashes": [ + "sha256:2fa0684276b6333ff3c0b1b27081f4b2305f0a36cf702a23db50edb141893c3f", + "sha256:94c0a13b4a0616458b42529091624e66700a17f847453e52279e35509a5b7631" + ], + "index": "pypi", + "version": "==3.1.1" + }, + "mock": { + "hashes": [ + "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", + "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" + ], + "index": "pypi", + "version": "==3.0.5" + }, + "more-itertools": { + "hashes": [ + "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", + "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", + "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + ], + "version": "==5.0.0" + }, + "pathlib2": { + "hashes": [ + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" + ], + "markers": "python_version == '3.4.*' or python_version < '3'", + "version": "==2.3.5" + }, + "pyrsistent": { + "hashes": [ + "sha256:eb6545dbeb1aa69ab1fb4809bfbf5a8705e44d92ef8fc7c2361682a47c46c778" + ], + "version": "==0.15.5" + }, + "pyyaml": { + "hashes": [ + "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", + "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", + "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", + "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", + "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", + "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", + "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", + "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", + "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", + "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", + "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", + "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", + "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" + ], + "index": "pypi", + "version": "==5.1.2" + }, + "scandir": { + "hashes": [ + "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", + "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", + "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", + "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", + "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", + "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", + "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", + "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", + "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", + "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", + "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" + ], + "markers": "python_version < '3.5'", + "version": 
"==1.10.0" + }, + "six": { + "hashes": [ + "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", + "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + ], + "version": "==1.12.0" + }, + "zipp": { + "hashes": [ + "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", + "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + ], + "version": "==0.6.0" + } + }, "develop": {} } diff --git a/build.gradle b/build.gradle index b13f85a7..fdedf007 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.0.1-internal-003" + version = "1.1.0-internal-upgrade-001" } def binDir = "${rootProject.projectDir}/bin" diff --git a/common/build.gradle b/common/build.gradle index b159f58b..faace22b 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -85,7 +85,6 @@ task removeEggInfo(type: Delete) { } task wheel(type: SetupPyTask) { - println("ANDREA $project.version".replace("-", "_")) setupPyCommand "bdist_wheel" distFile String.format("%s-%s-%s-%s-%s.whl", dist.name.get().replace("-", "_"), "$project.version".replace("-", "_"), "py2", "none", "any") diff --git a/dvp/Pipfile.lock b/dvp/Pipfile.lock index 91b9005b..e668917c 100644 --- a/dvp/Pipfile.lock +++ b/dvp/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "c5b133a57dbfcbf754277d2f436bbf8381668569135cf09849e92348081c5d2b" + "sha256": "a3fd3dcc18adade3d62eb1dce89074b19108e752edd39551824c4f1b6cfc9ccb" }, "pipfile-spec": 6, "requires": {}, @@ -15,20 +15,20 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.1-internal-002.tar.gz", - "version": "== 1.0.1-internal-002" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.0.1-internal-002.tar.gz", - "version": "== 1.0.1-internal-002" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.0.1-internal-002.tar.gz", - "version": "== 1.0.1-internal-002" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "dvp-tools": { - "path": "../tools/build/python-dist/dvp-tools-1.0.1-internal-002.tar.gz", - "version": "== 1.0.1-internal-002" + "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" } }, "develop": { @@ -41,26 +41,26 @@ }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "version": "==19.1.0" + "version": "==19.3.0" }, "configparser": { "hashes": [ - "sha256:45d1272aad6cfd7a8a06cf5c73f2ceb6a190f6acc1fa707e7f82a4c053b28b18", - "sha256:bc37850f0cc42a1725a796ef7d92690651bf1af37d744cc63161dac62cabee17" + "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", + "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], "markers": "python_version < '3'", - "version": "==3.8.1" + "version": "==4.0.2" }, "contextlib2": { "hashes": [ - 
"sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], "markers": "python_version < '3'", - "version": "==0.5.5" + "version": "==0.6.0.post1" }, "funcsigs": { "hashes": [ @@ -72,10 +72,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8", - "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3" + "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", + "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" ], - "version": "==0.19" + "markers": "python_version < '3.8'", + "version": "==0.23" }, "more-itertools": { "hashes": [ @@ -88,25 +89,25 @@ }, "packaging": { "hashes": [ - "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9", - "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe" + "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", + "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" ], - "version": "==19.1" + "version": "==19.2" }, "pathlib2": { "hashes": [ - "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.4" + "markers": "python_version < '3.6'", + "version": "==2.3.5" }, "pluggy": { "hashes": [ - "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc", - "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c" + "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", + "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" ], - "version": "==0.12.0" + "version": "==0.13.0" }, "py": { "hashes": [ @@ -124,11 +125,11 @@ }, "pytest": { "hashes": [ - "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347", - "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0" + "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", + "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], "index": "delphix", - "version": "==4.6.5" + "version": "==4.6.6" }, "scandir": { "hashes": [ @@ -163,10 +164,10 @@ }, "zipp": { "hashes": [ - "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a", - "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec" + "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", + "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" ], - "version": "==0.5.2" + "version": "==0.6.0" } } } diff --git a/dvp/lock.dev-requirements.txt b/dvp/lock.dev-requirements.txt index 6ff23d34..9e1f773b 100644 --- a/dvp/lock.dev-requirements.txt +++ b/dvp/lock.dev-requirements.txt @@ -1,18 +1,18 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ atomicwrites==1.3.0 -attrs==19.1.0 -configparser==3.8.1 ; python_version < '3' -contextlib2==0.5.5 ; python_version < '3' +attrs==19.3.0 +configparser==4.0.2 ; python_version < '3' 
+contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==0.19 +importlib-metadata==0.23 ; python_version < '3.8' more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.1 -pathlib2==2.3.4 ; python_version == '3.4.*' or python_version < '3' -pluggy==0.12.0 +packaging==19.2 +pathlib2==2.3.5 ; python_version < '3.6' +pluggy==0.13.0 py==1.8.0 pyparsing==2.4.2 -pytest==4.6.5 +pytest==4.6.6 scandir==1.10.0 ; python_version < '3.5' six==1.12.0 wcwidth==0.1.7 -zipp==0.5.2 +zipp==0.6.0 diff --git a/dvp/lock.requirements.txt b/dvp/lock.requirements.txt index 2dcd2273..e79335b7 100644 --- a/dvp/lock.requirements.txt +++ b/dvp/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.1-internal-002.tar.gz -./../libs/build/python-dist/dvp-libs-1.0.1-internal-002.tar.gz -./../platform/build/python-dist/dvp-platform-1.0.1-internal-002.tar.gz -./../tools/build/python-dist/dvp-tools-1.0.1-internal-002.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz +./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-001.tar.gz diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index af5e8b7d..c26e79d2 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "aa58414287124827b744a0e2148c01dbffef40838d73e9e804a14326f80f61ab" + "sha256": "2cd7bb308b4a60375bf03feaee321063287f8f0a8dfbee915bb8bf149e2cf6af" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.1-internal-003.tar.gz", - "version": "== 1.0.1-internal-003" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "protobuf": { "hashes": [ @@ -58,10 +58,10 @@ }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "version": "==19.1.0" + "version": "==19.3.0" }, "configparser": { "hashes": [ @@ -73,18 +73,18 @@ }, "contextlib2": { "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], "markers": "python_version < '3'", - "version": "==0.5.5" + "version": "==0.6.0.post1" }, "funcsigs": { "hashes": [ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { @@ -114,18 +114,18 @@ }, "packaging": { "hashes": [ - "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9", - "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe" + "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", + 
"sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" ], - "version": "==19.1" + "version": "==19.2" }, "pathlib2": { "hashes": [ - "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.4" + "version": "==2.3.5" }, "pluggy": { "hashes": [ @@ -150,11 +150,11 @@ }, "pytest": { "hashes": [ - "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347", - "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0" + "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", + "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], "index": "delphix", - "version": "==4.6.5" + "version": "==4.6.6" }, "scandir": { "hashes": [ diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 77172463..5b59fa94 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -1,18 +1,18 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ atomicwrites==1.3.0 -attrs==19.1.0 +attrs==19.3.0 configparser==4.0.2 ; python_version < '3' -contextlib2==0.5.5 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' +contextlib2==0.6.0.post1 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==0.23 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.1 -pathlib2==2.3.4 ; python_version == '3.4.*' or python_version < '3' +packaging==19.2 +pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' pluggy==0.13.0 py==1.8.0 pyparsing==2.4.2 -pytest==4.6.5 +pytest==4.6.6 scandir==1.10.0 ; python_version < '3.5' six==1.12.0 wcwidth==0.1.7 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 057fdc5b..98ca3313 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.1-internal-003.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz protobuf==3.6.1 six==1.12.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index b88310b1..5b92eaf1 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e5fb2ca5551838972076eedf44c4471f6d7b2c104ebf9c83b0e84b55c9451b02" + "sha256": "bf9205c13cb754d6f02bf99aeb2faee9a06815cffe907f08cfcbef58431436c8" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.1-internal-003.tar.gz", - "version": "== 1.0.1-internal-003" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "enum34": { "hashes": [ @@ -69,10 +69,10 @@ }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "version": "==19.1.0" + 
"version": "==19.3.0" }, "configparser": { "hashes": [ @@ -84,18 +84,18 @@ }, "contextlib2": { "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], "markers": "python_version < '3'", - "version": "==0.5.5" + "version": "==0.6.0.post1" }, "funcsigs": { "hashes": [ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { @@ -125,18 +125,18 @@ }, "packaging": { "hashes": [ - "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9", - "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe" + "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", + "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" ], - "version": "==19.1" + "version": "==19.2" }, "pathlib2": { "hashes": [ - "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], "markers": "python_version == '3.4.*' or python_version < '3'", - "version": "==2.3.4" + "version": "==2.3.5" }, "pluggy": { "hashes": [ @@ -161,11 +161,11 @@ }, "pytest": { "hashes": [ - "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347", - "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0" + "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", + "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], "index": "delphix", - "version": "==4.6.5" + "version": "==4.6.6" }, "scandir": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 290c2177..f9d1c992 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -1,18 +1,18 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ atomicwrites==1.3.0 -attrs==19.1.0 +attrs==19.3.0 configparser==4.0.2 ; python_version < '3' -contextlib2==0.5.5 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' +contextlib2==0.6.0.post1 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==0.23 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.1 -pathlib2==2.3.4 ; python_version == '3.4.*' or python_version < '3' +packaging==19.2 +pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' pluggy==0.13.0 py==1.8.0 pyparsing==2.4.2 -pytest==4.6.5 +pytest==4.6.6 scandir==1.10.0 ; python_version < '3.5' six==1.12.0 wcwidth==0.1.7 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 7eb118fb..0e7ff488 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.1-internal-003.tar.gz 
+./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.12.0 diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index 20a2dd90..23ca4a11 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -5,4 +5,8 @@ __path__ = __import__('pkgutil').extend_path(__path__, __name__) from dlpx.virtualization.platform._plugin_classes import * +from dlpx.virtualization.platform._discovery import * +from dlpx.virtualization.platform._linked import * +from dlpx.virtualization.platform._upgrade import * +from dlpx.virtualization.platform._virtual import * from dlpx.virtualization.platform._plugin import * diff --git a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py new file mode 100644 index 00000000..6e8ce6cd --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py @@ -0,0 +1,160 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +# -*- coding: utf-8 -*- + +"""DiscoveryOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection +from dlpx.virtualization import common_pb2 +from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['DiscoveryOperations'] + + +class DiscoveryOperations(object): + + def __init__(self): + self.repository_impl = None + self.source_config_impl = None + + def repository(self): + def repository_decorator(repository_impl): + if self.repository_impl: + raise OperationAlreadyDefinedError(Op.DISCOVERY_REPOSITORY) + + self.repository_impl = repository_impl + return repository_impl + return repository_decorator + + def source_config(self): + def source_config_decorator(source_config_impl): + if self.source_config_impl: + raise OperationAlreadyDefinedError(Op.DISCOVERY_SOURCE_CONFIG) + self.source_config_impl = source_config_impl + return source_config_impl + return source_config_decorator + + def _internal_repository(self, request): + """Repository discovery wrapper. + + Executed just after adding or refreshing an environment. This plugin + operation is run prior to discovering source configs. This plugin + operation returns a list of repositories installed on a environment. + + Discover the repositories on an environment given a source connection. + + Args: + request (RepositoryDiscoveryRequest): Repository + Discovery operation arguments. + + Returns: + RepositoryDiscoveryResponse: The return value of repository + discovery operation. 
+ """ + from generated.definitions import RepositoryDefinition + + def to_protobuf(repository): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(repository.to_dict()) + repository_protobuf = common_pb2.Repository() + repository_protobuf.parameters.CopyFrom(parameters) + return repository_protobuf + + if not self.repository_impl: + raise OperationNotDefinedError(Op.DISCOVERY_REPOSITORY) + + repositories = self.repository_impl( + source_connection=RemoteConnection.from_proto(request.source_connection)) + + # Validate that this is a list of Repository objects + if not isinstance(repositories, list): + raise IncorrectReturnTypeError( + Op.DISCOVERY_REPOSITORY, + type(repositories), + [RepositoryDefinition]) + + if not all(isinstance(repo, RepositoryDefinition) + for repo in repositories): + raise IncorrectReturnTypeError( + Op.DISCOVERY_REPOSITORY, + [type(repo) for repo in repositories], + [RepositoryDefinition]) + + repository_discovery_response = ( + platform_pb2.RepositoryDiscoveryResponse()) + repository_protobuf_list = [to_protobuf(repo) for repo in repositories] + repository_discovery_response.return_value.repositories.extend( + repository_protobuf_list) + return repository_discovery_response + + def _internal_source_config(self, request): + """Source config discovery wrapper. + + Executed when adding or refreshing an environment. This plugin + operation is run after discovering repositories and before + persisting/updating repository and source config data in MDS. This + plugin operation returns a list of source configs from a discovered + repository. + + Discover the source configs on an environment given a discovered + repository. + + Args: + request (SourceConfigDiscoveryRequest): Source + Config Discovery arguments. + + Returns: + SourceConfigDiscoveryResponse: The return value of source config + discovery operation. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + def to_protobuf(source_config): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(source_config.to_dict()) + source_config_protobuf = common_pb2.SourceConfig() + source_config_protobuf.parameters.CopyFrom(parameters) + return source_config_protobuf + + if not self.source_config_impl: + raise OperationNotDefinedError(Op.DISCOVERY_SOURCE_CONFIG) + + repository_definition = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + source_configs = self.source_config_impl( + source_connection=RemoteConnection.from_proto(request.source_connection), + repository=repository_definition) + + # Validate that this is a list of SourceConfigDefinition objects + if not isinstance(source_configs, list): + raise IncorrectReturnTypeError( + Op.DISCOVERY_SOURCE_CONFIG, + type(source_configs), + [SourceConfigDefinition]) + + if not all(isinstance(config, SourceConfigDefinition) + for config in source_configs): + raise IncorrectReturnTypeError( + Op.DISCOVERY_SOURCE_CONFIG, + [type(config) for config in source_configs], + [SourceConfigDefinition]) + + source_config_discovery_response = ( + platform_pb2.SourceConfigDiscoveryResponse()) + source_config_protobuf_list = [to_protobuf(config) + for config in source_configs] + source_config_discovery_response.return_value.source_configs.extend( + source_config_protobuf_list) + return source_config_discovery_response diff --git a/platform/src/main/python/dlpx/virtualization/platform/_linked.py b/platform/src/main/python/dlpx/virtualization/platform/_linked.py new file mode 100644 index 00000000..b511a6e7 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_linked.py @@ -0,0 +1,671 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +# -*- coding: utf-8 -*- + +"""LinkedOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment +from dlpx.virtualization import common_pb2 +from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.common.exceptions import PluginRuntimeError +from dlpx.virtualization.platform import Status +from dlpx.virtualization.platform import DirectSource +from dlpx.virtualization.platform import StagedSource +from dlpx.virtualization.platform import Mount +from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['LinkedOperations'] + +class LinkedOperations(object): + + def __init__(self): + self.pre_snapshot_impl = None + self.post_snapshot_impl = None + self.start_staging_impl = None + self.stop_staging_impl = None + self.status_impl = None + self.worker_impl = None + self.mount_specification_impl = None + + def pre_snapshot(self): + def pre_snapshot_decorator(pre_snapshot_impl): + if self.pre_snapshot_impl: + raise OperationAlreadyDefinedError(Op.LINKED_PRE_SNAPSHOT) + self.pre_snapshot_impl = pre_snapshot_impl + return pre_snapshot_impl + return pre_snapshot_decorator + + def post_snapshot(self): + def post_snapshot_decorator(post_snapshot_impl): + if self.post_snapshot_impl: + raise OperationAlreadyDefinedError(Op.LINKED_POST_SNAPSHOT) + self.post_snapshot_impl = post_snapshot_impl + return post_snapshot_impl + return post_snapshot_decorator + + def start_staging(self): + def start_staging_decorator(start_staging_impl): + if self.start_staging_impl: + raise OperationAlreadyDefinedError(Op.LINKED_START_STAGING) + self.start_staging_impl = start_staging_impl + return start_staging_impl + return start_staging_decorator + + def stop_staging(self): + def stop_staging_decorator(stop_staging_impl): + if self.stop_staging_impl: + raise OperationAlreadyDefinedError(Op.LINKED_STOP_STAGING) + self.stop_staging_impl = stop_staging_impl + return stop_staging_impl + return stop_staging_decorator + + def status(self): + def status_decorator(status_impl): + if self.status_impl: + raise OperationAlreadyDefinedError(Op.LINKED_STATUS) + self.status_impl = status_impl + return status_impl + return status_decorator + + def worker(self): + def worker_decorator(worker_impl): + if self.worker_impl: + raise OperationAlreadyDefinedError(Op.LINKED_WORKER) + self.worker_impl = worker_impl + return worker_impl + return worker_decorator + + def mount_specification(self): + def mount_specification_decorator(mount_specification_impl): + if self.mount_specification_impl: + raise OperationAlreadyDefinedError( + Op.LINKED_MOUNT_SPEC) + self.mount_specification_impl = mount_specification_impl + return mount_specification_impl + return mount_specification_decorator + + def _internal_direct_pre_snapshot(self, request): + """Pre Snapshot Wrapper for direct plugins. + + Executed before creating a snapshot. This plugin + operation is run prior to creating a snapshot for a direct source. + + Run pre-snapshot operation for a direct source. + + Args: + request (DirectPreSnapshotRequest): Pre Snapshot arguments. + + Returns: + DirectPreSnapshotResponse: A response containing + DirectPreSnapshotResult if successful or PluginErrorResult in case + of an error. 
+ """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) + + direct_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.direct_source.linked_source.parameters.json)) + direct_source = DirectSource( + guid=request.direct_source.linked_source.guid, + connection=RemoteConnection.from_proto(request.direct_source.connection), + parameters=direct_source_definition) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.pre_snapshot_impl( + direct_source=direct_source, + repository=repository, + source_config=source_config) + + direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse() + direct_pre_snapshot_response.return_value.CopyFrom( + platform_pb2.DirectPreSnapshotResult()) + + return direct_pre_snapshot_response + + def _internal_direct_post_snapshot(self, request): + """Post Snapshot Wrapper for direct plugins. + + Executed after creating a snapshot. This plugin + operation is run after creating a snapshot for a direct source. + + Run post-snapshot operation for a direct source. + + Args: + request (DirectPostSnapshotRequest): Post Snapshot arguments. + + Returns: + DirectPostSnapshotResponse: A response containing the return value - + DirectPostSnapshotResult which has the snapshot metadata on success. + In case of errors, response object will contain PluginErrorResult. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) + + direct_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.direct_source.linked_source.parameters.json)) + direct_source = DirectSource( + guid=request.direct_source.linked_source.guid, + connection=RemoteConnection.from_proto(request.direct_source.connection), + parameters=direct_source_definition) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + snapshot = self.post_snapshot_impl( + direct_source=direct_source, + repository=repository, + source_config=source_config) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + direct_post_snapshot_response = ( + platform_pb2.DirectPostSnapshotResponse()) + direct_post_snapshot_response.return_value.snapshot.CopyFrom( + to_protobuf(snapshot)) + + return direct_post_snapshot_response + + def _internal_staged_pre_snapshot(self, request): + """Pre Snapshot Wrapper for staged plugins. + + Executed before creating a snapshot. This plugin + operation is run prior to creating a snapshot for a staged source. + + Run pre-snapshot operation for a staged source. + + Args: + request (StagedPreSnapshotRequest): Pre Snapshot arguments. + + Returns: + StagedPreSnapshotResponse: A response containing + StagedPreSnapshotResult if successful or PluginErrorResult + in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotParametersDefinition + + # + # While linked.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented. 
+ # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) + + linked_source = request.staged_source.linked_source + staged_source_definition = (LinkedSourceDefinition.from_dict( + json.loads(linked_source.parameters.json))) + staged_mount = request.staged_source.staged_mount + mount = Mount( + remote_environment=RemoteEnvironment.from_proto(staged_mount.remote_environment), + mount_path=staged_mount.mount_path, + shared_path=staged_mount.shared_path) + staged_source = StagedSource( + guid=linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + snapshot_parameters = SnapshotParametersDefinition.from_dict( + json.loads(request.snapshot_parameters.parameters.json)) + + self.pre_snapshot_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config, + snapshot_parameters=snapshot_parameters) + + response = platform_pb2.StagedPreSnapshotResponse() + response.return_value.CopyFrom(platform_pb2.StagedPreSnapshotResult()) + + return response + + def _internal_staged_post_snapshot(self, request): + """Post Snapshot Wrapper for staged plugins. + + Executed after creating a snapshot. This plugin + operation is run after creating a snapshot for a staged source. + + Run post-snapshot operation for a staged source. + + Args: + request (StagedPostSnapshotRequest): Post Snapshot arguments. + + Returns: + StagedPostSnapshotResponse: A response containing the return value + StagedPostSnapshotResult which has the snapshot metadata on + success. In case of errors, response object will contain + PluginErrorResult. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SnapshotParametersDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment= + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + snapshot_parameters = SnapshotParametersDefinition.from_dict( + json.loads(request.snapshot_parameters.parameters.json)) + + snapshot = self.post_snapshot_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config, + snapshot_parameters=snapshot_parameters) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + response = platform_pb2.StagedPostSnapshotResponse() + response.return_value.snapshot.CopyFrom(to_protobuf(snapshot)) + + return response + + def _internal_start_staging(self, request): + """Start staging Wrapper for staged plugins. + + Executed when enabling the staging source. This plugin + operation is run to start the staging source as part + of the enable operation. + + Run start operation for a staged source. + + Args: + request (StartStagingRequest): Start arguments. + + Returns: + StartStagingResponse: A response containing StartStagingResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.start_staging() is not a required operation, this should + # not be called if it wasn't implemented. 
+ # + if not self.start_staging_impl: + raise OperationNotDefinedError(Op.LINKED_START_STAGING) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.start_staging_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + start_staging_response = platform_pb2.StartStagingResponse() + start_staging_response.return_value.CopyFrom( + platform_pb2.StartStagingResult()) + + return start_staging_response + + def _internal_stop_staging(self, request): + """Stop staging Wrapper for staged plugins. + + Executed when disabling the staging source. This plugin + operation is run to stop the staging source as part + of the disable operation. + + Run stop operation for a staged source. + + Args: + request (StopStagingRequest): Stop arguments. + + Returns: + StopStagingResponse: A response containing StopStagingResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.stop_staging() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.stop_staging_impl: + raise OperationNotDefinedError(Op.LINKED_STOP_STAGING) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.stop_staging_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + stop_staging_response = platform_pb2.StopStagingResponse() + stop_staging_response.return_value.CopyFrom( + platform_pb2.StopStagingResult()) + + return stop_staging_response + + def _internal_status(self, request): + """Staged Status Wrapper for staged plugins. 
+ + Executed as part of several operations to get the status + of a staged source - active or inactive. + + Run status operation for a staged source. + + Args: + request (StagedStatusRequest): Post Snapshot arguments. + + Returns: + StagedStatusResponse: A response containing the return value - + StagedStatusResult which has active or inactive status. In + case of errors, response object will contain PluginErrorResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.status() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.status_impl: + raise OperationNotDefinedError(Op.LINKED_STATUS) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + status = self.status_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + # Validate that this is a Status object. + if not isinstance(status, Status): + raise IncorrectReturnTypeError( + Op.LINKED_STATUS, type(status), Status) + + staged_status_response = platform_pb2.StagedStatusResponse() + staged_status_response.return_value.status = status.value + + return staged_status_response + + def _internal_worker(self, request): + """Staged Worker Wrapper for staged plugins. + + Executed as part of validated sync. This plugin + operation is run to sync staging source as part + of the validated sync operation. + + Run worker operation for a staged source. + + Args: + request (StagedWorkerRequest): Worker arguments. + + Returns: + StagedWorkerResponse: A response containing StagedWorkerResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.worker() is not a required operation, this should + # not be called if it wasn't implemented. 
+ # + if not self.worker_impl: + raise OperationNotDefinedError(Op.LINKED_WORKER) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.worker_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + staged_worker_response = platform_pb2.StagedWorkerResponse() + staged_worker_response.return_value.CopyFrom( + platform_pb2.StagedWorkerResult()) + + return staged_worker_response + + def _internal_mount_specification(self, request): + """Staged Mount/Ownership Spec Wrapper for staged plugins. + + Executed before creating a snapshot during sync or before + enable/disable. This plugin operation is run before mounting datasets + on staging to set the mount path and/or ownership. + + Run mount/ownership spec operation for a staged source. + + Args: + request (StagedMountSpecRequest): Mount Spec arguments. + + Returns: + StagedMountSpecResponse: A response containing the return value - + StagedMountSpecResult which has the mount/ownership metadata on + success. In case of errors, response object will contain + PluginErrorResult. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + + def to_protobuf_single_mount(single_mount): + if single_mount.shared_path: + raise PluginRuntimeError( + 'Shared path is not supported for linked sources.') + + single_mount_protobuf = common_pb2.SingleEntireMount() + single_mount_protobuf.mount_path = single_mount.mount_path + single_mount_protobuf.remote_environment.CopyFrom( + single_mount.remote_environment.to_proto()) + return single_mount_protobuf + + def to_protobuf_ownership_spec(ownership_spec): + ownership_spec_protobuf = common_pb2.OwnershipSpec() + ownership_spec_protobuf.uid = ownership_spec.uid + ownership_spec_protobuf.gid = ownership_spec.gid + return ownership_spec_protobuf + + if not self.mount_specification_impl: + raise OperationNotDefinedError(Op.LINKED_MOUNT_SPEC) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + mount_spec = self.mount_specification_impl( + staged_source=staged_source, + repository=repository) + + # Validate that this is a MountSpecification object. + if not isinstance(mount_spec, MountSpecification): + raise IncorrectReturnTypeError( + Op.LINKED_MOUNT_SPEC, + type(mount_spec), + MountSpecification) + + # Only one mount is supported for linked sources. + mount_len = len(mount_spec.mounts) + if mount_len != 1: + raise PluginRuntimeError( + 'Exactly one mount must be provided for staging sources.' + ' Found {}'.format(mount_len)) + + staged_mount = to_protobuf_single_mount(mount_spec.mounts[0]) + + staged_mount_spec_response = platform_pb2.StagedMountSpecResponse() + staged_mount_spec_response.return_value.staged_mount.CopyFrom( + staged_mount) + + # Ownership spec is optional for linked sources. + if mount_spec.ownership_specification: + ownership_spec = to_protobuf_ownership_spec( + mount_spec.ownership_specification) + staged_mount_spec_response.return_value.ownership_spec.CopyFrom( + ownership_spec) + + return staged_mount_spec_response \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py index 4e0debcd..2758f34b 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py @@ -80,1489 +80,21 @@ def my_configure_implementation(source, repository, snapshot): fail. The internal methods should only be called by the platform so it's safe to have the import in the methods as the objects will exist at runtime. 
""" -import json -from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment -from dlpx.virtualization import common_pb2 -from dlpx.virtualization import platform_pb2 -from dlpx.virtualization.common.exceptions import PluginRuntimeError -from dlpx.virtualization.platform import VirtualSource -from dlpx.virtualization.platform import DirectSource -from dlpx.virtualization.platform import StagedSource -from dlpx.virtualization.platform import Status -from dlpx.virtualization.platform import Mount -from dlpx.virtualization.platform import MountSpecification -from dlpx.virtualization.platform.operation import Operation as Op -from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationNotDefinedError, - OperationAlreadyDefinedError) +from dlpx.virtualization.platform import (DiscoveryOperations, + LinkedOperations, + VirtualOperations, + UpgradeOperations) __all__ = ['Plugin'] -class DiscoveryOperations(object): - - def __init__(self): - self.repository_impl = None - self.source_config_impl = None - - def repository(self): - def repository_decorator(repository_impl): - if self.repository_impl: - raise OperationAlreadyDefinedError(Op.DISCOVERY_REPOSITORY) - - self.repository_impl = repository_impl - return repository_impl - return repository_decorator - - def source_config(self): - def source_config_decorator(source_config_impl): - if self.source_config_impl: - raise OperationAlreadyDefinedError(Op.DISCOVERY_SOURCE_CONFIG) - self.source_config_impl = source_config_impl - return source_config_impl - return source_config_decorator - - def _internal_repository(self, request): - """Repository discovery wrapper. - - Executed just after adding or refreshing an environment. This plugin - operation is run prior to discovering source configs. This plugin - operation returns a list of repositories installed on a environment. - - Discover the repositories on an environment given a source connection. - - Args: - request (RepositoryDiscoveryRequest): Repository - Discovery operation arguments. - - Returns: - RepositoryDiscoveryResponse: The return value of repository - discovery operation. - """ - from generated.definitions import RepositoryDefinition - - def to_protobuf(repository): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(repository.to_dict()) - repository_protobuf = common_pb2.Repository() - repository_protobuf.parameters.CopyFrom(parameters) - return repository_protobuf - - if not self.repository_impl: - raise OperationNotDefinedError(Op.DISCOVERY_REPOSITORY) - - repositories = self.repository_impl( - source_connection=RemoteConnection.from_proto(request.source_connection)) - - # Validate that this is a list of Repository objects - if not isinstance(repositories, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - type(repositories), - [RepositoryDefinition]) - - if not all(isinstance(repo, RepositoryDefinition) - for repo in repositories): - raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - [type(repo) for repo in repositories], - [RepositoryDefinition]) - - repository_discovery_response = ( - platform_pb2.RepositoryDiscoveryResponse()) - repository_protobuf_list = [to_protobuf(repo) for repo in repositories] - repository_discovery_response.return_value.repositories.extend( - repository_protobuf_list) - return repository_discovery_response - - def _internal_source_config(self, request): - """Source config discovery wrapper. - - Executed when adding or refreshing an environment. 
This plugin - operation is run after discovering repositories and before - persisting/updating repository and source config data in MDS. This - plugin operation returns a list of source configs from a discovered - repository. - - Discover the source configs on an environment given a discovered - repository. - - Args: - request (SourceConfigDiscoveryRequest): Source - Config Discovery arguments. - - Returns: - SourceConfigDiscoveryResponse: The return value of source config - discovery operation. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - def to_protobuf(source_config): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(source_config.to_dict()) - source_config_protobuf = common_pb2.SourceConfig() - source_config_protobuf.parameters.CopyFrom(parameters) - return source_config_protobuf - - if not self.source_config_impl: - raise OperationNotDefinedError(Op.DISCOVERY_SOURCE_CONFIG) - - repository_definition = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - source_configs = self.source_config_impl( - source_connection=RemoteConnection.from_proto(request.source_connection), - repository=repository_definition) - - # Validate that this is a list of SourceConfigDefinition objects - if not isinstance(source_configs, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_SOURCE_CONFIG, - type(source_configs), - [SourceConfigDefinition]) - - if not all(isinstance(config, SourceConfigDefinition) - for config in source_configs): - raise IncorrectReturnTypeError( - Op.DISCOVERY_SOURCE_CONFIG, - [type(config) for config in source_configs], - [SourceConfigDefinition]) - - source_config_discovery_response = ( - platform_pb2.SourceConfigDiscoveryResponse()) - source_config_protobuf_list = [to_protobuf(config) - for config in source_configs] - source_config_discovery_response.return_value.source_configs.extend( - source_config_protobuf_list) - return source_config_discovery_response - - -class LinkedOperations(object): - - def __init__(self): - self.pre_snapshot_impl = None - self.post_snapshot_impl = None - self.start_staging_impl = None - self.stop_staging_impl = None - self.status_impl = None - self.worker_impl = None - self.mount_specification_impl = None - - def pre_snapshot(self): - def pre_snapshot_decorator(pre_snapshot_impl): - if self.pre_snapshot_impl: - raise OperationAlreadyDefinedError(Op.LINKED_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl - return pre_snapshot_impl - return pre_snapshot_decorator - - def post_snapshot(self): - def post_snapshot_decorator(post_snapshot_impl): - if self.post_snapshot_impl: - raise OperationAlreadyDefinedError(Op.LINKED_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl - return post_snapshot_impl - return post_snapshot_decorator - - def start_staging(self): - def start_staging_decorator(start_staging_impl): - if self.start_staging_impl: - raise OperationAlreadyDefinedError(Op.LINKED_START_STAGING) - self.start_staging_impl = start_staging_impl - return start_staging_impl - return start_staging_decorator - - def stop_staging(self): - def stop_staging_decorator(stop_staging_impl): - if self.stop_staging_impl: - raise OperationAlreadyDefinedError(Op.LINKED_STOP_STAGING) - self.stop_staging_impl = stop_staging_impl - return stop_staging_impl - return stop_staging_decorator - - def status(self): - def status_decorator(status_impl): - 
if self.status_impl: - raise OperationAlreadyDefinedError(Op.LINKED_STATUS) - self.status_impl = status_impl - return status_impl - return status_decorator - - def worker(self): - def worker_decorator(worker_impl): - if self.worker_impl: - raise OperationAlreadyDefinedError(Op.LINKED_WORKER) - self.worker_impl = worker_impl - return worker_impl - return worker_decorator - - def mount_specification(self): - def mount_specification_decorator(mount_specification_impl): - if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.LINKED_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl - return mount_specification_impl - return mount_specification_decorator - - def _internal_direct_pre_snapshot(self, request): - """Pre Snapshot Wrapper for direct plugins. - - Executed before creating a snapshot. This plugin - operation is run prior to creating a snapshot for a direct source. - - Run pre-snapshot operation for a direct source. - - Args: - request (DirectPreSnapshotRequest): Pre Snapshot arguments. - - Returns: - DirectPreSnapshotResponse: A response containing - DirectPreSnapshotResult if successful or PluginErrorResult in case - of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) - - direct_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.direct_source.linked_source.parameters.json)) - direct_source = DirectSource( - guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), - parameters=direct_source_definition) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.pre_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) - - direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse() - direct_pre_snapshot_response.return_value.CopyFrom( - platform_pb2.DirectPreSnapshotResult()) - - return direct_pre_snapshot_response - - def _internal_direct_post_snapshot(self, request): - """Post Snapshot Wrapper for direct plugins. - - Executed after creating a snapshot. This plugin - operation is run after creating a snapshot for a direct source. - - Run post-snapshot operation for a direct source. - - Args: - request (DirectPostSnapshotRequest): Post Snapshot arguments. - - Returns: - DirectPostSnapshotResponse: A response containing the return value - - DirectPostSnapshotResult which has the snapshot metadata on success. - In case of errors, response object will contain PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) - - direct_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.direct_source.linked_source.parameters.json)) - direct_source = DirectSource( - guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), - parameters=direct_source_definition) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - snapshot = self.post_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - direct_post_snapshot_response = ( - platform_pb2.DirectPostSnapshotResponse()) - direct_post_snapshot_response.return_value.snapshot.CopyFrom( - to_protobuf(snapshot)) - - return direct_post_snapshot_response - - def _internal_staged_pre_snapshot(self, request): - """Pre Snapshot Wrapper for staged plugins. - - Executed before creating a snapshot. This plugin - operation is run prior to creating a snapshot for a staged source. - - Run pre-snapshot operation for a staged source. - - Args: - request (StagedPreSnapshotRequest): Pre Snapshot arguments. - - Returns: - StagedPreSnapshotResponse: A response containing - StagedPreSnapshotResult if successful or PluginErrorResult - in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotParametersDefinition - - # - # While linked.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) - - linked_source = request.staged_source.linked_source - staged_source_definition = (LinkedSourceDefinition.from_dict( - json.loads(linked_source.parameters.json))) - staged_mount = request.staged_source.staged_mount - mount = Mount( - remote_environment=RemoteEnvironment.from_proto(staged_mount.remote_environment), - mount_path=staged_mount.mount_path, - shared_path=staged_mount.shared_path) - staged_source = StagedSource( - guid=linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - snapshot_parameters = SnapshotParametersDefinition.from_dict( - json.loads(request.snapshot_parameters.parameters.json)) - - self.pre_snapshot_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config, - snapshot_parameters=snapshot_parameters) - - response = platform_pb2.StagedPreSnapshotResponse() - response.return_value.CopyFrom(platform_pb2.StagedPreSnapshotResult()) - - return response - - def _internal_staged_post_snapshot(self, request): - """Post Snapshot Wrapper for staged plugins. - - Executed after creating a snapshot. This plugin - operation is run after creating a snapshot for a staged source. - - Run post-snapshot operation for a staged source. - - Args: - request (StagedPostSnapshotRequest): Post Snapshot arguments. - - Returns: - StagedPostSnapshotResponse: A response containing the return value - StagedPostSnapshotResult which has the snapshot metadata on - success. In case of errors, response object will contain - PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SnapshotParametersDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment= - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - snapshot_parameters = SnapshotParametersDefinition.from_dict( - json.loads(request.snapshot_parameters.parameters.json)) - - snapshot = self.post_snapshot_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config, - snapshot_parameters=snapshot_parameters) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - response = platform_pb2.StagedPostSnapshotResponse() - response.return_value.snapshot.CopyFrom(to_protobuf(snapshot)) - - return response - - def _internal_start_staging(self, request): - """Start staging Wrapper for staged plugins. - - Executed when enabling the staging source. This plugin - operation is run to start the staging source as part - of the enable operation. - - Run start operation for a staged source. - - Args: - request (StartStagingRequest): Start arguments. - - Returns: - StartStagingResponse: A response containing StartStagingResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.start_staging() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.start_staging_impl: - raise OperationNotDefinedError(Op.LINKED_START_STAGING) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.start_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - start_staging_response = platform_pb2.StartStagingResponse() - start_staging_response.return_value.CopyFrom( - platform_pb2.StartStagingResult()) - - return start_staging_response - - def _internal_stop_staging(self, request): - """Stop staging Wrapper for staged plugins. - - Executed when disabling the staging source. This plugin - operation is run to stop the staging source as part - of the disable operation. - - Run stop operation for a staged source. - - Args: - request (StopStagingRequest): Stop arguments. - - Returns: - StopStagingResponse: A response containing StopStagingResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.stop_staging() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.stop_staging_impl: - raise OperationNotDefinedError(Op.LINKED_STOP_STAGING) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.stop_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - stop_staging_response = platform_pb2.StopStagingResponse() - stop_staging_response.return_value.CopyFrom( - platform_pb2.StopStagingResult()) - - return stop_staging_response - - def _internal_status(self, request): - """Staged Status Wrapper for staged plugins. 
- - Executed as part of several operations to get the status - of a staged source - active or inactive. - - Run status operation for a staged source. - - Args: - request (StagedStatusRequest): Post Snapshot arguments. - - Returns: - StagedStatusResponse: A response containing the return value - - StagedStatusResult which has active or inactive status. In - case of errors, response object will contain PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.status() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.status_impl: - raise OperationNotDefinedError(Op.LINKED_STATUS) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - status = self.status_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - # Validate that this is a Status object. - if not isinstance(status, Status): - raise IncorrectReturnTypeError( - Op.LINKED_STATUS, type(status), Status) - - staged_status_response = platform_pb2.StagedStatusResponse() - staged_status_response.return_value.status = status.value - - return staged_status_response - - def _internal_worker(self, request): - """Staged Worker Wrapper for staged plugins. - - Executed as part of validated sync. This plugin - operation is run to sync staging source as part - of the validated sync operation. - - Run worker operation for a staged source. - - Args: - request (StagedWorkerRequest): Worker arguments. - - Returns: - StagedWorkerResponse: A response containing StagedWorkerResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.worker() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.worker_impl: - raise OperationNotDefinedError(Op.LINKED_WORKER) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.worker_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - staged_worker_response = platform_pb2.StagedWorkerResponse() - staged_worker_response.return_value.CopyFrom( - platform_pb2.StagedWorkerResult()) - - return staged_worker_response - - def _internal_mount_specification(self, request): - """Staged Mount/Ownership Spec Wrapper for staged plugins. - - Executed before creating a snapshot during sync or before - enable/disable. This plugin operation is run before mounting datasets - on staging to set the mount path and/or ownership. - - Run mount/ownership spec operation for a staged source. - - Args: - request (StagedMountSpecRequest): Mount Spec arguments. - - Returns: - StagedMountSpecResponse: A response containing the return value - - StagedMountSpecResult which has the mount/ownership metadata on - success. In case of errors, response object will contain - PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - - def to_protobuf_single_mount(single_mount): - if single_mount.shared_path: - raise PluginRuntimeError( - 'Shared path is not supported for linked sources.') - - single_mount_protobuf = common_pb2.SingleEntireMount() - single_mount_protobuf.mount_path = single_mount.mount_path - single_mount_protobuf.remote_environment.CopyFrom( - single_mount.remote_environment.to_proto()) - return single_mount_protobuf - - def to_protobuf_ownership_spec(ownership_spec): - ownership_spec_protobuf = common_pb2.OwnershipSpec() - ownership_spec_protobuf.uid = ownership_spec.uid - ownership_spec_protobuf.gid = ownership_spec.gid - return ownership_spec_protobuf - - if not self.mount_specification_impl: - raise OperationNotDefinedError(Op.LINKED_MOUNT_SPEC) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - mount_spec = self.mount_specification_impl( - staged_source=staged_source, - repository=repository) - - # Validate that this is a MountSpecification object. - if not isinstance(mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.LINKED_MOUNT_SPEC, - type(mount_spec), - MountSpecification) - - # Only one mount is supported for linked sources. - mount_len = len(mount_spec.mounts) - if mount_len != 1: - raise PluginRuntimeError( - 'Exactly one mount must be provided for staging sources.' - ' Found {}'.format(mount_len)) - - staged_mount = to_protobuf_single_mount(mount_spec.mounts[0]) - - staged_mount_spec_response = platform_pb2.StagedMountSpecResponse() - staged_mount_spec_response.return_value.staged_mount.CopyFrom( - staged_mount) - - # Ownership spec is optional for linked sources. 
- if mount_spec.ownership_specification: - ownership_spec = to_protobuf_ownership_spec( - mount_spec.ownership_specification) - staged_mount_spec_response.return_value.ownership_spec.CopyFrom( - ownership_spec) - - return staged_mount_spec_response - - -class VirtualOperations(object): - - def __init__(self): - self.configure_impl = None - self.unconfigure_impl = None - self.reconfigure_impl = None - self.start_impl = None - self.stop_impl = None - self.pre_snapshot_impl = None - self.post_snapshot_impl = None - self.status_impl = None - self.initialize_impl = None - self.mount_specification_impl = None - - def configure(self): - def configure_decorator(configure_impl): - if self.configure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_CONFIGURE) - self.configure_impl = configure_impl - return configure_impl - return configure_decorator - - def unconfigure(self): - def unconfigure_decorator(unconfigure_impl): - if self.unconfigure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_UNCONFIGURE) - self.unconfigure_impl = unconfigure_impl - return unconfigure_impl - return unconfigure_decorator - - def reconfigure(self): - def reconfigure_decorator(reconfigure_impl): - if self.reconfigure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_RECONFIGURE) - self.reconfigure_impl = reconfigure_impl - return reconfigure_impl - return reconfigure_decorator - - def start(self): - def start_decorator(start_impl): - if self.start_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_START) - self.start_impl = start_impl - return start_impl - return start_decorator - - def stop(self): - def stop_decorator(stop_impl): - if self.stop_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) - self.stop_impl = stop_impl - return stop_impl - return stop_decorator - - def pre_snapshot(self): - def pre_snapshot_decorator(pre_snapshot_impl): - if self.pre_snapshot_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl - return pre_snapshot_impl - return pre_snapshot_decorator - - def post_snapshot(self): - def post_snapshot_decorator(post_snapshot_impl): - if self.post_snapshot_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl - return post_snapshot_impl - return post_snapshot_decorator - - def status(self): - def status_decorator(status_impl): - if self.status_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) - self.status_impl = status_impl - return status_impl - return status_decorator - - def initialize(self): - def initialize_decorator(initialize_impl): - if self.initialize_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_INITIALIZE) - self.initialize_impl = initialize_impl - return initialize_impl - return initialize_decorator - - def mount_specification(self): - def mount_specification_decorator(mount_specification_impl): - if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.VIRTUAL_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl - return mount_specification_impl - return mount_specification_decorator - - @staticmethod - def _from_protobuf_single_subset_mount(single_subset_mount): - return Mount( - remote_environment=RemoteEnvironment.from_proto(single_subset_mount.remote_environment), - mount_path=single_subset_mount.mount_path, - shared_path=single_subset_mount.shared_path) - - def _internal_configure(self, request): - """Configure operation wrapper. 
- - Executed just after cloning the captured data and mounting it to a - target environment. Specifically, this plugin operation is run during - provision and refresh, prior to taking the initial snapshot of the - clone. This plugin operation is run before the user-customizable - Configure Clone and Before Refresh operations are run. It must return - a sourceConfig object that represents the new dataset. - - Configure the data to be usable on the target environment. For database - data files, this may mean recovering from a crash consistent format or - backup. For application files, this may mean reconfiguring XML files or - rewriting hostnames and symlinks. - - Args: - request (ConfigureRequest): Configure operation arguments. - - Returns: - ConfigureResponse: A response containing the return value of the - configure operation, as a ConfigureResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - - if not self.configure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_CONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - snapshot = SnapshotDefinition.from_dict( - json.loads(request.snapshot.parameters.json)) - - config = self.configure_impl( - virtual_source=virtual_source, - repository=repository, - snapshot=snapshot) - - # Validate that this is a SourceConfigDefinition object. - if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_CONFIGURE, type(config), SourceConfigDefinition) - - configure_response = platform_pb2.ConfigureResponse() - configure_response.return_value.source_config.parameters.json = ( - json.dumps(config.to_dict())) - return configure_response - - def _internal_unconfigure(self, request): - """Unconfigure operation wrapper. - - Executed when disabling or deleting an existing virtual source which - has already been mounted to a target environment. This plugin operation - is run before unmounting the virtual source from the target - environment. - - Args: - request (UnconfigureRequest): Unconfigure operation arguments. - - Returns: - UnconfigureResponse: A response containing UnconfigureResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.unconfigure() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.unconfigure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_UNCONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.unconfigure_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - unconfigure_response = platform_pb2.UnconfigureResponse() - unconfigure_response.return_value.CopyFrom( - platform_pb2.UnconfigureResult()) - return unconfigure_response - - def _internal_reconfigure(self, request): - """Reconfigure operation wrapper. - - Executed while attaching a VDB during a virtual source enable job and - returns a virtual source config. - - Args: - request (ReconfigureRequest): Reconfigure operation arguments. - - Returns: - ReconfigureResponse: A response containing the return value of the - reconfigure operation, as a ReconfigureResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import RepositoryDefinition - - if not self.reconfigure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_RECONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - snapshot = SnapshotDefinition.from_dict( - json.loads(request.snapshot.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - config = self.reconfigure_impl( - snapshot=snapshot, - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a SourceConfigDefinition object. - if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_RECONFIGURE, type(config), SourceConfigDefinition) - - reconfigure_response = platform_pb2.ReconfigureResponse() - reconfigure_response.return_value.source_config.parameters.json = ( - json.dumps(config.to_dict())) - return reconfigure_response - - def _internal_start(self, request): - """Start operation wrapper. - - Executed after attaching a VDB during a virtual source enable job to - start the database. - - Args: - request (StartRequest): Start operation arguments. - - Returns: - StartResponse: A response containing StartResult if successful or - PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.start() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.start_impl: - raise OperationNotDefinedError(Op.VIRTUAL_START) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.start_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - start_response = platform_pb2.StartResponse() - start_response.return_value.CopyFrom(platform_pb2.StartResult()) - return start_response - - def _internal_stop(self, request): - """Stop operation wrapper. - - Executed before unmounting a VDB during a virtual source stop job. - - Args: - request (StopRequest): Stop operation arguments. - - Returns: - StopResponse: A response containing StopResult if successful or - PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.stop() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.stop_impl: - raise OperationNotDefinedError(Op.VIRTUAL_STOP) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.stop_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - stop_response = platform_pb2.StopResponse() - stop_response.return_value.CopyFrom(platform_pb2.StopResult()) - return stop_response - - def _internal_pre_snapshot(self, request): - """Virtual pre snapshot operation wrapper. - - Executed before creating a ZFS snapshot. This plugin operation is run - prior to creating a snapshot for a virtual source. - - Run pre-snapshot operation for a virtual source. - - Args: - virtual_pre_snapshot_request (VirtualPreSnapshotRequest): - Virtual pre snapshot operation arguments. - - Returns: - VirtualPreSnapshotResponse: A response containing - VirtualPreSnapshotResult if successful or PluginErrorResult in case - of an error. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.pre_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - virtual_pre_snapshot_response = ( - platform_pb2.VirtualPreSnapshotResponse()) - virtual_pre_snapshot_response.return_value.CopyFrom( - platform_pb2.VirtualPreSnapshotResult()) - return virtual_pre_snapshot_response - - def _internal_post_snapshot(self, request): - """Virtual post snapshot operation wrapper. - - Executed after creating a ZFS snapshot. This plugin operation is run - after creating a snapshot for a virtual source. - - Run post-snapshot operation for a virtual source. - - Args: - request (VirtualPostSnapshotRequest): Virtual post snapshot operation - arguments. - - Returns: - VirtualPostSnapshotResponse: A response containing the return value - of the virtual post snapshot operation, as a - VirtualPostSnapshotResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.VIRTUAL_POST_SNAPSHOT) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - snapshot = self.post_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - virtual_post_snapshot_response = ( - platform_pb2.VirtualPostSnapshotResponse()) - virtual_post_snapshot_response.return_value.snapshot.CopyFrom( - to_protobuf(snapshot)) - return virtual_post_snapshot_response - - def _internal_status(self, request): - """Virtual status operation wrapper. - - Executed to get the status of a virtual source - active or inactive. - - Run status operation for a virtual source. - - Args: - request (VirtualStatusRequest): - Virtual status operation arguments. - - Returns: - VirtualStatusResponse: A response containing VirtualStatusResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.status() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.status_impl: - raise OperationNotDefinedError(Op.VIRTUAL_STATUS) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - virtual_status = self.status_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a Status object. 
- if not isinstance(virtual_status, Status): - raise IncorrectReturnTypeError( - Op.VIRTUAL_STATUS, type(virtual_status), Status) - - virtual_status_response = platform_pb2.VirtualStatusResponse() - virtual_status_response.return_value.status = virtual_status.value - return virtual_status_response - - def _internal_initialize(self, request): - """Initialize operation wrapper. - - Executed during VDB creation after mounting onto the target - environment. - - Run initialize operation for an empty virtual source. - - Args: - request (InitializeRequest): Initialize operation arguments. - - Returns: - InitializeResponse: A response containing InitializeResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - if not self.initialize_impl: - raise OperationNotDefinedError(Op.VIRTUAL_INITIALIZE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.initialize_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - initialize_response = platform_pb2.InitializeResponse() - initialize_response.return_value.CopyFrom( - platform_pb2.InitializeResult()) - return initialize_response - - def _internal_mount_specification(self, request): - """Virtual mount spec operation wrapper. - - Executed to fetch the ownership spec before mounting onto a target - environment. - - Run mount spec operation for a virtual source. - - Args: - virtual_mount_spec_request (VirtualMountSpecRequest): - Virtual mount spec operation arguments. - - Returns: - VirtualMountSpecResponse: A response containing the return value of - the virtual mount spec operation, as a VirtualMountSpecResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - - def to_protobuf_single_mount(single_mount): - single_mount_protobuf = common_pb2.SingleSubsetMount() - - environment_protobuf = single_mount.remote_environment.to_proto() - - single_mount_protobuf.remote_environment.CopyFrom( - environment_protobuf) - single_mount_protobuf.mount_path = single_mount.mount_path - - if single_mount.shared_path: - single_mount_protobuf.shared_path = single_mount.shared_path - - return single_mount_protobuf - - def to_protobuf_ownership_spec(ownership_spec): - ownership_spec_protobuf = common_pb2.OwnershipSpec() - ownership_spec_protobuf.uid = ownership_spec.uid - ownership_spec_protobuf.gid = ownership_spec.gid - return ownership_spec_protobuf - - if not self.mount_specification_impl: - raise OperationNotDefinedError(Op.VIRTUAL_MOUNT_SPEC) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - virtual_mount_spec = self.mount_specification_impl( - repository=repository, - virtual_source=virtual_source) - - # Validate that this is a MountSpecification object - if not isinstance(virtual_mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.VIRTUAL_MOUNT_SPEC, - type(virtual_mount_spec), - MountSpecification) - - virtual_mount_spec_response = platform_pb2.VirtualMountSpecResponse() - - if virtual_mount_spec.ownership_specification: - ownership_spec = to_protobuf_ownership_spec( - virtual_mount_spec.ownership_specification) - virtual_mount_spec_response.return_value.ownership_spec.CopyFrom( - ownership_spec) - - mounts_list = [to_protobuf_single_mount(m) - for m in virtual_mount_spec.mounts] - virtual_mount_spec_response.return_value.mounts.extend(mounts_list) - return virtual_mount_spec_response - - class Plugin(object): def __init__(self): self.__discovery = DiscoveryOperations() self.__linked = LinkedOperations() self.__virtual = VirtualOperations() + self.__upgrade = UpgradeOperations() @property def discovery(self): @@ -1575,3 +107,7 @@ def linked(self): @property def virtual(self): return self.__virtual + + @property + def upgrade(self): + return self.__upgrade diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py new file mode 100644 index 00000000..f20e466e --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -0,0 +1,81 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +# -*- coding: utf-8 -*- + +"""UpgradeOperations for the Virtualization Platform + +There are 5 different objects that we can upgrade. All migration ids must be +unique. 
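+
+For example, a plugin author might register a repository migration like this
+(an illustrative sketch only; the migration function's signature and return
+contract shown here are assumptions, since the upgrade wrapper itself is not
+yet implemented):
+
+    from dlpx.virtualization.platform import Plugin
+
+    plugin = Plugin()
+
+    @plugin.upgrade.repository('2019.11.05')
+    def add_default_port(old_repository):
+        # Hypothetical migration body: assumes the old metadata arrives as a
+        # dict and the upgraded dict is returned.
+        old_repository['port'] = 1521
+        return old_repository
+
+Reusing a migration id for any of the five object types raises
+MigrationIdAlreadyUsedError, and Plugin.upgrade.migration_id_list returns the
+sorted list of registered migration ids.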
+""" +from dlpx.virtualization.platform.exceptions import MigrationIdAlreadyUsedError + + +__all__ = ['UpgradeOperations'] + +class UpgradeOperations(object): + + def __init__(self): + self.__migration_ids = set() + + self.repository_id_to_impl = {} + self.source_config_id_to_impl = {} + self.linked_source_id_to_impl = {} + self.virtual_source_id_to_impl = {} + self.snapshot_id_to_impl = {} + + def repository(self, migration_id): + def repository_decorator(repository_impl): + self._migration_id_check_helper(migration_id, repository_impl) + self.repository_id_to_impl[migration_id] = repository_impl + self.__migration_ids.add(migration_id) + return repository_impl + return repository_decorator + + def source_config(self, migration_id): + def source_config_decorator(source_config_impl): + self._migration_id_check_helper(migration_id, source_config_impl) + self.source_config_id_to_impl[migration_id] = source_config_impl + self.__migration_ids.add(migration_id) + return source_config_impl + return source_config_decorator + + def linked_source(self, migration_id): + def linked_source_decorator(linked_source_impl): + self._migration_id_check_helper(migration_id, linked_source_impl) + self.linked_source_id_to_impl[migration_id] = linked_source_impl + self.__migration_ids.add(migration_id) + return linked_source_impl + return linked_source_decorator + + def virtual_source(self, migration_id): + def virtual_source_decorator(virtual_source_impl): + self._migration_id_check_helper(migration_id, virtual_source_impl) + self.virtual_source_id_to_impl[migration_id] = virtual_source_impl + self.__migration_ids.add(migration_id) + return virtual_source_impl + return virtual_source_decorator + + def snapshot(self, migration_id): + def snapshot_decorator(snapshot_impl): + self._migration_id_check_helper(migration_id, snapshot_impl) + self.snapshot_id_to_impl[migration_id] = snapshot_impl + self.__migration_ids.add(migration_id) + return snapshot_impl + return snapshot_decorator + + @property + def migration_id_list(self): + return sorted(self.__migration_ids) + + def _migration_id_check_helper(self, migration_id, migration_impl): + if migration_id in self.__migration_ids: + raise MigrationIdAlreadyUsedError(migration_id, + migration_impl.__name__) + + + def _internal_upgrade(self, request): + """Upgrade Wrapper for plugins. + """ + # TODO diff --git a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py new file mode 100644 index 00000000..e9a57b17 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py @@ -0,0 +1,696 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +# -*- coding: utf-8 -*- + +"""VirtualOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment +from dlpx.virtualization import common_pb2 +from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.platform import VirtualSource +from dlpx.virtualization.platform import Status +from dlpx.virtualization.platform import Mount +from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['VirtualOperations'] + + +class VirtualOperations(object): + + def __init__(self): + self.configure_impl = None + self.unconfigure_impl = None + self.reconfigure_impl = None + self.start_impl = None + self.stop_impl = None + self.pre_snapshot_impl = None + self.post_snapshot_impl = None + self.status_impl = None + self.initialize_impl = None + self.mount_specification_impl = None + + def configure(self): + def configure_decorator(configure_impl): + if self.configure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_CONFIGURE) + self.configure_impl = configure_impl + return configure_impl + return configure_decorator + + def unconfigure(self): + def unconfigure_decorator(unconfigure_impl): + if self.unconfigure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_UNCONFIGURE) + self.unconfigure_impl = unconfigure_impl + return unconfigure_impl + return unconfigure_decorator + + def reconfigure(self): + def reconfigure_decorator(reconfigure_impl): + if self.reconfigure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_RECONFIGURE) + self.reconfigure_impl = reconfigure_impl + return reconfigure_impl + return reconfigure_decorator + + def start(self): + def start_decorator(start_impl): + if self.start_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_START) + self.start_impl = start_impl + return start_impl + return start_decorator + + def stop(self): + def stop_decorator(stop_impl): + if self.stop_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) + self.stop_impl = stop_impl + return stop_impl + return stop_decorator + + def pre_snapshot(self): + def pre_snapshot_decorator(pre_snapshot_impl): + if self.pre_snapshot_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) + self.pre_snapshot_impl = pre_snapshot_impl + return pre_snapshot_impl + return pre_snapshot_decorator + + def post_snapshot(self): + def post_snapshot_decorator(post_snapshot_impl): + if self.post_snapshot_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_POST_SNAPSHOT) + self.post_snapshot_impl = post_snapshot_impl + return post_snapshot_impl + return post_snapshot_decorator + + def status(self): + def status_decorator(status_impl): + if self.status_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) + self.status_impl = status_impl + return status_impl + return status_decorator + + def initialize(self): + def initialize_decorator(initialize_impl): + if self.initialize_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_INITIALIZE) + self.initialize_impl = initialize_impl + return initialize_impl + return initialize_decorator + + def mount_specification(self): + def mount_specification_decorator(mount_specification_impl): + if self.mount_specification_impl: + raise OperationAlreadyDefinedError( + Op.VIRTUAL_MOUNT_SPEC) + self.mount_specification_impl = mount_specification_impl 
+ return mount_specification_impl + return mount_specification_decorator + + @staticmethod + def _from_protobuf_single_subset_mount(single_subset_mount): + return Mount( + remote_environment=RemoteEnvironment.from_proto(single_subset_mount.remote_environment), + mount_path=single_subset_mount.mount_path, + shared_path=single_subset_mount.shared_path) + + def _internal_configure(self, request): + """Configure operation wrapper. + + Executed just after cloning the captured data and mounting it to a + target environment. Specifically, this plugin operation is run during + provision and refresh, prior to taking the initial snapshot of the + clone. This plugin operation is run before the user-customizable + Configure Clone and Before Refresh operations are run. It must return + a sourceConfig object that represents the new dataset. + + Configure the data to be usable on the target environment. For database + data files, this may mean recovering from a crash consistent format or + backup. For application files, this may mean reconfiguring XML files or + rewriting hostnames and symlinks. + + Args: + request (ConfigureRequest): Configure operation arguments. + + Returns: + ConfigureResponse: A response containing the return value of the + configure operation, as a ConfigureResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + + if not self.configure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_CONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + snapshot = SnapshotDefinition.from_dict( + json.loads(request.snapshot.parameters.json)) + + config = self.configure_impl( + virtual_source=virtual_source, + repository=repository, + snapshot=snapshot) + + # Validate that this is a SourceConfigDefinition object. + if not isinstance(config, SourceConfigDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_CONFIGURE, type(config), SourceConfigDefinition) + + configure_response = platform_pb2.ConfigureResponse() + configure_response.return_value.source_config.parameters.json = ( + json.dumps(config.to_dict())) + return configure_response + + def _internal_unconfigure(self, request): + """Unconfigure operation wrapper. + + Executed when disabling or deleting an existing virtual source which + has already been mounted to a target environment. This plugin operation + is run before unmounting the virtual source from the target + environment. + + Args: + request (UnconfigureRequest): Unconfigure operation arguments. + + Returns: + UnconfigureResponse: A response containing UnconfigureResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.unconfigure() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.unconfigure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_UNCONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.unconfigure_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + unconfigure_response = platform_pb2.UnconfigureResponse() + unconfigure_response.return_value.CopyFrom( + platform_pb2.UnconfigureResult()) + return unconfigure_response + + def _internal_reconfigure(self, request): + """Reconfigure operation wrapper. + + Executed while attaching a VDB during a virtual source enable job and + returns a virtual source config. + + Args: + request (ReconfigureRequest): Reconfigure operation arguments. + + Returns: + ReconfigureResponse: A response containing the return value of the + reconfigure operation, as a ReconfigureResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import RepositoryDefinition + + if not self.reconfigure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_RECONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + snapshot = SnapshotDefinition.from_dict( + json.loads(request.snapshot.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + config = self.reconfigure_impl( + snapshot=snapshot, + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a SourceConfigDefinition object. + if not isinstance(config, SourceConfigDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_RECONFIGURE, type(config), SourceConfigDefinition) + + reconfigure_response = platform_pb2.ReconfigureResponse() + reconfigure_response.return_value.source_config.parameters.json = ( + json.dumps(config.to_dict())) + return reconfigure_response + + def _internal_start(self, request): + """Start operation wrapper. 
+ + Executed after attaching a VDB during a virtual source enable job to + start the database. + + Args: + request (StartRequest): Start operation arguments. + + Returns: + StartResponse: A response containing StartResult if successful or + PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.start() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.start_impl: + raise OperationNotDefinedError(Op.VIRTUAL_START) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.start_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + start_response = platform_pb2.StartResponse() + start_response.return_value.CopyFrom(platform_pb2.StartResult()) + return start_response + + def _internal_stop(self, request): + """Stop operation wrapper. + + Executed before unmounting a VDB during a virtual source stop job. + + Args: + request (StopRequest): Stop operation arguments. + + Returns: + StopResponse: A response containing StopResult if successful or + PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.stop() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.stop_impl: + raise OperationNotDefinedError(Op.VIRTUAL_STOP) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.stop_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + stop_response = platform_pb2.StopResponse() + stop_response.return_value.CopyFrom(platform_pb2.StopResult()) + return stop_response + + def _internal_pre_snapshot(self, request): + """Virtual pre snapshot operation wrapper. + + Executed before creating a ZFS snapshot. This plugin operation is run + prior to creating a snapshot for a virtual source. + + Run pre-snapshot operation for a virtual source. 
+
+        Args:
+            request (VirtualPreSnapshotRequest):
+            Virtual pre snapshot operation arguments.
+
+        Returns:
+            VirtualPreSnapshotResponse: A response containing
+            VirtualPreSnapshotResult if successful or PluginErrorResult in case
+            of an error.
+        """
+        # Reasoning for method imports are in this file's docstring.
+        from generated.definitions import VirtualSourceDefinition
+        from generated.definitions import RepositoryDefinition
+        from generated.definitions import SourceConfigDefinition
+
+        #
+        # While virtual.pre_snapshot() is not a required operation, this should
+        # not be called if it wasn't implemented.
+        #
+        if not self.pre_snapshot_impl:
+            raise OperationNotDefinedError(Op.VIRTUAL_PRE_SNAPSHOT)
+
+        virtual_source_definition = VirtualSourceDefinition.from_dict(
+            json.loads(request.virtual_source.parameters.json))
+        mounts = [VirtualOperations._from_protobuf_single_subset_mount(m)
+                  for m in request.virtual_source.mounts]
+        virtual_source = VirtualSource(
+            guid=request.virtual_source.guid,
+            connection=RemoteConnection.from_proto(request.virtual_source.connection),
+            parameters=virtual_source_definition,
+            mounts=mounts)
+
+        repository = RepositoryDefinition.from_dict(
+            json.loads(request.repository.parameters.json))
+        source_config = SourceConfigDefinition.from_dict(
+            json.loads(request.source_config.parameters.json))
+
+        self.pre_snapshot_impl(
+            repository=repository,
+            source_config=source_config,
+            virtual_source=virtual_source)
+
+        virtual_pre_snapshot_response = (
+            platform_pb2.VirtualPreSnapshotResponse())
+        virtual_pre_snapshot_response.return_value.CopyFrom(
+            platform_pb2.VirtualPreSnapshotResult())
+        return virtual_pre_snapshot_response
+
+    def _internal_post_snapshot(self, request):
+        """Virtual post snapshot operation wrapper.
+
+        Executed after creating a ZFS snapshot. This plugin operation is run
+        after creating a snapshot for a virtual source.
+
+        Run post-snapshot operation for a virtual source.
+
+        Args:
+            request (VirtualPostSnapshotRequest): Virtual post snapshot operation
+                arguments.
+
+        Returns:
+            VirtualPostSnapshotResponse: A response containing the return value
+            of the virtual post snapshot operation, as a
+            VirtualPostSnapshotResult.
+        """
+        # Reasoning for method imports are in this file's docstring.
+ from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.VIRTUAL_POST_SNAPSHOT) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + snapshot = self.post_snapshot_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + virtual_post_snapshot_response = ( + platform_pb2.VirtualPostSnapshotResponse()) + virtual_post_snapshot_response.return_value.snapshot.CopyFrom( + to_protobuf(snapshot)) + return virtual_post_snapshot_response + + def _internal_status(self, request): + """Virtual status operation wrapper. + + Executed to get the status of a virtual source - active or inactive. + + Run status operation for a virtual source. + + Args: + request (VirtualStatusRequest): + Virtual status operation arguments. + + Returns: + VirtualStatusResponse: A response containing VirtualStatusResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.status() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.status_impl: + raise OperationNotDefinedError(Op.VIRTUAL_STATUS) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + virtual_status = self.status_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a Status object. 
+        if not isinstance(virtual_status, Status):
+            raise IncorrectReturnTypeError(
+                Op.VIRTUAL_STATUS, type(virtual_status), Status)
+
+        virtual_status_response = platform_pb2.VirtualStatusResponse()
+        virtual_status_response.return_value.status = virtual_status.value
+        return virtual_status_response
+
+    def _internal_initialize(self, request):
+        """Initialize operation wrapper.
+
+        Executed during VDB creation after mounting onto the target
+        environment.
+
+        Run initialize operation for an empty virtual source.
+
+        Args:
+            request (InitializeRequest): Initialize operation arguments.
+
+        Returns:
+            InitializeResponse: A response containing InitializeResult
+            if successful or PluginErrorResult in case of an error.
+        """
+        # Reasoning for method imports are in this file's docstring.
+        from generated.definitions import VirtualSourceDefinition
+        from generated.definitions import RepositoryDefinition
+        from generated.definitions import SourceConfigDefinition
+
+        if not self.initialize_impl:
+            raise OperationNotDefinedError(Op.VIRTUAL_INITIALIZE)
+
+        virtual_source_definition = VirtualSourceDefinition.from_dict(
+            json.loads(request.virtual_source.parameters.json))
+        mounts = [VirtualOperations._from_protobuf_single_subset_mount(m)
+                  for m in request.virtual_source.mounts]
+        virtual_source = VirtualSource(
+            guid=request.virtual_source.guid,
+            connection=RemoteConnection.from_proto(request.virtual_source.connection),
+            parameters=virtual_source_definition,
+            mounts=mounts)
+
+        repository = RepositoryDefinition.from_dict(
+            json.loads(request.repository.parameters.json))
+        source_config = SourceConfigDefinition.from_dict(
+            json.loads(request.source_config.parameters.json))
+
+        self.initialize_impl(
+            repository=repository,
+            source_config=source_config,
+            virtual_source=virtual_source)
+        initialize_response = platform_pb2.InitializeResponse()
+        initialize_response.return_value.CopyFrom(
+            platform_pb2.InitializeResult())
+        return initialize_response
+
+    def _internal_mount_specification(self, request):
+        """Virtual mount spec operation wrapper.
+
+        Executed to fetch the ownership spec before mounting onto a target
+        environment.
+
+        Run mount spec operation for a virtual source.
+
+        Args:
+            request (VirtualMountSpecRequest):
+            Virtual mount spec operation arguments.
+
+        Returns:
+            VirtualMountSpecResponse: A response containing the return value of
+            the virtual mount spec operation, as a VirtualMountSpecResult.
+        """
+        # Reasoning for method imports are in this file's docstring.
+        from generated.definitions import VirtualSourceDefinition
+        from generated.definitions import RepositoryDefinition
+
+        def to_protobuf_single_mount(single_mount):
+            single_mount_protobuf = common_pb2.SingleSubsetMount()
+
+            environment_protobuf = single_mount.remote_environment.to_proto()
+
+            single_mount_protobuf.remote_environment.CopyFrom(
+                environment_protobuf)
+            single_mount_protobuf.mount_path = single_mount.mount_path
+
+            if single_mount.shared_path:
+                single_mount_protobuf.shared_path = single_mount.shared_path
+
+            return single_mount_protobuf
+
+        def to_protobuf_ownership_spec(ownership_spec):
+            ownership_spec_protobuf = common_pb2.OwnershipSpec()
+            ownership_spec_protobuf.uid = ownership_spec.uid
+            ownership_spec_protobuf.gid = ownership_spec.gid
+            return ownership_spec_protobuf
+
+        if not self.mount_specification_impl:
+            raise OperationNotDefinedError(Op.VIRTUAL_MOUNT_SPEC)
+
+        virtual_source_definition = VirtualSourceDefinition.from_dict(
+            json.loads(request.virtual_source.parameters.json))
+        mounts = [VirtualOperations._from_protobuf_single_subset_mount(m)
+                  for m in request.virtual_source.mounts]
+        virtual_source = VirtualSource(
+            guid=request.virtual_source.guid,
+            connection=RemoteConnection.from_proto(request.virtual_source.connection),
+            parameters=virtual_source_definition,
+            mounts=mounts)
+
+        repository = RepositoryDefinition.from_dict(
+            json.loads(request.repository.parameters.json))
+
+        virtual_mount_spec = self.mount_specification_impl(
+            repository=repository,
+            virtual_source=virtual_source)
+
+        # Validate that this is a MountSpecification object
+        if not isinstance(virtual_mount_spec, MountSpecification):
+            raise IncorrectReturnTypeError(
+                Op.VIRTUAL_MOUNT_SPEC,
+                type(virtual_mount_spec),
+                MountSpecification)
+
+        virtual_mount_spec_response = platform_pb2.VirtualMountSpecResponse()
+
+        if virtual_mount_spec.ownership_specification:
+            ownership_spec = to_protobuf_ownership_spec(
+                virtual_mount_spec.ownership_specification)
+            virtual_mount_spec_response.return_value.ownership_spec.CopyFrom(
+                ownership_spec)
+
+        mounts_list = [to_protobuf_single_mount(m)
+                       for m in virtual_mount_spec.mounts]
+        virtual_mount_spec_response.return_value.mounts.extend(mounts_list)
+        return virtual_mount_spec_response
\ No newline at end of file
diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py
index a4e27f5a..3ba27c89 100644
--- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py
+++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py
@@ -89,3 +89,28 @@ def __init__(self, operation):
         message = ('An implementation for the {} operation has not been'
                    ' defined.'.format(operation.value))
         super(OperationNotDefinedError, self).__init__(message)
+
+
+class MigrationIdAlreadyUsedError(Exception):
+    """MigrationIdAlreadyUsedError gets thrown when the same migration id is
+    used by more than one migration in the same upgrade operation.
+
+    Args:
+        migration_id (str): The migration id assigned for this operation.
+        function_name (str): The name of the function that used the
+            decorator with the same migration id.
+
+    Attributes:
+        message (str): A localized user-readable message stating which
+            migration id was reused and by which function.
+ """ + @property + def message(self): + return self.args[0] + + def __init__(self, migration_id, function_name): + message = ("The migration id '{}' used in the function '{}' has" + " been used by another migration.".format(migration_id, + function_name)) + super(MigrationIdAlreadyUsedError, self).__init__(message) + diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin.py b/platform/src/test/python/dlpx/virtualization/test_plugin.py index 827f574c..91fe0469 100755 --- a/platform/src/test/python/dlpx/virtualization/test_plugin.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin.py @@ -2,16 +2,12 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # -import json import pytest -import sys from dlpx.virtualization import platform_pb2 from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) from dlpx.virtualization import common_pb2 -from dlpx.virtualization.platform import _plugin from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationAlreadyDefinedError, - PlatformError, PluginRuntimeError) + IncorrectReturnTypeError, OperationAlreadyDefinedError, PluginRuntimeError) from mock import MagicMock, patch import fake_generated_definitions diff --git a/platform/src/test/python/dlpx/virtualization/test_upgrade.py b/platform/src/test/python/dlpx/virtualization/test_upgrade.py new file mode 100755 index 00000000..d8313691 --- /dev/null +++ b/platform/src/test/python/dlpx/virtualization/test_upgrade.py @@ -0,0 +1,206 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +import pytest +from dlpx.virtualization.platform.exceptions import MigrationIdAlreadyUsedError + + +class TestUpgrade: + @staticmethod + @pytest.fixture + def my_plugin(): + from dlpx.virtualization.platform import Plugin + yield Plugin() + + @staticmethod + def basic_upgrade_helper(decorator, id_to_impl, upgrade_operation): + @decorator('2019.10.01') + def repo_upgrade_one(input_dict): + output_dict = {} + output_dict['in'] = input_dict['in'] + output_dict['out'] = 'first' + return output_dict + + @decorator('2019.10.02') + def repo_upgrade_two(input_dict): + output_dict = {} + output_dict['in'] = input_dict['in'] + output_dict['out'] = 'second' + return output_dict + + migration_one = id_to_impl['2019.10.01'] + migration_two = id_to_impl['2019.10.02'] + + assert migration_one == repo_upgrade_one + assert migration_two == repo_upgrade_two + assert migration_one({'in':'in_one'}) == {'in': 'in_one', + 'out': 'first'} + assert migration_two({'in':'in_two'}) == {'in': 'in_two', + 'out': 'second'} + + assert upgrade_operation.migration_id_list == ['2019.10.01', + '2019.10.02'] + + @staticmethod + def same_migration_id_used_helper(decorator): + @decorator('2019.10.01') + def repo_upgrade(input_dict): + output_dict = {} + output_dict['in'] = input_dict['in'] + output_dict['out'] = 'first' + return output_dict + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info: + @decorator('2019.10.01') + def upgrade_bad(): + pass + + message = err_info.value.message + assert message == ( + "The migration id '2019.10.01' used in the function" + " 'upgrade_bad' has been used by another migration.") + + @staticmethod + def test_upgrade(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.repository, + my_plugin.upgrade.repository_id_to_impl, + my_plugin.upgrade) + + @staticmethod + def test_upgrade_repository_same_migration_id_used(my_plugin): + TestUpgrade.same_migration_id_used_helper( + 
my_plugin.upgrade.repository)
+
+    @staticmethod
+    def test_upgrade_source_config(my_plugin):
+        TestUpgrade.basic_upgrade_helper(
+            my_plugin.upgrade.source_config,
+            my_plugin.upgrade.source_config_id_to_impl,
+            my_plugin.upgrade)
+
+    @staticmethod
+    def test_upgrade_source_config_same_migration_id_used(my_plugin):
+        TestUpgrade.same_migration_id_used_helper(
+            my_plugin.upgrade.source_config)
+
+    @staticmethod
+    def test_upgrade_linked_source(my_plugin):
+        TestUpgrade.basic_upgrade_helper(
+            my_plugin.upgrade.linked_source,
+            my_plugin.upgrade.linked_source_id_to_impl,
+            my_plugin.upgrade)
+
+    @staticmethod
+    def test_upgrade_linked_source_same_migration_id_used(my_plugin):
+        TestUpgrade.same_migration_id_used_helper(
+            my_plugin.upgrade.linked_source)
+
+    @staticmethod
+    def test_upgrade_virtual_source(my_plugin):
+        TestUpgrade.basic_upgrade_helper(
+            my_plugin.upgrade.virtual_source,
+            my_plugin.upgrade.virtual_source_id_to_impl,
+            my_plugin.upgrade)
+
+    @staticmethod
+    def test_upgrade_virtual_source_same_migration_id_used(my_plugin):
+        TestUpgrade.same_migration_id_used_helper(
+            my_plugin.upgrade.virtual_source)
+
+    @staticmethod
+    def test_upgrade_snapshot(my_plugin):
+        TestUpgrade.basic_upgrade_helper(
+            my_plugin.upgrade.snapshot,
+            my_plugin.upgrade.snapshot_id_to_impl,
+            my_plugin.upgrade)
+
+    @staticmethod
+    def test_upgrade_snapshot_same_migration_id_used(my_plugin):
+        TestUpgrade.same_migration_id_used_helper(
+            my_plugin.upgrade.snapshot)
+
+    @staticmethod
+    def test_upgrade_same_migration_id_used(my_plugin):
+        @my_plugin.upgrade.repository('2019.10.01')
+        def repo_upgrade_one():
+            return 'repo_one'
+
+        @my_plugin.upgrade.repository('2019.10.04')
+        def repo_upgrade_two():
+            return 'repo_two'
+
+        @my_plugin.upgrade.repository('2019.10.06')
+        def repo_upgrade_three():
+            return 'repo_three'
+
+        @my_plugin.upgrade.source_config('2019.10.02')
+        def sc_upgrade_one():
+            return 'sc_one'
+
+        with pytest.raises(MigrationIdAlreadyUsedError) as err_info_one:
+            @my_plugin.upgrade.source_config('2019.10.04')
+            def sc_upgrade_two():
+                return 'sc_two'
+
+        @my_plugin.upgrade.linked_source('2019.10.03')
+        def ls_upgrade_one():
+            return 'ls_one'
+
+        with pytest.raises(MigrationIdAlreadyUsedError) as err_info_two:
+            @my_plugin.upgrade.virtual_source('2019.10.03')
+            def vs_upgrade_one():
+                return 'vs_one'
+
+        @my_plugin.upgrade.virtual_source('2019.10.05')
+        def vs_upgrade_two():
+            return 'vs_two'
+
+        with pytest.raises(MigrationIdAlreadyUsedError) as err_info_three:
+            @my_plugin.upgrade.snapshot('2019.10.01')
+            def snap_upgrade_one():
+                return 'snap_one'
+
+        @my_plugin.upgrade.snapshot('2019.10.12')
+        def snap_upgrade_two():
+            return 'snap_two'
+
+        assert my_plugin.upgrade.migration_id_list == ['2019.10.01',
+                                                       '2019.10.02',
+                                                       '2019.10.03',
+                                                       '2019.10.04',
+                                                       '2019.10.05',
+                                                       '2019.10.06',
+                                                       '2019.10.12']
+
+        repo_one = my_plugin.upgrade.repository_id_to_impl['2019.10.01']
+        repo_two = my_plugin.upgrade.repository_id_to_impl['2019.10.04']
+        repo_three = my_plugin.upgrade.repository_id_to_impl['2019.10.06']
+        assert repo_one == repo_upgrade_one
+        assert repo_two == repo_upgrade_two
+        assert repo_three == repo_upgrade_three
+
+        sc_one = my_plugin.upgrade.source_config_id_to_impl['2019.10.02']
+        assert sc_one == sc_upgrade_one
+
+        ls_one = my_plugin.upgrade.linked_source_id_to_impl['2019.10.03']
+        assert ls_one == ls_upgrade_one
+
+        vs_two = my_plugin.upgrade.virtual_source_id_to_impl['2019.10.05']
+        assert vs_two == vs_upgrade_two
+
+        snap_two = my_plugin.upgrade.snapshot_id_to_impl['2019.10.12']
+
assert snap_two == snap_upgrade_two + + assert err_info_one.value.message == ( + "The migration id '2019.10.04' used in the function" + " 'sc_upgrade_two' has been used by another migration.") + + assert err_info_two.value.message == ( + "The migration id '2019.10.03' used in the function" + " 'vs_upgrade_one' has been used by another migration.") + + assert err_info_three.value.message == ( + "The migration id '2019.10.01' used in the function" + " 'snap_upgrade_one' has been used by another migration.") diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index a94a9070..fee5939f 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "1f253637b870f7f019ec0b2297b5444647d0c3bdb1fdfb1fd711d486d3d0b2a3" + "sha256": "05d4aeacc3cec31205fe27118343a64e72e70d0182c41affbb5fdb35fb9395fe" }, "pipfile-spec": 6, "requires": {}, @@ -16,10 +16,10 @@ "default": { "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "version": "==19.1.0" + "version": "==19.3.0" }, "certifi": { "hashes": [ @@ -56,9 +56,17 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==4.0.2" }, + "contextlib2": { + "hashes": [ + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" + ], + "markers": "python_version < '3'", + "version": "==0.6.0.post1" + }, "entrypoints": { "hashes": [ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", @@ -79,18 +87,18 @@ }, "flake8": { "hashes": [ - "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548", - "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696" + "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", + "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" ], "index": "delphix", - "version": "==3.7.8" + "version": "==3.7.9" }, "functools32": { "hashes": [ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "idna": { @@ -100,21 +108,28 @@ ], "version": "==2.8" }, + "importlib-metadata": { + "hashes": [ + "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", + "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" + ], + "version": "==0.23" + }, "jinja2": { "hashes": [ - "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", - "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" + "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f", + "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de" ], "index": "delphix", - "version": "==2.10.1" + "version": "==2.10.3" }, "jsonschema": { "hashes": [ - "sha256:5f9c0a719ca2ce14c5de2fd350a64fd2d13e8539db29836a86adc990bb1a068f", - "sha256:8d4a2b7b6c2237e0199c8ea1a6d3e05bf118e289ae2b9d7ba444182a2959560d" + 
"sha256:2fa0684276b6333ff3c0b1b27081f4b2305f0a36cf702a23db50edb141893c3f", + "sha256:94c0a13b4a0616458b42529091624e66700a17f847453e52279e35509a5b7631" ], "index": "delphix", - "version": "==3.0.2" + "version": "==3.1.1" }, "markupsafe": { "hashes": [ @@ -156,6 +171,22 @@ ], "version": "==0.6.1" }, + "more-itertools": { + "hashes": [ + "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", + "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", + "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + ], + "version": "==5.0.0" + }, + "pathlib2": { + "hashes": [ + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" + ], + "markers": "python_version == '3.4.*' or python_version < '3'", + "version": "==2.3.5" + }, "protobuf": { "hashes": [ "sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4", @@ -194,9 +225,9 @@ }, "pyrsistent": { "hashes": [ - "sha256:34b47fa169d6006b32e99d4b3c4031f155e6e68ebcc107d6454852e8e0ee6533" + "sha256:eb6545dbeb1aa69ab1fb4809bfbf5a8705e44d92ef8fc7c2361682a47c46c778" ], - "version": "==0.15.4" + "version": "==0.15.5" }, "pyyaml": { "hashes": [ @@ -225,6 +256,23 @@ "index": "delphix", "version": "==2.22.0" }, + "scandir": { + "hashes": [ + "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e", + "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022", + "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f", + "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f", + "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae", + "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173", + "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4", + "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32", + "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188", + "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d", + "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac" + ], + "markers": "python_version < '3.5'", + "version": "==1.10.0" + }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", @@ -244,10 +292,17 @@ }, "urllib3": { "hashes": [ - "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1", - "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232" + "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", + "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" + ], + "version": "==1.25.6" + }, + "zipp": { + "hashes": [ + "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", + "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" ], - "version": "==1.25.3" + "version": "==0.6.0" } }, "develop": { @@ -260,10 +315,10 @@ }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "version": "==19.1.0" + "version": "==19.3.0" }, "backports.functools-lru-cache": { "hashes": [ @@ -278,16 +333,16 @@ 
"sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { "hashes": [ - "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48", - "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00" + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], "markers": "python_version < '3'", - "version": "==0.5.5" + "version": "==0.6.0.post1" }, "coverage": { "hashes": [ @@ -328,16 +383,16 @@ "version": "==4.5.4" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.0.1-internal-003.tar.gz", - "version": "== 1.0.1-internal-003" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.0.1-internal-003.tar.gz", - "version": "== 1.0.1-internal-003" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.0.1-internal-003.tar.gz", - "version": "== 1.0.1-internal-003" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz", + "version": "== 1.1.0-internal-upgrade-001" }, "entrypoints": { "hashes": [ @@ -359,11 +414,11 @@ }, "flake8": { "hashes": [ - "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548", - "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696" + "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", + "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" ], "index": "delphix", - "version": "==3.7.8" + "version": "==3.7.9" }, "funcsigs": { "hashes": [ @@ -378,7 +433,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { @@ -391,17 +446,16 @@ }, "httpretty": { "hashes": [ - "sha256:01b52d45077e702eda491f4fe75328d3468fd886aed5dcc530003e7b2b5939dc" + "sha256:66216f26b9d2c52e81808f3e674a6fb65d4bf719721394a1a9be926177e55fbe" ], "index": "delphix", - "version": "==0.9.6" + "version": "==0.9.7" }, "importlib-metadata": { "hashes": [ "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" ], - "markers": "python_version < '3.8'", "version": "==0.23" }, "isort": { @@ -433,23 +487,22 @@ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" ], - "markers": "python_version <= '2.7'", "version": "==5.0.0" }, "packaging": { "hashes": [ - "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9", - "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe" + "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", + "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" ], - "version": "==19.1" + "version": "==19.2" }, "pathlib2": { "hashes": [ - 
"sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e", - "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8" + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", - "version": "==2.3.4" + "markers": "python_version == '3.4.*' or python_version < '3'", + "version": "==2.3.5" }, "pluggy": { "hashes": [ @@ -488,19 +541,19 @@ }, "pytest": { "hashes": [ - "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347", - "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0" + "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", + "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], "index": "delphix", - "version": "==4.6.5" + "version": "==4.6.6" }, "pytest-cov": { "hashes": [ - "sha256:2b097cde81a302e1047331b48cadacf23577e431b61e9c6f49a1170bbe3d3da6", - "sha256:e00ea4fdde970725482f1f35630d12f074e121a23801aabf2ae154ec6bdd343a" + "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", + "sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626" ], "index": "delphix", - "version": "==2.7.1" + "version": "==2.8.1" }, "scandir": { "hashes": [ diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index f930a4a9..ca001279 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,34 +1,34 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.0.1-internal-003.tar.gz -./../libs/build/python-dist/dvp-libs-1.0.1-internal-003.tar.gz -./../platform/build/python-dist/dvp-platform-1.0.1-internal-003.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz atomicwrites==1.3.0 -attrs==19.1.0 +attrs==19.3.0 backports.functools-lru-cache==1.5 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3.2' -contextlib2==0.5.5 ; python_version < '3' +configparser==4.0.2 ; python_version < '3' +contextlib2==0.6.0.post1 ; python_version < '3' coverage==4.5.4 entrypoints==0.3 enum34==1.1.6 -flake8==3.7.8 +flake8==3.7.9 funcsigs==1.0.2 ; python_version < '3.0' -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' -httpretty==0.9.6 -importlib-metadata==0.23 ; python_version < '3.8' +httpretty==0.9.7 +importlib-metadata==0.23 isort==4.3.21 mccabe==0.6.1 mock==3.0.5 -more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.1 -pathlib2==2.3.4 ; python_version < '3.6' +more-itertools==5.0.0 +packaging==19.2 +pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' pluggy==0.13.0 py==1.8.0 pycodestyle==2.5.0 pyflakes==2.1.1 pyparsing==2.4.2 -pytest-cov==2.7.1 -pytest==4.6.5 +pytest-cov==2.8.1 +pytest==4.6.6 scandir==1.10.0 ; python_version < '3.5' six==1.12.0 typing==3.7.4.1 ; python_version < '3.5' diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 6a83907b..b87def44 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,25 +1,31 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -attrs==19.1.0 +attrs==19.3.0 certifi==2019.9.11 
 chardet==3.0.4
 click-configfile==0.2.3
 click==7.0
-configparser==4.0.2 ; python_version < '3.2'
+configparser==4.0.2 ; python_version < '3'
+contextlib2==0.6.0.post1 ; python_version < '3'
 entrypoints==0.3
 enum34==1.1.6
-flake8==3.7.8
-functools32==3.2.3.post2 ; python_version < '3'
+flake8==3.7.9
+functools32==3.2.3.post2 ; python_version < '3.2'
 idna==2.8
-jinja2==2.10.1
-jsonschema==3.0.2
+importlib-metadata==0.23
+jinja2==2.10.3
+jsonschema==3.1.1
 markupsafe==1.1.1
 mccabe==0.6.1
+more-itertools==5.0.0
+pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3'
 protobuf==3.6.1
 pycodestyle==2.5.0
 pyflakes==2.1.1
-pyrsistent==0.15.4
+pyrsistent==0.15.5
 pyyaml==5.1.2
 requests==2.22.0
+scandir==1.10.0 ; python_version < '3.5'
 six==1.12.0
 typing==3.7.4.1 ; python_version < '3.5'
-urllib3==1.25.3
+urllib3==1.25.6
+zipp==0.6.0
diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py
index 0df69610..58b62040 100644
--- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py
+++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py
@@ -357,6 +357,12 @@ def _validate_named_args(module_content, entry_point, plugin_type):
         # LinkedOperations, DiscoveryOperations, VirtualOperations
         #
         plugin_op_type = plugin_attrib.__class__.__name__
+        if plugin_op_type == 'UpgradeOperations':
+            #
+            # For now just ignore all upgrade operations because the fields
+            # aren't all functions.
+            #
+            continue
         for op_name_key, op_name in plugin_attrib.__dict__.items():
             if op_name is None:
                 continue
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py
index 6b0dc1d5..2d1fe09b 100644
--- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py
+++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py
@@ -218,10 +218,11 @@ def test_multiple_validation_errors(plugin_config_file):
         assert "'xxx' is not one of ['UNIX', 'WINDOWS']" in message
 
     @staticmethod
-    @mock.patch('os.path.exists', return_value=True)
-    @mock.patch('os.path.isdir', return_value=True)
-    def test_staged_plugin(mock_directory, mock_real_plugin,
-                           fake_staged_plugin_config):
+    @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path')
+    def test_staged_plugin(mock_file_util, fake_staged_plugin_config):
+        src_dir = os.path.dirname(fake_staged_plugin_config)
+        mock_file_util.return_value = os.path.join(src_dir, 'src/')
+
         with pytest.raises(exceptions.UserError) as err_info:
             validator = PluginValidator(fake_staged_plugin_config,
                                         util_classes.PLUGIN_CONFIG_SCHEMA,
@@ -235,10 +236,11 @@ def test_staged_plugin(mock_directory, mock_real_plugin,
                            fake_staged_plugin_config):
         assert 'Implementation missing for required method' in message
 
     @staticmethod
-    @mock.patch('os.path.exists', return_value=True)
-    @mock.patch('os.path.isdir', return_value=True)
-    def test_direct_plugin(mock_directory, mock_real_plugin,
-                           fake_direct_plugin_config):
+    @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path')
+    def test_direct_plugin(mock_file_util, fake_direct_plugin_config):
+        src_dir = os.path.dirname(fake_direct_plugin_config)
+        mock_file_util.return_value = os.path.join(src_dir, 'src/')
+
         with pytest.raises(exceptions.UserError) as err_info:
             validator = PluginValidator(fake_direct_plugin_config,
                                         util_classes.PLUGIN_CONFIG_SCHEMA,
From b8134c41885a3e2025c2fee00f45c458f7d163e1 Mon Sep
17 00:00:00 2001 From: Sriharsha kanuri Date: Thu, 21 Nov 2019 13:33:29 -0800 Subject: [PATCH 02/25] PYT-1004 Add Protobuf message for upgrade Reviewed at: http://reviews.delphix.com/r/54173/ --- build.gradle | 2 +- .../proto/dlpx/virtualization/common.proto | 6 +++ dvp/Pipfile.lock | 46 +++++-------------- dvp/lock.dev-requirements.txt | 6 +-- dvp/lock.requirements.txt | 15 ++---- libs/Pipfile.lock | 14 +++--- libs/lock.dev-requirements.txt | 4 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 14 +++--- platform/lock.dev-requirements.txt | 4 +- platform/lock.requirements.txt | 2 +- .../proto/dlpx/virtualization/platform.proto | 28 +++++++---- tools/Pipfile.lock | 30 ++++++------ tools/lock.dev-requirements.txt | 14 +++--- tools/lock.requirements.txt | 4 +- .../virtualization/_internal/settings.cfg | 2 +- .../_internal/test_package_util.py | 2 +- 17 files changed, 88 insertions(+), 107 deletions(-) diff --git a/build.gradle b/build.gradle index fdedf007..4ec30ce4 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-001" + version = "1.1.0-internal-upgrade-002" } def binDir = "${rootProject.projectDir}/bin" diff --git a/common/src/main/proto/dlpx/virtualization/common.proto b/common/src/main/proto/dlpx/virtualization/common.proto index 6606a2c9..e1575044 100644 --- a/common/src/main/proto/dlpx/virtualization/common.proto +++ b/common/src/main/proto/dlpx/virtualization/common.proto @@ -87,6 +87,12 @@ message SnapshotParameters { PluginDefinedObject parameters = 1; } +/* + * A PluginDefinedObject is a serialized JSON representation of an object that conforms to a schema provided by the + * plugin. Typically, this will be deserialized to a language-specific object for use by the plugin. For example, + * deserialization of snapshot metadata for a Python plugin would use the Python + * generated.definitions.SnapshotDefinition class. 
+ */ message PluginDefinedObject { string json = 1; } diff --git a/dvp/Pipfile.lock b/dvp/Pipfile.lock index 91fc39b0..3f80d818 100644 --- a/dvp/Pipfile.lock +++ b/dvp/Pipfile.lock @@ -1,11 +1,7 @@ { "_meta": { "hash": { -<<<<<<< HEAD - "sha256": "a3fd3dcc18adade3d62eb1dce89074b19108e752edd39551824c4f1b6cfc9ccb" -======= - "sha256": "35e5b60f1d50f4e862805780d749057031d522423b0d8cd53e475d2f646bb1da" ->>>>>>> master + "sha256": "b12de201be3806fa19aa966dd33f493161b0d8bbb48af73576c4fbb6f7a219c2" }, "pipfile-spec": 6, "requires": {}, @@ -19,37 +15,20 @@ }, "default": { "dvp-common": { -<<<<<<< HEAD - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "dvp-tools": { - "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" -======= - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-001.tar.gz", - "version": "== 1.1.0-internal-001" - }, - "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-001.tar.gz", - "version": "== 1.1.0-internal-001" - }, - "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-001.tar.gz", - "version": "== 1.1.0-internal-001" - }, - "dvp-tools": { - "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-001.tar.gz", - "version": "== 1.1.0-internal-001" ->>>>>>> master + "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" } }, "develop": { @@ -125,10 +104,10 @@ }, "pluggy": { "hashes": [ - "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", - "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "version": "==0.13.0" + "version": "==0.13.1" }, "py": { "hashes": [ @@ -149,7 +128,6 @@ "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], - "index": "delphix", "version": "==4.6.6" }, "scandir": { diff --git a/dvp/lock.dev-requirements.txt b/dvp/lock.dev-requirements.txt index 184cdf13..4c5caadc 100644 --- a/dvp/lock.dev-requirements.txt +++ b/dvp/lock.dev-requirements.txt @@ -8,13 +8,9 @@ importlib-metadata==0.23 ; python_version < '3.8' more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 pathlib2==2.3.5 ; python_version < '3.6' -pluggy==0.13.0 +pluggy==0.13.1 py==1.8.0 -<<<<<<< HEAD -pyparsing==2.4.2 -======= pyparsing==2.4.5 ->>>>>>> master pytest==4.6.6 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 diff --git a/dvp/lock.requirements.txt b/dvp/lock.requirements.txt index 
b09a113b..44e7cd8a 100644 --- a/dvp/lock.requirements.txt +++ b/dvp/lock.requirements.txt @@ -1,12 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -<<<<<<< HEAD -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz -./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-001.tar.gz -======= -./../common/build/python-dist/dvp-common-1.1.0-internal-001.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-001.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-001.tar.gz -./../tools/build/python-dist/dvp-tools-1.1.0-internal-001.tar.gz ->>>>>>> master +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz +./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-002.tar.gz diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index b8cd4777..b16e32f6 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "2cd7bb308b4a60375bf03feaee321063287f8f0a8dfbee915bb8bf149e2cf6af" + "sha256": "2f74667ead2f5475f5decba7ea85d8fd1f350447d74e70ba6b2c2f6a2526d0d6" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "protobuf": { "hashes": [ @@ -83,7 +83,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { @@ -127,10 +127,10 @@ }, "pluggy": { "hashes": [ - "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", - "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "version": "==0.13.0" + "version": "==0.13.1" }, "py": { "hashes": [ diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 1a824cf9..8c38c15f 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,13 +3,13 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' +funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==0.23 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' -pluggy==0.13.0 +pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 pytest==4.6.6 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 8e93d57b..260ab727 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ 
-./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 2f433ed8..20db7adb 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "bf9205c13cb754d6f02bf99aeb2faee9a06815cffe907f08cfcbef58431436c8" + "sha256": "75792ef13d92a9eb07bd79b077fed429ffbba896bc3fe1dd932b2cd145b7a755" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "enum34": { "hashes": [ @@ -93,7 +93,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { @@ -137,10 +137,10 @@ }, "pluggy": { "hashes": [ - "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", - "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "version": "==0.13.0" + "version": "==0.13.1" }, "py": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index f07428ad..f57d38a7 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -3,13 +3,13 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' +funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==0.23 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 pathlib2==2.3.5 ; python_version < '3.6' -pluggy==0.13.0 +pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 pytest==4.6.6 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index b4e885a6..11833ada 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.13.0 diff --git a/platform/src/main/proto/dlpx/virtualization/platform.proto b/platform/src/main/proto/dlpx/virtualization/platform.proto index e87c640b..8c51fdc2 100644 --- a/platform/src/main/proto/dlpx/virtualization/platform.proto +++ b/platform/src/main/proto/dlpx/virtualization/platform.proto @@ -446,18 +446,26 @@ message VirtualMountSpecResponse { /* UPGRADE */ -message UpgradeLinkedSourceRequest { - com.delphix.virtualization.common.LinkedSource linked_source = 1; -} - -message UpgradeVirtualSourceRequest { - com.delphix.virtualization.common.VirtualSource virtual_source = 1; +message UpgradeRequest { + enum Type { + SOURCECONFIG = 0; + REPOSITORY = 1; + LINKEDSOURCE = 2; + VIRTUALSOURCE = 3; + SNAPSHOT = 4; + } + map pre_upgrade_parameters = 1; 
+ Type type = 2; + repeated string migration_ids = 3; } -message UpgradeSourceConfigRequest { - com.delphix.virtualization.common.SourceConfig source_config = 1; +message UpgradeResult { + map post_upgrade_parameters = 1; } -message UpgradeSnapshotRequest { - com.delphix.virtualization.common.Snapshot snapshot = 1; +message UpgradeResponse { + oneof result { + UpgradeResult return_value = 1; + PluginErrorResult error = 2; + } } diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index 2c3e21b1..f7c85719 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "6b24bccaadc37f4f7daf43084fbc7fccae2319870d18103a0d4e3b6a722a74bd" + "sha256": "3f31378e575468296ad72d2328e65e878b7756d2b9bca44b0f8ced91cfd63cf1" }, "pipfile-spec": 6, "requires": {}, @@ -54,7 +54,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version == '2.7'", "version": "==4.0.2" }, "contextlib2": { @@ -94,7 +94,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "idna": { @@ -324,7 +324,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version == '2.7'", "version": "==4.0.2" }, "contextlib2": { @@ -373,16 +373,16 @@ "version": "==4.5.4" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz", - "version": "== 1.1.0-internal-upgrade-001" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz", + "version": "== 1.1.0-internal-upgrade-002" }, "entrypoints": { "hashes": [ @@ -413,7 +413,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "functools32": { @@ -421,7 +421,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "futures": { @@ -492,10 +492,10 @@ }, "pluggy": { "hashes": [ - "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", - "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "version": "==0.13.0" + 
"version": "==0.13.1" }, "py": { "hashes": [ diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 117dbf0a..a510424e 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,18 +1,18 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-001.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-001.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-001.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version == '2.7' contextlib2==0.6.0.post1 ; python_version < '3' coverage==4.5.4 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.0' -functools32==3.2.3.post2 ; python_version < '3.2' +funcsigs==1.0.2 ; python_version < '3.3' +functools32==3.2.3.post2 ; python_version < '3' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==0.23 ; python_version < '3.8' @@ -22,7 +22,7 @@ mock==3.0.5 more-itertools==5.0.0 packaging==19.2 pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' -pluggy==0.13.0 +pluggy==0.13.1 py==1.8.0 pycodestyle==2.5.0 pyflakes==2.1.1 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 191e9c2e..bb1d75bf 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -4,12 +4,12 @@ certifi==2019.9.11 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version == '2.7' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3.2' +functools32==3.2.3.post2 ; python_version < '3' idna==2.8 importlib-metadata==0.23 ; python_version < '3.8' jinja2==2.10.3 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 526aff27..cfc77c99 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -26,7 +26,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-001 +package_version = 1.1.0-internal-upgrade-002 virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index a680f1eb..e7fe5699 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -8,7 +8,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-001' + assert package_util.get_version() == '1.1.0-internal-upgrade-002' @staticmethod def test_get_virtualization_api_version(): From 18afb2165c0eb69dc7835fc9c95ee35526e7d7c8 Mon Sep 17 00:00:00 2001 From: Lindsey Nguyen Date: Fri, 22 Nov 2019 12:37:25 -0800 Subject: [PATCH 03/25] PYT-823 Update Build to tar 
up build functions after validating the format with decorators on build Reviewed at: http://reviews.delphix.com/r/53996/ --- build.gradle | 2 +- dvp/Pipfile.lock | 21 +- dvp/lock.dev-requirements.txt | 2 +- dvp/lock.requirements.txt | 8 +- libs/Pipfile.lock | 11 +- libs/lock.dev-requirements.txt | 2 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 10 +- platform/lock.requirements.txt | 2 +- .../dlpx/virtualization/platform/__init__.py | 3 + .../virtualization/platform/_discovery.py | 7 +- .../dlpx/virtualization/platform/_linked.py | 21 +- .../dlpx/virtualization/platform/_upgrade.py | 58 ++-- .../dlpx/virtualization/platform/_virtual.py | 28 +- .../virtualization/platform/exceptions.py | 83 ++++- .../platform/migration_id_set.py | 102 ++++++ .../dlpx/virtualization/platform/operation.py | 6 + .../platform/validation_util.py | 11 + .../virtualization/test_migration_id_set.py | 107 ++++++ .../dlpx/virtualization/test_upgrade.py | 117 +++---- tools/Pipfile.lock | 42 ++- tools/lock.dev-requirements.txt | 12 +- tools/lock.requirements.txt | 4 +- .../virtualization/_internal/exceptions.py | 15 + .../_internal/plugin_importer.py | 102 ++++-- .../virtualization/_internal/settings.cfg | 2 +- .../virtualization/_internal/util_classes.py | 20 +- .../validation_schemas/plugin_importer.yaml | 38 +- .../dlpx/virtualization/_internal/conftest.py | 278 ++++++++++----- .../fake_plugin/direct/arbitrary_error.py | 19 + .../fake_plugin/direct/dec_not_function.py | 30 ++ .../fake_plugin/direct/id_bad_format.py | 12 + .../fake_plugin/direct/id_not_string.py | 12 + .../_internal/fake_plugin/direct/id_used.py | 17 + .../python_vfiles.py => multiple_warnings.py} | 5 + .../fake_plugin/direct/op_already_defined.py | 23 ++ .../fake_plugin/direct/plugin_config.yml | 11 - .../_internal/fake_plugin/direct/schema.json | 38 -- .../fake_plugin/direct/successful.py | 84 +++++ .../fake_plugin/direct/upgrade_warnings.py | 100 ++++++ .../python_staged.py => multiple_warnings.py} | 16 +- .../fake_plugin/staged/plugin_config.yml | 14 - .../_internal/fake_plugin/staged/schema.json | 40 --- .../fake_plugin/staged/successful.py | 101 ++++++ .../_internal/test_package_util.py | 2 +- .../_internal/test_plugin_importer.py | 14 +- .../_internal/test_plugin_validator.py | 325 +++++++++--------- 47 files changed, 1405 insertions(+), 574 deletions(-) create mode 100644 platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py create mode 100644 platform/src/main/python/dlpx/virtualization/platform/validation_util.py create mode 100644 platform/src/test/python/dlpx/virtualization/test_migration_id_set.py create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py rename tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/{src/python_vfiles.py => multiple_warnings.py} (93%) create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py delete mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml delete mode 100644 
tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py rename tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/{src/python_staged.py => multiple_warnings.py} (89%) delete mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml delete mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json create mode 100644 tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py diff --git a/build.gradle b/build.gradle index 4ec30ce4..8bf3044d 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-002" + version = "1.1.0-internal-upgrade-003" } def binDir = "${rootProject.projectDir}/bin" diff --git a/dvp/Pipfile.lock b/dvp/Pipfile.lock index 3f80d818..1a0ac583 100644 --- a/dvp/Pipfile.lock +++ b/dvp/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b12de201be3806fa19aa966dd33f493161b0d8bbb48af73576c4fbb6f7a219c2" + "sha256": "61dd68fa84db728d2f9604993189481aae3d7e04d53f605627960c0cbdfdc396" }, "pipfile-spec": 6, "requires": {}, @@ -15,20 +15,20 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "dvp-tools": { - "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" } }, "develop": { @@ -99,7 +99,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version == '3.4.*' or python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -128,6 +128,7 @@ "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], + "index": "delphix", "version": "==4.6.6" }, "scandir": { diff --git a/dvp/lock.dev-requirements.txt b/dvp/lock.dev-requirements.txt index 4c5caadc..2213b9d0 100644 --- a/dvp/lock.dev-requirements.txt +++ b/dvp/lock.dev-requirements.txt @@ -7,7 +7,7 @@ funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==0.23 ; python_version < '3.8' more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 -pathlib2==2.3.5 ; python_version < '3.6' +pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < 
'3' pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 diff --git a/dvp/lock.requirements.txt b/dvp/lock.requirements.txt index 44e7cd8a..8d3524c6 100644 --- a/dvp/lock.requirements.txt +++ b/dvp/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz -./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-002.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz +./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-003.tar.gz diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index b16e32f6..c493ea8f 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "2f74667ead2f5475f5decba7ea85d8fd1f350447d74e70ba6b2c2f6a2526d0d6" + "sha256": "9a2bcb4aeddbdaeff8279b2b83dbb2962c24bc8e914492222abec9801b59c0aa" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "protobuf": { "hashes": [ @@ -37,6 +37,7 @@ "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" ], + "index": "delphix", "version": "==3.6.1" }, "six": { @@ -83,7 +84,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { @@ -99,6 +100,7 @@ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" ], + "index": "delphix", "version": "==3.0.5" }, "more-itertools": { @@ -151,6 +153,7 @@ "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], + "index": "delphix", "version": "==4.6.6" }, "scandir": { diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 8c38c15f..653450e6 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,7 +3,7 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==0.23 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 260ab727..b551f4d1 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz 
protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 20db7adb..3c8a7492 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "75792ef13d92a9eb07bd79b077fed429ffbba896bc3fe1dd932b2cd145b7a755" + "sha256": "4c4a012ee9b986d7e3e36cde9675de9009302596cb7581b4d4b5165fe1457c46" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "enum34": { "hashes": [ @@ -25,6 +25,7 @@ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" ], + "index": "delphix", "markers": "python_version < '3.4'", "version": "==1.1.6" }, @@ -47,6 +48,7 @@ "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" ], + "index": "delphix", "version": "==3.6.1" }, "six": { @@ -109,6 +111,7 @@ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" ], + "index": "delphix", "version": "==3.0.5" }, "more-itertools": { @@ -161,6 +164,7 @@ "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], + "index": "delphix", "version": "==4.6.6" }, "scandir": { diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 11833ada..9f737cb7 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.13.0 diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index 23ca4a11..412bcb38 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -4,6 +4,9 @@ __path__ = __import__('pkgutil').extend_path(__path__, __name__) + +from dlpx.virtualization.platform.migration_id_set import * +from dlpx.virtualization.platform.validation_util import * from dlpx.virtualization.platform._plugin_classes import * from dlpx.virtualization.platform._discovery import * from dlpx.virtualization.platform._linked import * diff --git a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py index 6e8ce6cd..06f72348 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py @@ -11,6 +11,7 @@ from dlpx.virtualization.common import RemoteConnection from dlpx.virtualization import common_pb2 from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.platform import validation_util as v from dlpx.virtualization.platform.operation import Operation as Op from 
dlpx.virtualization.platform.exceptions import ( IncorrectReturnTypeError, OperationNotDefinedError, @@ -31,7 +32,8 @@ def repository_decorator(repository_impl): if self.repository_impl: raise OperationAlreadyDefinedError(Op.DISCOVERY_REPOSITORY) - self.repository_impl = repository_impl + self.repository_impl = v.check_function(repository_impl, + Op.DISCOVERY_REPOSITORY) return repository_impl return repository_decorator @@ -39,7 +41,8 @@ def source_config(self): def source_config_decorator(source_config_impl): if self.source_config_impl: raise OperationAlreadyDefinedError(Op.DISCOVERY_SOURCE_CONFIG) - self.source_config_impl = source_config_impl + self.source_config_impl = v.check_function( + source_config_impl, Op.DISCOVERY_SOURCE_CONFIG) return source_config_impl return source_config_decorator diff --git a/platform/src/main/python/dlpx/virtualization/platform/_linked.py b/platform/src/main/python/dlpx/virtualization/platform/_linked.py index b511a6e7..01e16296 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_linked.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_linked.py @@ -17,6 +17,7 @@ from dlpx.virtualization.platform import StagedSource from dlpx.virtualization.platform import Mount from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import validation_util as v from dlpx.virtualization.platform.operation import Operation as Op from dlpx.virtualization.platform.exceptions import ( IncorrectReturnTypeError, OperationNotDefinedError, @@ -25,6 +26,7 @@ __all__ = ['LinkedOperations'] + class LinkedOperations(object): def __init__(self): @@ -40,7 +42,8 @@ def pre_snapshot(self): def pre_snapshot_decorator(pre_snapshot_impl): if self.pre_snapshot_impl: raise OperationAlreadyDefinedError(Op.LINKED_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl + self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, + Op.LINKED_PRE_SNAPSHOT) return pre_snapshot_impl return pre_snapshot_decorator @@ -48,7 +51,8 @@ def post_snapshot(self): def post_snapshot_decorator(post_snapshot_impl): if self.post_snapshot_impl: raise OperationAlreadyDefinedError(Op.LINKED_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl + self.post_snapshot_impl = v.check_function(post_snapshot_impl, + Op.LINKED_POST_SNAPSHOT) return post_snapshot_impl return post_snapshot_decorator @@ -56,7 +60,8 @@ def start_staging(self): def start_staging_decorator(start_staging_impl): if self.start_staging_impl: raise OperationAlreadyDefinedError(Op.LINKED_START_STAGING) - self.start_staging_impl = start_staging_impl + self.start_staging_impl = v.check_function(start_staging_impl, + Op.LINKED_START_STAGING) return start_staging_impl return start_staging_decorator @@ -64,7 +69,8 @@ def stop_staging(self): def stop_staging_decorator(stop_staging_impl): if self.stop_staging_impl: raise OperationAlreadyDefinedError(Op.LINKED_STOP_STAGING) - self.stop_staging_impl = stop_staging_impl + self.stop_staging_impl = v.check_function(stop_staging_impl, + Op.LINKED_STOP_STAGING) return stop_staging_impl return stop_staging_decorator @@ -72,7 +78,7 @@ def status(self): def status_decorator(status_impl): if self.status_impl: raise OperationAlreadyDefinedError(Op.LINKED_STATUS) - self.status_impl = status_impl + self.status_impl = v.check_function(status_impl, Op.LINKED_STATUS) return status_impl return status_decorator @@ -80,7 +86,7 @@ def worker(self): def worker_decorator(worker_impl): if self.worker_impl: raise 
OperationAlreadyDefinedError(Op.LINKED_WORKER) - self.worker_impl = worker_impl + self.worker_impl = v.check_function(worker_impl, Op.LINKED_WORKER) return worker_impl return worker_decorator @@ -89,7 +95,8 @@ def mount_specification_decorator(mount_specification_impl): if self.mount_specification_impl: raise OperationAlreadyDefinedError( Op.LINKED_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl + self.mount_specification_impl = v.check_function( + mount_specification_impl, Op.LINKED_MOUNT_SPEC) return mount_specification_impl return mount_specification_decorator diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py index f20e466e..35bec54e 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -7,17 +7,23 @@ """UpgradeOperations for the Virtualization Platform There are 5 different objects that we can upgrade. All migration ids must be -unique. +unique. To upgrade a specific schema, the plugin author would use that specific +decorator specifying the migration id. We save the implementations of each of +the upgrade functions in a dict for the specific schema. For each new upgrade +operation of the same schema, the key will be the migration id, and the value +will be the function that was implemented. """ -from dlpx.virtualization.platform.exceptions import MigrationIdAlreadyUsedError - +from dlpx.virtualization.platform import MigrationIdSet +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op __all__ = ['UpgradeOperations'] + class UpgradeOperations(object): def __init__(self): - self.__migration_ids = set() + self.__migration_id_set = MigrationIdSet() self.repository_id_to_impl = {} self.source_config_id_to_impl = {} @@ -27,53 +33,53 @@ def __init__(self): def repository(self, migration_id): def repository_decorator(repository_impl): - self._migration_id_check_helper(migration_id, repository_impl) - self.repository_id_to_impl[migration_id] = repository_impl - self.__migration_ids.add(migration_id) + + std_mig_id = self.__migration_id_set.add( + migration_id, repository_impl.__name__) + self.repository_id_to_impl[std_mig_id] = v.check_function( + repository_impl, Op.UPGRADE_REPOSITORY) return repository_impl return repository_decorator def source_config(self, migration_id): def source_config_decorator(source_config_impl): - self._migration_id_check_helper(migration_id, source_config_impl) - self.source_config_id_to_impl[migration_id] = source_config_impl - self.__migration_ids.add(migration_id) + std_mig_id = self.__migration_id_set.add( + migration_id, source_config_impl.__name__) + self.source_config_id_to_impl[std_mig_id] = v.check_function( + source_config_impl, Op.UPGRADE_SOURCE_CONFIG) return source_config_impl return source_config_decorator def linked_source(self, migration_id): def linked_source_decorator(linked_source_impl): - self._migration_id_check_helper(migration_id, linked_source_impl) - self.linked_source_id_to_impl[migration_id] = linked_source_impl - self.__migration_ids.add(migration_id) + std_mig_id = self.__migration_id_set.add( + migration_id, linked_source_impl.__name__) + self.linked_source_id_to_impl[std_mig_id] = v.check_function( + linked_source_impl, Op.UPGRADE_LINKED_SOURCE) return linked_source_impl return linked_source_decorator def virtual_source(self, migration_id): 
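
For illustration of the decorator contract described in the module docstring above: a plugin might register upgrade migrations as sketched below. The Plugin entry-point class and the decorator signatures come from this package; the migration ids, function names, and metadata fields are invented for the example, and each migration simply takes the old metadata dict and returns the new one, as in the tests.

from dlpx.virtualization.platform import Plugin

plugin = Plugin()

@plugin.upgrade.repository('2019.11.20')
def add_default_port(old_repository):
    # A migration receives the previous metadata as a plain dict and returns
    # the upgraded dict; the wrapper stores it under the canonical form of
    # the migration id it was registered with.
    new_repository = dict(old_repository)
    new_repository.setdefault('port', 5432)
    return new_repository

@plugin.upgrade.snapshot('2019.11.21')
def rename_snapshot_field(old_snapshot):
    new_snapshot = dict(old_snapshot)
    new_snapshot['dbName'] = new_snapshot.pop('name', None)
    return new_snapshot

# plugin.upgrade.migration_id_list would then be ['2019.11.20', '2019.11.21'].
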
def virtual_source_decorator(virtual_source_impl): - self._migration_id_check_helper(migration_id, virtual_source_impl) - self.virtual_source_id_to_impl[migration_id] = virtual_source_impl - self.__migration_ids.add(migration_id) + std_mig_id = self.__migration_id_set.add( + migration_id, virtual_source_impl.__name__) + self.virtual_source_id_to_impl[std_mig_id] = v.check_function( + virtual_source_impl, Op.UPGRADE_VIRTUAL_SOURCE) return virtual_source_impl return virtual_source_decorator def snapshot(self, migration_id): def snapshot_decorator(snapshot_impl): - self._migration_id_check_helper(migration_id, snapshot_impl) - self.snapshot_id_to_impl[migration_id] = snapshot_impl - self.__migration_ids.add(migration_id) + std_mig_id = self.__migration_id_set.add( + migration_id, snapshot_impl.__name__) + self.snapshot_id_to_impl[std_mig_id] = v.check_function( + snapshot_impl, Op.UPGRADE_SNAPSHOT) return snapshot_impl return snapshot_decorator @property def migration_id_list(self): - return sorted(self.__migration_ids) - - def _migration_id_check_helper(self, migration_id, migration_impl): - if migration_id in self.__migration_ids: - raise MigrationIdAlreadyUsedError(migration_id, - migration_impl.__name__) - + return self.__migration_id_set.get_sorted_ids() def _internal_upgrade(self, request): """Upgrade Wrapper for plugins. diff --git a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py index e9a57b17..dab3bec9 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py @@ -15,6 +15,7 @@ from dlpx.virtualization.platform import Status from dlpx.virtualization.platform import Mount from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import validation_util as v from dlpx.virtualization.platform.operation import Operation as Op from dlpx.virtualization.platform.exceptions import ( IncorrectReturnTypeError, OperationNotDefinedError, @@ -42,7 +43,8 @@ def configure(self): def configure_decorator(configure_impl): if self.configure_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_CONFIGURE) - self.configure_impl = configure_impl + self.configure_impl = v.check_function(configure_impl, + Op.VIRTUAL_CONFIGURE) return configure_impl return configure_decorator @@ -50,7 +52,8 @@ def unconfigure(self): def unconfigure_decorator(unconfigure_impl): if self.unconfigure_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_UNCONFIGURE) - self.unconfigure_impl = unconfigure_impl + self.unconfigure_impl = v.check_function(unconfigure_impl, + Op.VIRTUAL_UNCONFIGURE) return unconfigure_impl return unconfigure_decorator @@ -58,7 +61,8 @@ def reconfigure(self): def reconfigure_decorator(reconfigure_impl): if self.reconfigure_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_RECONFIGURE) - self.reconfigure_impl = reconfigure_impl + self.reconfigure_impl = v.check_function(reconfigure_impl, + Op.VIRTUAL_RECONFIGURE) return reconfigure_impl return reconfigure_decorator @@ -66,7 +70,7 @@ def start(self): def start_decorator(start_impl): if self.start_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_START) - self.start_impl = start_impl + self.start_impl = v.check_function(start_impl, Op.VIRTUAL_START) return start_impl return start_decorator @@ -74,7 +78,7 @@ def stop(self): def stop_decorator(stop_impl): if self.stop_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) - self.stop_impl = 
stop_impl + self.stop_impl = v.check_function(stop_impl, Op.VIRTUAL_STOP) return stop_impl return stop_decorator @@ -82,7 +86,8 @@ def pre_snapshot(self): def pre_snapshot_decorator(pre_snapshot_impl): if self.pre_snapshot_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl + self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, + Op.VIRTUAL_PRE_SNAPSHOT) return pre_snapshot_impl return pre_snapshot_decorator @@ -90,7 +95,8 @@ def post_snapshot(self): def post_snapshot_decorator(post_snapshot_impl): if self.post_snapshot_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl + self.post_snapshot_impl = v.check_function( + post_snapshot_impl, Op.VIRTUAL_POST_SNAPSHOT) return post_snapshot_impl return post_snapshot_decorator @@ -98,7 +104,7 @@ def status(self): def status_decorator(status_impl): if self.status_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) - self.status_impl = status_impl + self.status_impl = v.check_function(status_impl, Op.VIRTUAL_STATUS) return status_impl return status_decorator @@ -106,7 +112,8 @@ def initialize(self): def initialize_decorator(initialize_impl): if self.initialize_impl: raise OperationAlreadyDefinedError(Op.VIRTUAL_INITIALIZE) - self.initialize_impl = initialize_impl + self.initialize_impl = v.check_function(initialize_impl, + Op.VIRTUAL_INITIALIZE) return initialize_impl return initialize_decorator @@ -115,7 +122,8 @@ def mount_specification_decorator(mount_specification_impl): if self.mount_specification_impl: raise OperationAlreadyDefinedError( Op.VIRTUAL_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl + self.mount_specification_impl = v.check_function( + mount_specification_impl, Op.VIRTUAL_MOUNT_SPEC) return mount_specification_impl return mount_specification_decorator diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py index 115cb138..4047a50c 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py +++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py @@ -91,7 +91,53 @@ def __init__(self, operation): super(OperationNotDefinedError, self).__init__(message) -class MigrationIdAlreadyUsedError(Exception): +class MigrationIdIncorrectTypeError(PlatformError): + """MigrationIdIncorrectType gets thrown when the provided migration id is + not a string. + + Args: + migration_id (str): The migration id assigned for this operation + function_name (str): The name of the function that used the + decorator with the same migration id. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, migration_id, function_name): + message = ("The migration id '{}' used in the function '{}' should" + " be a string.".format(migration_id, function_name)) + super(MigrationIdIncorrectTypeError, self).__init__(message) + + +class MigrationIdIncorrectFormatError(PlatformError): + """MigrationIdIncorrectFormat gets thrown when the migration id given is + not in the correct format. It should be one or more positive integers + separated by periods. + + Args: + migration_id (str): The migration id assigned for this operation + function_name (str): The name of the function that used the + decorator with the same migration id. + format (str): The format expected of the migration_id. 
+ + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, message): + super(MigrationIdIncorrectFormatError, self).__init__(message) + + @classmethod + def from_fields(cls, migration_id, function_name, format): + message = ("The migration id '{}' used in the function '{}' does not" + " follow the correct format '{}'.".format(migration_id, + function_name, + format)) + return cls(message) + + +class MigrationIdAlreadyUsedError(PlatformError): """MigrationIdAlreadyUsedError gets thrown when the same migration id is used for the same upgrade operation @@ -104,23 +150,37 @@ class MigrationIdAlreadyUsedError(Exception): message (str): A localized user-readable message about what operation should be returning what type. """ - @property - def message(self): - return self.args[0] - - def __init__(self, migration_id, function_name): - message = ("The migration id '{}' used in the function '{}' has" - " been used by another migration.".format(migration_id, - function_name)) + def __init__(self, migration_id, std_migration_id, function_name): + message = ("The migration id '{}' used in the function '{}' has the" + " same canonical form '{}' as another migration.".format( + migration_id, function_name, std_migration_id)) super(MigrationIdAlreadyUsedError, self).__init__(message) +class DecoratorNotFunctionError(PlatformError): + """DecoratorNotFunctionError gets thrown when the decorated variable is + not a function when it should be. + + Args: + object_name (str): The name of the variable that should have been a + decorator_name (str): The decorator that is being incorrectly used. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, object_name, decorator_name): + message = ("The object '{}' decorated by '{}' is" + " not a function.".format(object_name, decorator_name)) + super(DecoratorNotFunctionError, self).__init__(message) + class IncorrectReferenceFormatError(PluginRuntimeError): """There are 2 possible errors that can be thrown with an incorrect reference. The reference passed in can be a non-string, throwing an IncorrectTypeError. The second error that can be thrown is IncorrectReferenceFormatError, which gets thrown when the reference is not - of the format "UNIX_HOST_ENVIRONMENT-#" nor of "WINDOWS_HOST_ENVIRONMENT-#". + of the format "UNIX_HOST_ENVIRONMENT-#" nor of + "WINDOWS_HOST_ENVIRONMENT-#". Args: reference (str): The incorrectly formatted reference @@ -129,6 +189,7 @@ class IncorrectReferenceFormatError(PluginRuntimeError): message (str): A user-readable message describing the exception. """ def __init__(self, reference): - message = ("Reference '{}' is not a correctly formatted host environment reference.".format(reference)) + message = ("Reference '{}' is not a correctly formatted host" + " environment reference.".format(reference)) super(IncorrectReferenceFormatError, self).__init__(message) diff --git a/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py b/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py new file mode 100644 index 00000000..9dc6c142 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py @@ -0,0 +1,102 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +import logging +import re + +from dlpx.virtualization.platform.exceptions import ( + MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError, + MigrationIdIncorrectFormatError) + +MIGRATION_ID_REGEX = re.compile(r'^\d+(\.\d+)*$') +logger = logging.getLogger(__name__) + + +class MigrationIdSet: + """ + Keeps track of all migrations and validites/standardizes them as they are + added / parsed. + + Exceptions can be thrown when trying to add a new migration id. Otherwise + at the end of reading in all migration functions can be gotten in the + correct order. + """ + def __init__(self): + """ + The list of migration ids will store migrations as an array of ids + where the id is represented by the standardized array of positive + integers. For example if there were these ids: 1.0.0, 1.2.03, and + 2.0.1.0, __migration_ids would be [ [1], [1, 2, 3], [2, 0, 1]] + """ + self.__migration_ids = [] + + def add(self, migration_id, impl_name): + """ + Validates that the migration id is the correct type/format and then + return the canonical format of the id. Add the id as an array of + integers into the list of migration ids. + """ + # First validate that the migration_id is the correct type/format. + self.validate_migration_id(migration_id, impl_name) + + # Then we must standardize the migration_id. + std_migration_id = self.standardize_migration_id_to_array( + migration_id, impl_name) + std_string = '.'.join(str(i) for i in std_migration_id) + + # Then we should check if this migration_id has already been used + if std_migration_id in self.__migration_ids: + raise MigrationIdAlreadyUsedError(migration_id, + std_string, + impl_name) + + # Lastly we should add this new array into the internal migration list. + self.__migration_ids.append(std_migration_id) + + # Return back the standardized format of the migration id + return std_string + + @staticmethod + def validate_migration_id(migration_id, impl_name): + # First validate that the id is a string + if not isinstance(migration_id, basestring): + raise MigrationIdIncorrectTypeError(migration_id, impl_name) + + # Next check if the id is the right format + if not MIGRATION_ID_REGEX.match(migration_id): + raise MigrationIdIncorrectFormatError.from_fields( + migration_id, impl_name, MIGRATION_ID_REGEX.pattern) + + @staticmethod + def standardize_migration_id_to_array(migration_id, impl_name): + # Split on the period and convert to integer + array = [int(i) for i in migration_id.split('.')] + + # + # We cannot allow a migration id of essentially '0' because otherwise + # there would be no way to add a migration that goes before this. + # + if not any(array): + raise MigrationIdIncorrectFormatError( + "The migration id '{}' used in the function '{}' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id, impl_name)) + + # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 + while array: + if not array[-1]: + # Remove the last element which is a zero from array + array.pop() + else: + break + + return array + + def get_sorted_ids(self): + # First sort the migration ids + self.__migration_ids.sort() + + # Then convert all these arrays to the usual string format. 
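+        # For example, [[1], [1, 2, 3], [2, 0, 1]] becomes ['1', '1.2.3', '2.0.1'].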
+ return ['.'.join(str(i) for i in migration_id) + for migration_id in self.__migration_ids] diff --git a/platform/src/main/python/dlpx/virtualization/platform/operation.py b/platform/src/main/python/dlpx/virtualization/platform/operation.py index 67b5bdd3..b938c270 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/operation.py +++ b/platform/src/main/python/dlpx/virtualization/platform/operation.py @@ -27,3 +27,9 @@ class Operation(Enum): VIRTUAL_STATUS = 'virtual.status()' VIRTUAL_INITIALIZE = 'virtual.initialize()' VIRTUAL_MOUNT_SPEC = 'virtual.mount_specification()' + + UPGRADE_REPOSITORY = 'upgrade.repository()' + UPGRADE_SOURCE_CONFIG = 'upgrade.source_config()' + UPGRADE_LINKED_SOURCE = 'upgrade.linked_source()' + UPGRADE_VIRTUAL_SOURCE = 'upgrade.virtual_source()' + UPGRADE_SNAPSHOT = 'upgrade.snapshot()' diff --git a/platform/src/main/python/dlpx/virtualization/platform/validation_util.py b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py new file mode 100644 index 00000000..bc39d098 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +import inspect +from dlpx.virtualization.platform.exceptions import DecoratorNotFunctionError + + +def check_function(impl, operation): + if not inspect.isfunction(impl) and not inspect.ismethod(impl): + raise DecoratorNotFunctionError(impl.__name__, operation.value) + return impl diff --git a/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py b/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py new file mode 100644 index 00000000..42db7f92 --- /dev/null +++ b/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py @@ -0,0 +1,107 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
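
The canonicalization behaviour exercised by the tests that follow can also be seen by driving MigrationIdSet directly. This is a minimal sketch; the second argument to add() is only the function name used in error messages, and the values here are invented.

from dlpx.virtualization.platform import MigrationIdSet

ids = MigrationIdSet()
print(ids.add('5.3.0.0', 'add_port'))         # '5.3'       - trailing zero parts trimmed
print(ids.add('2019.10.04', 'rename_field'))  # '2019.10.4' - leading zeros dropped
print(ids.add('50', 'noop'))                  # '50'
print(ids.get_sorted_ids())                   # ['5.3', '50', '2019.10.4'] - numeric ordering
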
+# + +import pytest +from dlpx.virtualization.platform.exceptions import ( + MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError, + MigrationIdIncorrectFormatError) +from dlpx.virtualization.platform import migration_id_set as m + + +class TestMigrationIdSet: + @staticmethod + @pytest.fixture + def migration_set(): + yield m.MigrationIdSet() + + @staticmethod + @pytest.mark.parametrize('migration_id,expected_std_id', [ + ('5.3.2.1', '5.3.2.1'), + ('1000', '1000'), + ('50.0.0', '50'), + ('50.0.0000.1', '50.0.0.1'), + ('2019.10.04', '2019.10.4')]) + def test_basic_add(migration_set, migration_id, expected_std_id): + actual_std_id = migration_set.add(migration_id, 'function') + + assert actual_std_id == expected_std_id + + @staticmethod + @pytest.mark.parametrize('id_one,id_two', [ + ('5.3.2.1', '5.3.2.1'), + ('1000', '1000.0.0'), + ('50.0.0', '50'), + ('50.0.0000.1', '50.0.0.1.0000'), + ('2019.0010.0004', '2019.10.4')]) + def test_same_migration_id_used(migration_set, id_one, id_two): + std_id = migration_set.add(id_one, 'function') + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info: + migration_set.add(id_two, 'function2') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'function2' has the" + " same canonical form '{}' as another migration.".format(id_two, + std_id)) + + @staticmethod + @pytest.mark.parametrize('migration_id', [True, + 1000, + {'random set'}, + ['random', 'list']]) + def test_migration_incorrect_type(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectTypeError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' should" + " be a string.".format(migration_id)) + + @staticmethod + @pytest.mark.parametrize('migration_id', ['Not integers', + '1000.', + '2019 10 20']) + def test_migration_incorrect_format(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' does not" + " follow the correct format '{}'.".format( + migration_id, m.MIGRATION_ID_REGEX.pattern)) + + @staticmethod + @pytest.mark.parametrize('migration_id', ['0.0', + '0', + '0.000.000.00.0']) + def test_migration_id_is_zero(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id)) + + @staticmethod + def test_get_sorted_ids(migration_set): + migration_set.add('2019.04.01', 'one') + migration_set.add('4.10.04', 'two') + migration_set.add('20190.10.006', 'three') + migration_set.add('1.2.3.4', 'four') + migration_set.add('5.4.3.2.1.0', 'five') + migration_set.add('1', 'six') + migration_set.add('10.01.10.00.1.0.0', 'seven') + + assert migration_set.get_sorted_ids() == ['1', + '1.2.3.4', + '4.10.4', + '5.4.3.2.1', + '10.1.10.0.1', + '2019.4.1', + '20190.10.6'] diff --git a/platform/src/test/python/dlpx/virtualization/test_upgrade.py b/platform/src/test/python/dlpx/virtualization/test_upgrade.py index d8313691..570b9a71 100755 --- a/platform/src/test/python/dlpx/virtualization/test_upgrade.py +++ 
b/platform/src/test/python/dlpx/virtualization/test_upgrade.py @@ -3,7 +3,9 @@ # import pytest -from dlpx.virtualization.platform.exceptions import MigrationIdAlreadyUsedError +from dlpx.virtualization.platform.exceptions import ( + DecoratorNotFunctionError, MigrationIdAlreadyUsedError) +from dlpx.virtualization.platform.operation import Operation as Op class TestUpgrade: @@ -17,20 +19,16 @@ def my_plugin(): def basic_upgrade_helper(decorator, id_to_impl, upgrade_operation): @decorator('2019.10.01') def repo_upgrade_one(input_dict): - output_dict = {} - output_dict['in'] = input_dict['in'] - output_dict['out'] = 'first' + output_dict = {'in': input_dict['in'], 'out': 'first'} return output_dict @decorator('2019.10.02') def repo_upgrade_two(input_dict): - output_dict = {} - output_dict['in'] = input_dict['in'] - output_dict['out'] = 'second' + output_dict = {'in': input_dict['in'], 'out': 'second'} return output_dict - migration_one = id_to_impl['2019.10.01'] - migration_two = id_to_impl['2019.10.02'] + migration_one = id_to_impl['2019.10.1'] + migration_two = id_to_impl['2019.10.2'] assert migration_one == repo_upgrade_one assert migration_two == repo_upgrade_two @@ -39,39 +37,31 @@ def repo_upgrade_two(input_dict): assert migration_two({'in':'in_two'}) == {'in': 'in_two', 'out': 'second'} - assert upgrade_operation.migration_id_list == ['2019.10.01', - '2019.10.02'] + assert upgrade_operation.migration_id_list == ['2019.10.1', + '2019.10.2'] @staticmethod - def same_migration_id_used_helper(decorator): - @decorator('2019.10.01') - def repo_upgrade(input_dict): - output_dict = {} - output_dict['in'] = input_dict['in'] - output_dict['out'] = 'first' - return output_dict + def decorator_not_function_helper(decorator, op): - with pytest.raises(MigrationIdAlreadyUsedError) as err_info: - @decorator('2019.10.01') - def upgrade_bad(): + with pytest.raises(DecoratorNotFunctionError) as err_info: + @decorator('2019.10.03') + class RandomClass(object): pass message = err_info.value.message - assert message == ( - "The migration id '2019.10.01' used in the function" - " 'upgrade_bad' has been used by another migration.") + assert message == ("The object '{}' decorated by '{}' is" + " not a function.".format('RandomClass', + op.value)) @staticmethod - def test_upgrade(my_plugin): + def test_upgrade_repository(my_plugin): TestUpgrade.basic_upgrade_helper( my_plugin.upgrade.repository, my_plugin.upgrade.repository_id_to_impl, my_plugin.upgrade) - @staticmethod - def test_upgrade_repository_same_migration_id_used(my_plugin): - TestUpgrade.same_migration_id_used_helper( - my_plugin.upgrade.repository) + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.repository, Op.UPGRADE_REPOSITORY) @staticmethod def test_upgrade_source_config(my_plugin): @@ -80,10 +70,8 @@ def test_upgrade_source_config(my_plugin): my_plugin.upgrade.source_config_id_to_impl, my_plugin.upgrade) - @staticmethod - def test_upgrade_source_config_same_migration_id_used(my_plugin): - TestUpgrade.same_migration_id_used_helper( - my_plugin.upgrade.source_config) + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.source_config, Op.UPGRADE_SOURCE_CONFIG) @staticmethod def test_upgrade_linked_source(my_plugin): @@ -92,10 +80,8 @@ def test_upgrade_linked_source(my_plugin): my_plugin.upgrade.linked_source_id_to_impl, my_plugin.upgrade) - @staticmethod - def test_upgrade_linked_source_same_migration_id_used(my_plugin): - TestUpgrade.same_migration_id_used_helper( - my_plugin.upgrade.source_config) + 
TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.linked_source, Op.UPGRADE_LINKED_SOURCE) @staticmethod def test_upgrade_virtual_source(my_plugin): @@ -104,10 +90,8 @@ def test_upgrade_virtual_source(my_plugin): my_plugin.upgrade.virtual_source_id_to_impl, my_plugin.upgrade) - @staticmethod - def test_upgrade_virtual_source_same_migration_id_used(my_plugin): - TestUpgrade.same_migration_id_used_helper( - my_plugin.upgrade.source_config) + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.virtual_source, Op.UPGRADE_VIRTUAL_SOURCE) @staticmethod def test_upgrade_snapshot(my_plugin): @@ -116,10 +100,8 @@ def test_upgrade_snapshot(my_plugin): my_plugin.upgrade.snapshot_id_to_impl, my_plugin.upgrade) - @staticmethod - def test_upgrade_snapshot_same_migration_id_used(my_plugin): - TestUpgrade.same_migration_id_used_helper( - my_plugin.upgrade.snapshot) + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.snapshot, Op.UPGRADE_SNAPSHOT) @staticmethod def test_upgrade_same_migration_id_used(my_plugin): @@ -131,7 +113,7 @@ def repo_upgrade_one(): def repo_upgrade_two(): return 'repo_two' - @my_plugin.upgrade.repository('2019.10.06') + @my_plugin.upgrade.repository('2019.10.006') def repo_upgrade_three(): return 'repo_three' @@ -140,11 +122,11 @@ def sc_upgrade_one(): return 'sc_one' with pytest.raises(MigrationIdAlreadyUsedError) as err_info_one: - @my_plugin.upgrade.source_config('2019.10.04') + @my_plugin.upgrade.source_config('2019.10.0004') def sc_upgrade_two(): return 'sc_two' - @my_plugin.upgrade.linked_source('2019.10.03') + @my_plugin.upgrade.linked_source('2019.10.3.000.0') def ls_upgrade_one(): return 'ls_one' @@ -158,7 +140,7 @@ def vs_upgrade_two(): return 'vs_two' with pytest.raises(MigrationIdAlreadyUsedError) as err_info_three: - @my_plugin.upgrade.snapshot('2019.10.01') + @my_plugin.upgrade.snapshot('2019.010.001') def snap_upgrade_one(): return 'snap_one' @@ -166,41 +148,44 @@ def snap_upgrade_one(): def snap_upgrade_two(): return 'snap_two' - assert my_plugin.upgrade.migration_id_list == ['2019.10.01', - '2019.10.02', - '2019.10.03', - '2019.10.04', - '2019.10.05', - '2019.10.06', + assert my_plugin.upgrade.migration_id_list == ['2019.10.1', + '2019.10.2', + '2019.10.3', + '2019.10.4', + '2019.10.5', + '2019.10.6', '2019.10.12'] - repo_one = my_plugin.upgrade.repository_id_to_impl['2019.10.01'] - repo_two = my_plugin.upgrade.repository_id_to_impl['2019.10.04'] - repo_three = my_plugin.upgrade.repository_id_to_impl['2019.10.06'] + repo_one = my_plugin.upgrade.repository_id_to_impl['2019.10.1'] + repo_two = my_plugin.upgrade.repository_id_to_impl['2019.10.4'] + repo_three = my_plugin.upgrade.repository_id_to_impl['2019.10.6'] assert repo_one == repo_upgrade_one assert repo_two == repo_upgrade_two assert repo_three == repo_upgrade_three - sc_one = my_plugin.upgrade.source_config_id_to_impl['2019.10.02'] + sc_one = my_plugin.upgrade.source_config_id_to_impl['2019.10.2'] assert sc_one == sc_upgrade_one - ls_one = my_plugin.upgrade.linked_source_id_to_impl['2019.10.03'] + ls_one = my_plugin.upgrade.linked_source_id_to_impl['2019.10.3'] assert ls_one == ls_upgrade_one - vs_two = my_plugin.upgrade.virtual_source_id_to_impl['2019.10.05'] + vs_two = my_plugin.upgrade.virtual_source_id_to_impl['2019.10.5'] assert vs_two == vs_upgrade_two snap_two = my_plugin.upgrade.snapshot_id_to_impl['2019.10.12'] assert snap_two == snap_upgrade_two assert err_info_one.value.message == ( - "The migration id '2019.10.04' used in the function" - " 'sc_upgrade_two' has 
been used by another migration.") + "The migration id '2019.10.0004' used in the function" + " 'sc_upgrade_two' has the same canonical form '2019.10.4'" + " as another migration.") assert err_info_two.value.message == ( "The migration id '2019.10.03' used in the function" - " 'vs_upgrade_one' has been used by another migration.") + " 'vs_upgrade_one' has the same canonical form '2019.10.3'" + " as another migration.") assert err_info_three.value.message == ( - "The migration id '2019.10.01' used in the function" - " 'snap_upgrade_one' has been used by another migration.") + "The migration id '2019.010.001' used in the function" + " 'snap_upgrade_one' has the same canonical form '2019.10.1'" + " as another migration.") diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index f7c85719..acd9249f 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "3f31378e575468296ad72d2328e65e878b7756d2b9bca44b0f8ced91cfd63cf1" + "sha256": "4d04a3aa2ea7ed3a60c5625c9204c9a75487c6ce189cffd6707d3d2fe101fe6e" }, "pipfile-spec": 6, "requires": {}, @@ -40,6 +40,7 @@ "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" ], + "index": "delphix", "version": "==7.0" }, "click-configfile": { @@ -47,6 +48,7 @@ "sha256:95beec13bee950e98f43c81dcdabef4f644091559ea66298f9dadf59351d90d1", "sha256:af2ae7123af57d850cd18edd915893e655b6b1bc30d1302fd040b1059bec073d" ], + "index": "delphix", "version": "==0.2.3" }, "configparser": { @@ -54,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -79,6 +81,7 @@ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" ], + "index": "delphix", "markers": null, "version": "==1.1.6" }, @@ -87,6 +90,7 @@ "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" ], + "index": "delphix", "version": "==3.7.9" }, "functools32": { @@ -94,7 +98,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "idna": { @@ -117,6 +121,7 @@ "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f", "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de" ], + "index": "delphix", "version": "==2.10.3" }, "jsonschema": { @@ -124,6 +129,7 @@ "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163", "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a" ], + "index": "delphix", "version": "==3.2.0" }, "markupsafe": { @@ -201,6 +207,7 @@ "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" ], + "index": "delphix", "version": "==3.6.1" }, "pycodestyle": { @@ -239,6 +246,7 @@ "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" ], + "index": "delphix", "version": "==5.1.2" }, "requests": { @@ -246,6 
+254,7 @@ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" ], + "index": "delphix", "version": "==2.22.0" }, "scandir": { @@ -324,7 +333,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -370,19 +379,20 @@ "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025" ], + "index": "delphix", "version": "==4.5.4" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz", - "version": "== 1.1.0-internal-upgrade-002" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz", + "version": "== 1.1.0-internal-upgrade-003" }, "entrypoints": { "hashes": [ @@ -398,6 +408,7 @@ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" ], + "index": "delphix", "markers": null, "version": "==1.1.6" }, @@ -406,6 +417,7 @@ "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" ], + "index": "delphix", "version": "==3.7.9" }, "funcsigs": { @@ -413,7 +425,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "functools32": { @@ -421,7 +433,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { @@ -436,6 +448,7 @@ "hashes": [ "sha256:66216f26b9d2c52e81808f3e674a6fb65d4bf719721394a1a9be926177e55fbe" ], + "index": "delphix", "version": "==0.9.7" }, "importlib-metadata": { @@ -451,6 +464,7 @@ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" ], + "index": "delphix", "version": "==4.3.21" }, "mccabe": { @@ -465,6 +479,7 @@ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" ], + "index": "delphix", "version": "==3.0.5" }, "more-itertools": { @@ -530,6 +545,7 @@ "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" ], + "index": "delphix", "version": "==4.6.6" }, "pytest-cov": { @@ -537,6 +553,7 @@ 
"sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", "sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626" ], + "index": "delphix", "version": "==2.8.1" }, "scandir": { @@ -584,6 +601,7 @@ "sha256:02ace10a00fa2e36c7ebd1df2ead91dbfbd7989686dc4ccbdc549e95d19f5780", "sha256:6f94b6a176a7c114cfa6bad86d40f259bbe0f10cf2fa7f2f4b3596fc5802a41b" ], + "index": "delphix", "version": "==0.28.0" }, "zipp": { diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index a510424e..cdf9ed1c 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,18 +1,18 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-002.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-002.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-002.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version == '2.7' +configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' coverage==4.5.4 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.0' +functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==0.23 ; python_version < '3.8' diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index bb1d75bf..191e9c2e 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -4,12 +4,12 @@ certifi==2019.9.11 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version == '2.7' +configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' idna==2.8 importlib-metadata==0.23 ; python_version < '3.8' jinja2==2.10.3 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py index 8d2880ea..e3bc9bf0 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py @@ -7,6 +7,21 @@ import re +class SDKToolingError(Exception): + """ + SDKBuildError is one of the main errors that gets caught in cli.py. Errors + that are not related to the user input should raise this error. The + message from this exception is posted to logger.error. message will be the + first arg that is passed in (for any exception that is extending it). + """ + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(SDKToolingError, self).__init__(message) + + class UserError(Exception): """ UserError is the main error that gets caught in cli.py. 
The message from diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 0b3e1ae6..1b7edee1 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -39,13 +39,12 @@ class PluginImporter: issues with validation of module content and entry points- will save errors/warnings in a dict that callers can access. """ - validation_maps = load_validation_maps() - expected_staged_args_by_op = validation_maps['EXPECTED_STAGED_ARGS_BY_OP'] - expected_direct_args_by_op = validation_maps['EXPECTED_DIRECT_ARGS_BY_OP'] - required_methods_by_plugin_type = \ - validation_maps['REQUIRED_METHODS_BY_PLUGIN_TYPE'] - required_methods_description = \ - validation_maps['REQUIRED_METHODS_DESCRIPTION'] + v_maps = load_validation_maps() + expected_staged_args_by_op = v_maps['EXPECTED_STAGED_ARGS_BY_OP'] + expected_direct_args_by_op = v_maps['EXPECTED_DIRECT_ARGS_BY_OP'] + expected_upgrade_args = v_maps['EXPECTED_UPGRADE_ARGS'] + required_methods_by_plugin_type = v_maps['REQUIRED_METHODS_BY_PLUGIN_TYPE'] + required_methods_description = v_maps['REQUIRED_METHODS_DESCRIPTION'] def __init__(self, src_dir, @@ -212,15 +211,24 @@ def __check_for_undefined_names(src_dir): def __report_warnings_and_exceptions(warnings): """ Prints the warnings and errors that were found in the plugin code, if - the warnings dictionary contains the 'exception' key. + the warnings dictionary contains the 'sdk exception' key this means + there was an sdk error and we should throw the error as such. """ - if warnings and 'exception' in warnings: - exception_msg = MessageUtils.exception_msg(warnings) - exception_msg += '\n{}'.format(MessageUtils.warning_msg(warnings)) - raise exceptions.UserError( - '{}\n{} Warning(s). {} Error(s).'.format( - exception_msg, len(warnings['warning']), - len(warnings['exception']))) + if warnings: + final_message = '\n'.join( + filter(None, [ + MessageUtils.sdk_exception_msg(warnings), + MessageUtils.exception_msg(warnings), + MessageUtils.warning_msg(warnings), + '{} Warning(s). {} Error(s).'.format( + len(warnings['warning']), + len(warnings['exception']) + + len(warnings['sdk exception'])) + ])) + if warnings['sdk exception']: + raise exceptions.SDKToolingError(final_message) + elif warnings['exception']: + raise exceptions.UserError(final_message) def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): @@ -249,6 +257,20 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): queue.put({'exception': user_err}) except RuntimeError as rt_err: queue.put({'exception': rt_err}) + except Exception as err: + # + # We need to figure out if this is an error that was raised inside the + # wrappers which would mean that it is a user error. Otherwise we + # should still queue the error but specify that it's not a user error. 
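+        # Illustrative note: platform wrapper errors subclass PlatformError,
+        # so a wrapper failure (for instance a bad migration id) should carry
+        # 'PlatformError' among its immediate base-class names and be surfaced
+        # as a UserError below, while an arbitrary exception (see the
+        # arbitrary_error.py fake plugin) falls through to SDKToolingError.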
+ # + parent_class_list = [base.__name__ for base in err.__class__.__bases__] + if 'PlatformError' in parent_class_list: + # This is a user error + error = exceptions.UserError(err.message) + queue.put({'exception': error}) + else: + error = exceptions.SDKToolingError(err.message) + queue.put({'sdk exception': error}) finally: sys.path.remove(src_dir) @@ -333,7 +355,9 @@ def _validate_and_get_manifest(module, module_content, entry_point): 'hasVirtualStatus': bool(plugin_object.virtual.status_impl), 'hasInitialize': - bool(plugin_object.virtual.initialize_impl) + bool(plugin_object.virtual.initialize_impl), + 'migrationIdList': + plugin_object.upgrade.migration_id_list } return manifest @@ -354,24 +378,58 @@ def _validate_named_args(module_content, entry_point, plugin_type): # us the name of the plugin implemntation method name. That name # is useful in looking up named arguments expected and what is # actually in the plugin code. And plugin_op_type can be, for e.g. - # LinkedOperations, DiscoveryOperations, VirtualOperations + # LinkedOperations, DiscoveryOperations, VirtualOperations. + # UpgradeOperations will need to be handled separately because it's + # attributes are different. # plugin_op_type = plugin_attrib.__class__.__name__ if plugin_op_type == 'UpgradeOperations': # - # For now just ignore all upgrade operations because the fields - # aren't all functions. + # Handle the upgrade operations separately because they aren't + # just functions. # + warnings.extend(_check_upgrade_operations(plugin_attrib)) continue - for op_name_key, op_name in plugin_attrib.__dict__.items(): + for op_name_key, op_name in vars(plugin_attrib).items(): if op_name is None: continue - actual_args = inspect.getargspec(op_name) + actual = inspect.getargspec(op_name) warnings.extend( _check_args(method_name=op_name.__name__, expected_args=_lookup_expected_args( plugin_type, plugin_op_type, op_name_key), - actual_args=actual_args.args)) + actual_args=actual.args)) + + return warnings + + +def _check_upgrade_operations(upgrade_operations): + """ + Does named argument validation of all functions in dictionaries by looping + first through all the attributes in the UpgradeOperations for this plugin. + Any attributes that are not dictionaries that map migration_id -> + upgrade_function are skipped. We then loop through every key/value pair + of each of the dictionaries and validate that the argument in the defined + function has the expected name. + """ + warnings = [] + + for attribute_name, attribute in vars(upgrade_operations).items(): + if attribute_name not in PluginImporter.expected_upgrade_args.keys(): + # Skip if not in one of the operation dicts we store functions in. + continue + # + # If the attribute_name was in the expected upgrade dicts then we know + # it is a dict containing migration id -> upgrade function that we can + # iterate on. 
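+        # For example (illustrative values): repository_id_to_impl might look
+        # like {'5.4.0.1': repo_upgrade}, and each registered function is then
+        # checked against its single expected argument name, here
+        # 'old_repository' from EXPECTED_UPGRADE_ARGS.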
+ # + for migration_id, migration_func in attribute.items(): + actual = inspect.getargspec(migration_func).args + expected = PluginImporter.expected_upgrade_args[attribute_name] + warnings.extend( + _check_args(method_name=migration_func.__name__, + expected_args=expected, + actual_args=actual)) return warnings diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index cfc77c99..541a1066 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -26,7 +26,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-002 +package_version = 1.1.0-internal-upgrade-003 virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py b/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py index deb2da64..ef2132ad 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py @@ -44,17 +44,27 @@ class MessageUtils: Defines helpers methods to format warning and exception messages. """ @staticmethod - def exception_msg(exceptions): - exception_msg = '\n'.join( + def sdk_exception_msg(warnings): + sdk_exception_msg = '\n'.join([ + MessageUtils.__format_msg('SDK Error', ex) + for ex in warnings['sdk exception'] + ]) + return sdk_exception_msg + + @staticmethod + def exception_msg(warnings): + exception_msg = '\n'.join([ MessageUtils.__format_msg('Error', ex) - for ex in exceptions['exception']) + for ex in warnings['exception'] + ]) return exception_msg @staticmethod def warning_msg(warnings): - warning_msg = '\n'.join( + warning_msg = '\n'.join([ MessageUtils.__format_msg('Warning', warning) - for warning in warnings['warning']) + for warning in warnings['warning'] + ]) return warning_msg @staticmethod diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml index 2d5b0f99..063877ad 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml @@ -139,23 +139,35 @@ EXPECTED_DIRECT_ARGS_BY_OP: - virtual_source - repository +EXPECTED_UPGRADE_ARGS: + repository_id_to_impl: + - old_repository + source_config_id_to_impl: + - old_source_config + linked_source_id_to_impl: + - old_linked_source + virtual_source_id_to_impl: + - old_virtual_source + snapshot_id_to_impl: + - old_snapshot + REQUIRED_METHODS_BY_PLUGIN_TYPE: DIRECT: - hasRepositoryDiscovery: discovery.repository(), - hasSourceConfigDiscovery: discovery.source_config(), - hasLinkedPostSnapshot: linked.post_snapshot(), - hasVirtualConfigure: virtual.configure(), - hasVirtualReconfigure: virtual.reconfigure(), - hasVirtualPostSnapshot: virtual.post_snapshot(), + hasRepositoryDiscovery: discovery.repository() + hasSourceConfigDiscovery: discovery.source_config() + hasLinkedPostSnapshot: linked.post_snapshot() + hasVirtualConfigure: virtual.configure() + hasVirtualReconfigure: virtual.reconfigure() + hasVirtualPostSnapshot: virtual.post_snapshot() hasVirtualMountSpecification: virtual.mount_specification() STAGED: - hasRepositoryDiscovery: 
discovery.repository(), - hasSourceConfigDiscovery: discovery.source_config(), - hasLinkedPostSnapshot: linked.post_snapshot(), - hasLinkedMountSpecification: linked.mount_specification(), - hasVirtualConfigure: virtual.configure(), - hasVirtualReconfigure: virtual.reconfigure(), - hasVirtualPostSnapshot: virtual.post_snapshot(), + hasRepositoryDiscovery: discovery.repository() + hasSourceConfigDiscovery: discovery.source_config() + hasLinkedPostSnapshot: linked.post_snapshot() + hasLinkedMountSpecification: linked.mount_specification() + hasVirtualConfigure: virtual.configure() + hasVirtualReconfigure: virtual.reconfigure() + hasVirtualPostSnapshot: virtual.post_snapshot() hasVirtualMountSpecification: virtual.mount_specification() REQUIRED_METHODS_DESCRIPTION: diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 4b442ff6..907dc74a 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -46,18 +46,6 @@ def plugin_config_filename(): return 'plugin_config.yml' -@pytest.fixture -def fake_staged_plugin_config(): - return os.path.join(os.path.dirname(__file__), - 'fake_plugin/staged/plugin_config.yml') - - -@pytest.fixture -def fake_direct_plugin_config(): - return os.path.join(os.path.dirname(__file__), - 'fake_plugin/direct/plugin_config.yml') - - @pytest.fixture def src_dir(tmpdir, src_dirname): """ @@ -181,38 +169,55 @@ def artifact_file_created(): @pytest.fixture -def plugin_config_content(plugin_id, plugin_name, src_dir, schema_file, - language, manual_discovery, plugin_type): +def plugin_config_content( + plugin_id, + plugin_name, + version, + language, + host_types, + plugin_type, + entry_point, + src_dir, + schema_file, + manual_discovery, +): """ This fixutre creates the dict expected in the properties yaml file the customer must provide for the build and compile commands. """ config = { - 'version': '2.0.0', - 'hostTypes': ['UNIX'], - 'entryPoint': 'python_vfiles:vfiles', 'defaultLocale': 'en-us', 'rootSquashEnabled': True, } - if id: + + if plugin_id: config['id'] = plugin_id if plugin_name: config['name'] = plugin_name + if version: + config['version'] = version + + if language: + config['language'] = language + + if host_types: + config['hostTypes'] = host_types + if plugin_type: config['pluginType'] = plugin_type + if entry_point: + config['entryPoint'] = entry_point + if src_dir: config['srcDir'] = src_dir if schema_file: config['schemaFile'] = schema_file - if language: - config['language'] = language - - # Here we do is not None check because we will be passing in + # Here we do an 'is not None' check because we will be passing in # booleans as a parameter in tests. 
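    # (A plain truthiness check would drop an explicit manual_discovery=False,
    # so the None check keeps False values in the generated config.)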
if manual_discovery is not None: config['manualDiscovery'] = manual_discovery @@ -221,64 +226,190 @@ def plugin_config_content(plugin_id, plugin_name, src_dir, schema_file, @pytest.fixture -def plugin_entry_point_name(): +def plugin_id(): + return '16bef554-9470-11e9-b2e3-8c8590d4a42c' + + +@pytest.fixture +def plugin_name(): + return 'python_vfiles' + + +@pytest.fixture +def version(): + return '2.0.0' + + +@pytest.fixture +def language(): + return 'PYTHON27' + + +@pytest.fixture +def host_types(): + return ['UNIX'] + + +@pytest.fixture +def plugin_type(): + return util_classes.DIRECT_TYPE + + +@pytest.fixture +def entry_point(entry_point_module, entry_point_object): + return '{}:{}'.format(entry_point_module, entry_point_object) + + +@pytest.fixture +def entry_point_module(): + return 'python_vfiles' + + +@pytest.fixture +def entry_point_object(): return 'vfiles' @pytest.fixture -def plugin_module_content(plugin_entry_point_name): +def manual_discovery(): + return None + + +@pytest.fixture +def artifact_manual_discovery(): + return True + + +@pytest.fixture +def plugin_module_content(entry_point_object, discovery_operation, + linked_operation, virtual_operation, + upgrade_operation): class Object(object): pass - discovery = Object() - discovery.repository_impl = True - discovery.source_config_impl = True - - linked = Object() - linked.pre_snapshot_impl = True - linked.post_snapshot_impl = True - linked.start_staging_impl = True - linked.stop_staging_impl = False - linked.status_impl = True - linked.worker_impl = False - linked.mount_specification_impl = True - - virtual = Object() - virtual.configure_impl = True - virtual.unconfigure_impl = False - virtual.reconfigure_impl = True - virtual.start_impl = True - virtual.stop_impl = False - virtual.pre_snapshot_impl = True - virtual.post_snapshot_impl = True - virtual.mount_specification_impl = True - virtual.status_impl = False - virtual.initialize_impl = False - plugin_object = Object() - plugin_object.discovery = discovery - plugin_object.linked = linked - plugin_object.virtual = virtual + plugin_object.discovery = discovery_operation + plugin_object.linked = linked_operation + plugin_object.virtual = virtual_operation + plugin_object.upgrade = upgrade_operation plugin_module = Object() - setattr(plugin_module, plugin_entry_point_name, plugin_object) + setattr(plugin_module, entry_point_object, plugin_object) return plugin_module @pytest.fixture -def plugin_manifest(): +def discovery_operation(): + class DiscoveryOperations(object): + pass + + discovery = DiscoveryOperations() + + def repository_discovery(source_connection): + return None + + def source_config_discovery(source_connection, repository): + return None + + discovery.repository_impl = repository_discovery + discovery.source_config_impl = source_config_discovery + + return discovery + + +@pytest.fixture +def linked_operation(): + class LinkedOperations(object): + pass + + linked = LinkedOperations() + + def pre_snapshot(direct_source, repository, source_config): + pass + + def post_snapshot(direct_source, repository, source_config): + return None + + linked.pre_snapshot_impl = pre_snapshot + linked.post_snapshot_impl = post_snapshot + linked.start_staging_impl = None + linked.stop_staging_impl = None + linked.status_impl = None + linked.worker_impl = None + linked.mount_specification_impl = None + + return linked + + +@pytest.fixture +def virtual_operation(): + class VirtualOperations(object): + pass + + virtual = VirtualOperations() + + def configure(virtual_source, 
repository, snapshot): + return None + + def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + def start(virtual_source, repository, source_config): + pass + + def pre_snapshot(virtual_source, repository, source_config): + pass + + def post_snapshot(virtual_source, repository, source_config): + return None + + def mount_specification(virtual_source, repository): + return None + + virtual.configure_impl = configure + virtual.unconfigure_impl = None + virtual.reconfigure_impl = reconfigure + virtual.start_impl = start + virtual.stop_impl = None + virtual.pre_snapshot_impl = pre_snapshot + virtual.post_snapshot_impl = post_snapshot + virtual.mount_specification_impl = mount_specification + virtual.status_impl = None + virtual.initialize_impl = None + + return virtual + + +@pytest.fixture +def upgrade_operation(): + class UpgradeOperation(object): + pass + + upgrade = UpgradeOperation() + upgrade.migration_id_list = [] + upgrade.repository_id_to_impl = {} + upgrade.source_config_id_to_impl = {} + upgrade.linked_source_id_to_impl = {} + upgrade.virtual_source_id_to_impl = {} + upgrade.snapshot_id_to_impl = {} + + return upgrade + + +@pytest.fixture +def plugin_manifest(upgrade_operation): manifest = { 'type': 'PluginManifest', 'hasRepositoryDiscovery': True, 'hasSourceConfigDiscovery': True, 'hasLinkedPreSnapshot': True, 'hasLinkedPostSnapshot': True, - 'hasLinkedStartStaging': True, + 'hasLinkedStartStaging': False, 'hasLinkedStopStaging': False, - 'hasLinkedStatus': True, + 'hasLinkedStatus': False, 'hasLinkedWorker': False, - 'hasLinkedMountSpecification': True, + 'hasLinkedMountSpecification': False, 'hasVirtualConfigure': True, 'hasVirtualUnconfigure': False, 'hasVirtualReconfigure': True, @@ -288,41 +419,12 @@ def plugin_manifest(): 'hasVirtualPostSnapshot': True, 'hasVirtualMountSpecification': True, 'hasVirtualStatus': False, - 'hasInitialize': False + 'hasInitialize': False, + 'migrationIdList': upgrade_operation.migration_id_list } return manifest -@pytest.fixture -def plugin_id(): - return '16bef554-9470-11e9-b2e3-8c8590d4a42c' - - -@pytest.fixture -def plugin_name(): - return 'python_vfiles' - - -@pytest.fixture -def language(): - return 'PYTHON27' - - -@pytest.fixture -def manual_discovery(): - return None - - -@pytest.fixture -def artifact_manual_discovery(): - return True - - -@pytest.fixture -def plugin_type(): - return util_classes.DIRECT_TYPE - - @pytest.fixture def schema_content(repository_definition, source_config_definition, virtual_source_definition, linked_source_definition, diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py new file mode 100644 index 00000000..cd28b92c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py @@ -0,0 +1,19 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +class ArbitraryError(Exception): + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(ArbitraryError, self).__init__(message) + + +raise ArbitraryError('Got an arbitrary non-platforms error for testing.') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py new file mode 100644 index 00000000..2688405b --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from __future__ import print_function + +import logging + +from dlpx.virtualization.platform import Plugin + +logger = logging.getLogger() +logger.setLevel(logging.NOTSET) + +plugin = Plugin() + + +@plugin.discovery.repository() +def repository_discovery(source_connection): + return None + + +@plugin.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +# Defining the decorator as not a function +@plugin.linked.pre_snapshot() +class PreSnapshot(object): + pass diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py new file mode 100644 index 00000000..6b99f58c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('1234.0.0.') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py new file mode 100644 index 00000000..6ea3add1 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository(['testing', 'out', 'validation']) +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py new file mode 100644 index 00000000..5f8196d1 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('5.4.0.1') +def repo_upgrade(old_repository): + return old_repository + + +@plugin.upgrade.snapshot('5.04.000.01') +def snap_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py similarity index 93% rename from tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py rename to tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py index d5a00513..c0a031bc 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py @@ -71,3 +71,8 @@ def stop(repository, source_config, virtual_source): @vfiles.virtual.unconfigure() def unconfigure(repository, source_config, virtual_source): pass + + +@vfiles.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py new file mode 100644 index 00000000..59fbcc0c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.discovery.repository() +def repository_discovery(source_connection): + return None + + +@plugin.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +# Defining another function with the same decorator +@plugin.discovery.source_config() +def source_config_discovery_two(source_connection, repository): + return None diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml deleted file mode 100644 index 6ddc6cd1..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml +++ /dev/null @@ -1,11 +0,0 @@ -id: 16bef554-9470-11e9-b2e3-8c8590d4a42c -name: Unstructured Files using Python -version: 2.0.0 -hostTypes: - - UNIX -entryPoint: python_vfiles:vfiles -srcDir: src/ -schemaFile: ./schema.json -manualDiscovery: true -pluginType: DIRECT -language: PYTHON27 diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json deleted file mode 100644 index ba2ebcc6..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "repositoryDefinition": { - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "sourceConfigDefinition": { - "type": "object", - "required": ["name", "path"], - "additionalProperties": false, - "properties": { - "name": { "type": "string" }, - "path": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["path"] - }, - "virtualSourceDefinition": { - 
"type": "object", - "additionalProperties" : false, - "properties" : { - "path": { "type": "string" } - } - }, - "linkedSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : {} - }, - "snapshotDefinition": { - "type" : "object", - "additionalProperties" : false, - "properties" : {} - } -} diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py new file mode 100644 index 00000000..baf0d855 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py @@ -0,0 +1,84 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin, Status + +direct = Plugin() + + +@direct.discovery.repository() +def repository_discovery(source_connection): + return [] + + +@direct.discovery.source_config() +def source_config_discovery(source_connection, repository): + return [] + + +@direct.linked.pre_snapshot() +def direct_pre_snapshot(direct_source, repository, source_config): + return + + +@direct.linked.post_snapshot() +def direct_post_snapshot(direct_source, repository, source_config): + return None + + +@direct.virtual.configure() +def configure(virtual_source, repository, snapshot): + path = virtual_source.parameters.path + name = "VDB mounted to " + path + return None + + +@direct.virtual.mount_specification() +def mount_specification(repository, virtual_source): + return None + + +@direct.virtual.post_snapshot() +def postSnapshot(repository, source_config, virtual_source): + return None + + +@direct.virtual.pre_snapshot() +def preSnapshot(repository, source_config, virtual_source): + pass + + +@direct.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + +@direct.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@direct.virtual.status() +def status(repository, source_config, virtual_source): + return Status.ACTIVE + + +@direct.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@direct.virtual.unconfigure() +def unconfigure(repository, source_config, virtual_source): + pass + + +@direct.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository + + +@direct.upgrade.snapshot('2019.11.30') +def repo_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py new file mode 100644 index 00000000..68ecd5b2 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py @@ -0,0 +1,100 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin, Status + +direct = Plugin() + + +@direct.discovery.repository() +def repository_discovery(source_connection): + return [] + + +@direct.discovery.source_config() +def source_config_discovery(source_connection, repository): + return [] + + +@direct.linked.pre_snapshot() +def direct_pre_snapshot(direct_source, repository, source_config): + return + + +@direct.linked.post_snapshot() +def direct_post_snapshot(direct_source, repository, source_config): + return None + + +@direct.virtual.configure() +def configure(virtual_source, repository, snapshot): + return None + + +@direct.virtual.mount_specification() +def mount_specification(repository, virtual_source): + return None + + +@direct.virtual.post_snapshot() +def postSnapshot(repository, source_config, virtual_source): + return None + + +@direct.virtual.pre_snapshot() +def preSnapshot(repository, source_config, virtual_source): + pass + + +@direct.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + +@direct.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@direct.virtual.status() +def status(repository, source_config, virtual_source): + return Status.ACTIVE + + +@direct.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@direct.virtual.unconfigure() +def unconfigure(repository, source_config, virtual_source): + pass + + +@direct.upgrade.repository('2019.11.20') +def repo_upgrade(old_repository): + return old_repository + + +@direct.upgrade.source_config('2019.11.22') +def sc_upgrade(old_source_config): + return old_source_config + + +# Added second arg to check if length arg check fails. +@direct.upgrade.linked_source('2019.11.24') +def ls_upgrade(old_linked, old_source): + return old_linked + + +# Renamed old_virtual_source to old_linked_source to test named arg checks. +@direct.upgrade.virtual_source('2019.11.26') +def ls_upgrade(old_linked_source): + return old_linked_source + + +# Renamed old_snapshot to bad_input_name to test named arg checks. +@direct.upgrade.snapshot('2019.11.30') +def snap_upgrade(bad_input_name): + return bad_input_name diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py similarity index 89% rename from tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py rename to tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py index acd32437..094c1dde 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py @@ -17,12 +17,12 @@ # Renamed source_connection to connection to test if named arg check detects. @staged.discovery.repository() def repository_discovery(connection): - return None + return [] @staged.discovery.source_config() def source_config_discovery(source_connection, repository): - return None + return [] @staged.linked.mount_specification() @@ -67,6 +67,11 @@ def configure(virtual_source, repository, snapshot): return None +@staged.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + return None + + # Removed virtual.mount_specification for test validation. 
@@ -86,8 +91,11 @@ def start(repository, source_config, virtual_source): # Added snapshot parameter to check if arg check fails. - - @staged.virtual.stop() def stop(repository, source_config, virtual_source, snapshot): pass + + +@staged.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml deleted file mode 100644 index 1742b86b..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# -id: 16bef554-9470-11e9-b2e3-8c8590d4a42c -name: Staged Toolkit using Python -version: 1.0.0 -hostTypes: - - UNIX -entryPoint: python_staged:staged -srcDir: src/ -schemaFile: ./schema.json -manualDiscovery: true -pluginType: STAGED -language: PYTHON27 diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json deleted file mode 100644 index 7c7d10ea..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "repositoryDefinition": { - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "sourceConfigDefinition": { - "type": "object", - "required": ["name"], - "additionalProperties": false, - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "virtualSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : { - "path": { "type": "string" } - }, - "required": ["path"] - }, - "linkedSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties": { - "path": { "type": "string" } - } - }, - "snapshotDefinition": { - "type" : "object", - "additionalProperties" : false, - "properties" : {} - } -} diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py new file mode 100644 index 00000000..52da22cb --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py @@ -0,0 +1,101 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from __future__ import print_function + +import logging + +from dlpx.virtualization.platform import Plugin + +logger = logging.getLogger() +logger.setLevel(logging.NOTSET) + +staged = Plugin() + + +@staged.discovery.repository() +def repository_discovery(source_connection): + return None + + +@staged.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +@staged.linked.mount_specification() +def staged_mount_specification(staged_source, repository): + return None + + +@staged.linked.pre_snapshot() +def staged_pre_snapshot(repository, source_config, staged_source, + snapshot_parameters): + pass + + +@staged.linked.post_snapshot() +def staged_post_snapshot(repository, source_config, staged_source, + snapshot_parameters): + return None + + +@staged.linked.start_staging() +def start_staging(repository, source_config, staged_source): + pass + + +@staged.linked.stop_staging() +def stop_staging(repository, source_config, staged_source): + pass + + +@staged.linked.status() +def staged_status(staged_source, repository, source_config): + return None + + +@staged.linked.worker() +def staged_worker(repository, source_config, staged_source): + pass + + +@staged.virtual.configure() +def configure(virtual_source, repository, snapshot): + return None + + +@staged.virtual.mount_specification() +def mount_specification(virtual_source, repository): + return None + + +@staged.virtual.pre_snapshot() +def pre_snapshot(repository, source_config, virtual_source): + pass + + +@staged.virtual.post_snapshot() +def post_snapshot(repository, source_config, virtual_source): + return None + + +@staged.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@staged.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@staged.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository + + +@staged.upgrade.snapshot('2019.11.30') +def repo_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index e7fe5699..bdea58bc 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -8,7 +8,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-002' + assert package_util.get_version() == '1.1.0-internal-upgrade-003' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 37a64f50..104af635 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -13,11 +13,11 @@ class TestPluginImporter: @staticmethod @mock.patch('importlib.import_module') def test_get_plugin_manifest(mock_import, src_dir, plugin_type, - plugin_name, plugin_entry_point_name, + entry_point_module, entry_point_object, plugin_module_content, plugin_manifest): mock_import.return_value = plugin_module_content - importer = PluginImporter(src_dir, plugin_name, - plugin_entry_point_name, plugin_type, False) + importer = PluginImporter(src_dir, entry_point_module, + entry_point_object, plugin_type, False) 
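        # The trailing False appears to be the 'validate' flag (cf.
        # _get_manifest's signature), so this test builds the manifest without
        # running argument validation.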
manifest, warnings = importer.import_plugin() assert not warnings @@ -26,15 +26,15 @@ def test_get_plugin_manifest(mock_import, src_dir, plugin_type, @staticmethod @mock.patch('importlib.import_module') def test_plugin_module_content_none(mock_import, src_dir, plugin_type, - plugin_name, plugin_entry_point_name): + entry_point_module, + entry_point_object): mock_import.return_value = None manifest = {} warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: - importer = PluginImporter(src_dir, plugin_name, - plugin_entry_point_name, plugin_type, - False) + importer = PluginImporter(src_dir, entry_point_module, + entry_point_object, plugin_type, False) manifest, warnings = importer.import_plugin() message = str(err_info) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 2d1fe09b..1067da7c 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -4,8 +4,6 @@ import json import os -import uuid -from collections import OrderedDict import mock import pytest @@ -15,14 +13,12 @@ @pytest.fixture -def plugin_config_file(tmpdir): - return os.path.join(tmpdir.strpath, 'plugin_config.yml') - - -@pytest.fixture -def src_dir(tmpdir): - tmpdir.mkdir('src') - return os.path.join(tmpdir.strpath, 'src') +def test_src_dir(plugin_type): + """ + This fixture gets the path of the fake plugin src files used for testing + """ + return os.path.join(os.path.dirname(__file__), 'fake_plugin', + plugin_type.lower()) class TestPluginValidator: @@ -30,16 +26,8 @@ class TestPluginValidator: @pytest.mark.parametrize( 'schema_content', ['{}\nNOT JSON'.format(json.dumps({'random': 'json'}))]) - def test_plugin_bad_schema(plugin_config_file, schema_file): - plugin_config_content = OrderedDict([ - ('name', 'staged'.encode('utf-8')), - ('prettyName', 'StagedPlugin'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) + def test_plugin_bad_schema(plugin_config_file, plugin_config_content, + schema_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, schema_file, @@ -52,6 +40,7 @@ def test_plugin_bad_schema(plugin_config_file, schema_file): ' (char 19 - 27)'.format(schema_file)) in message @staticmethod + @pytest.mark.parametrize('plugin_config_file', ['/dir/plugin_config.yml']) def test_plugin_bad_config_file(plugin_config_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, @@ -69,17 +58,8 @@ def test_plugin_bad_config_file(plugin_config_file): @mock.patch.object(PluginValidator, '_PluginValidator__import_plugin', return_value=({}, None)) - def test_plugin_valid_content(mock_import_plugin, mock_relative_path, - src_dir, plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), 
('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + def test_plugin_valid_content(mock_import_plugin, src_dir, + plugin_config_file, plugin_config_content): validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) @@ -88,16 +68,8 @@ def test_plugin_valid_content(mock_import_plugin, mock_relative_path, mock_import_plugin.assert_called() @staticmethod - def test_plugin_missing_field(plugin_config_file): - plugin_config_content = OrderedDict([ - ('name', 'staged'.encode('utf-8')), ('version', '0.1.0'), - ('language', 'PYTHON27'), ('hostTypes', ['UNIX']), - ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize('src_dir', [None]) + def test_plugin_missing_field(plugin_config_file, plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, @@ -111,22 +83,12 @@ def test_plugin_missing_field(plugin_config_file): @mock.patch.object(PluginValidator, '_PluginValidator__import_plugin', return_value=({}, None)) - @pytest.mark.parametrize('version, expected', [ - pytest.param('xxx', "'xxx' does not match"), - pytest.param('1.0.0', None), - pytest.param('1.0.0_HF', None) - ]) - def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, - src_dir, plugin_config_file, version, + @pytest.mark.parametrize('version,expected', + [('xxx', "'xxx' does not match"), ('1.0.0', None), + ('1.0.0_HF', None)]) + def test_plugin_version_format(mock_import_plugin, src_dir, + plugin_config_file, plugin_config_content, expected): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', version), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) try: validator = PluginValidator.from_config_content( @@ -143,28 +105,17 @@ def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, @mock.patch.object(PluginValidator, '_PluginValidator__import_plugin', return_value=({}, None)) - @pytest.mark.parametrize('entry_point, expected', [ - pytest.param('staged_plugin', "'staged_plugin' does not match"), - pytest.param(':staged_plugin', "':staged_plugin' does not match"), - pytest.param('staged:', "'staged:' does not match"), - pytest.param('staged_plugin::staged', - "'staged_plugin::staged' does not match"), - pytest.param(':staged_plugin:staged:', - "':staged_plugin:staged:' does not match"), - pytest.param('staged_plugin:staged', None) - ]) - def test_plugin_entry_point(mock_import_plugin, mock_relative_path, - src_dir, plugin_config_file, entry_point, + @pytest.mark.parametrize( + 'entry_point,expected', + [('staged_plugin', "'staged_plugin' does not match"), + (':staged_plugin', "':staged_plugin' does not match"), + ('staged:', "'staged:' does not match"), + ('staged_plugin::staged', "'staged_plugin::staged' does not match"), + (':staged_plugin:staged:', "':staged_plugin:staged:' does not match"), + ('staged_plugin:staged', None)]) + def test_plugin_entry_point(mock_import_plugin, src_dir, + plugin_config_file, plugin_config_content, expected): - plugin_config_content = 
OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', entry_point.encode('utf-8')), ('srcDir', src_dir), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, @@ -176,16 +127,10 @@ def test_plugin_entry_point(mock_import_plugin, mock_relative_path, assert expected in message @staticmethod - def test_plugin_additional_properties(src_dir, plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('unknown_key', 'unknown_value'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) + def test_plugin_additional_properties(src_dir, plugin_config_file, + plugin_config_content): + # Adding an unknown key + plugin_config_content['unknown_key'] = 'unknown_value' try: validator = PluginValidator.from_config_content( @@ -194,20 +139,14 @@ def test_plugin_additional_properties(src_dir, plugin_config_file): validator.validate() except exceptions.SchemaValidationError as err_info: message = err_info.message - assert "Additional properties are not allowed " \ - "('unknown_key' was unexpected)" in message + assert ("Additional properties are not allowed" + " ('unknown_key' was unexpected)" in message) @staticmethod - def test_multiple_validation_errors(plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['xxx']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize('host_types', [['xxx']]) + @pytest.mark.parametrize('src_dir', [None]) + def test_multiple_validation_errors(plugin_config_file, + plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, @@ -217,65 +156,20 @@ def test_multiple_validation_errors(plugin_config_file): assert "'srcDir' is a required property" in message assert "'xxx' is not one of ['UNIX', 'WINDOWS']" in message - @staticmethod - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_staged_plugin(mock_file_util, fake_staged_plugin_config): - src_dir = os.path.dirname(fake_staged_plugin_config) - mock_file_util.return_value = os.path.join(src_dir, 'src/') - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_staged_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() - - message = err_info.value.message - assert validator.result.warnings.items() > 0 - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required method' in message - - @staticmethod - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_direct_plugin(mock_file_util, 
fake_direct_plugin_config): - src_dir = os.path.dirname(fake_direct_plugin_config) - mock_file_util.return_value = os.path.join(src_dir, 'src/') - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_direct_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() - - message = err_info.value.message - assert validator.result.warnings.items() > 0 - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required method' in message - @staticmethod @mock.patch('os.path.isabs', return_value=False) @mock.patch.object(PluginValidator, '_PluginValidator__import_plugin', return_value=({}, None)) - @pytest.mark.parametrize('plugin_id , expected', [ - pytest.param('Staged_plugin', "'Staged_plugin' does not match"), - pytest.param('staged_Plugin', "'staged_Plugin' does not match"), - pytest.param('STAGED', "'STAGED' does not match"), - pytest.param('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), - pytest.param('e3b69c61-4c30-44f7-92c0-504c8388b91e', None) - ]) - def test_plugin_id(mock_import_plugin, mock_relative_path, src_dir, - plugin_config_file, plugin_id, expected): - plugin_config_content = OrderedDict([ - ('id', plugin_id.encode('utf-8')), ('name', 'python_vfiles'), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize( + 'plugin_id , expected', + [('Staged_plugin', "'Staged_plugin' does not match"), + ('staged_Plugin', "'staged_Plugin' does not match"), + ('STAGED', "'STAGED' does not match"), + ('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), + ('e3b69c61-4c30-44f7-92c0-504c8388b91e', None)]) + def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, @@ -289,15 +183,8 @@ def test_plugin_id(mock_import_plugin, mock_relative_path, src_dir, @staticmethod @pytest.mark.parametrize('validation_mode', [ValidationMode.INFO, ValidationMode.WARNING]) - def test_plugin_info_warn_mode(plugin_config_file, validation_mode): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) + def test_plugin_info_warn_mode(plugin_config_file, plugin_config_content, + validation_mode): err_info = None try: validator = PluginValidator.from_config_content( @@ -308,3 +195,117 @@ def test_plugin_info_warn_mode(plugin_config_file, validation_mode): err_info = e assert err_info is None + + @staticmethod + @pytest.mark.parametrize('entry_point,plugin_type', + [('successful:staged', 'STAGED'), + ('successful:direct', 'DIRECT')]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_successful_validation(mock_file_util, plugin_config_file, + test_src_dir): + mock_file_util.return_value = test_src_dir + + validator = PluginValidator(plugin_config_file, + util_classes.PLUGIN_CONFIG_SCHEMA, + ValidationMode.ERROR, True) + 
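+        # With the successful.py fake plugins providing every required
+        # operation and well-formed upgrade decorators, validate() is
+        # expected to complete without raising.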
validator.validate() + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,plugin_type,expected_errors', + [('multiple_warnings:staged', 'STAGED', [ + 'Error: Named argument mismatch in method repository_discovery', + 'Error: Number of arguments do not match in method stop', + 'Error: Named argument mismatch in method stop', + 'Warning: Implementation missing for required method' + ' virtual.mount_specification().', '1 Warning(s). 3 Error(s).' + ]), + ('multiple_warnings:vfiles', 'DIRECT', [ + 'Error: Number of arguments do not match in method status', + 'Error: Named argument mismatch in method status', + 'Warning: Implementation missing for required method' + ' virtual.reconfigure().', '1 Warning(s). 2 Error(s).' + ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_multiple_warnings(mock_file_util, plugin_config_file, + test_src_dir, expected_errors): + mock_file_util.return_value = test_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + validator = PluginValidator(plugin_config_file, + util_classes.PLUGIN_CONFIG_SCHEMA, + ValidationMode.ERROR, True) + validator.validate() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_errors', [('upgrade_warnings:direct', [ + 'Error: Named argument mismatch in method snap_upgrade.', + 'Error: Number of arguments do not match in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + '0 Warning(s). 4 Error(s).' + ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_upgrade_warnings(mock_file_util, plugin_config_file, test_src_dir, + expected_errors): + mock_file_util.return_value = test_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + validator = PluginValidator(plugin_config_file, + util_classes.PLUGIN_CONFIG_SCHEMA, + ValidationMode.ERROR, True) + validator.validate() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_error', + [('op_already_defined:plugin', 'has already been defined'), + ('dec_not_function:plugin', "decorated by 'linked.pre_snapshot()'" + " is not a function"), + ('id_not_string:plugin', "The migration id '['testing', 'out'," + " 'validation']' used in the function" + " 'repo_upgrade' should be a string."), + ('id_bad_format:plugin', "used in the function 'repo_upgrade' does" + " not follow the correct format"), + ('id_used:plugin', "'5.04.000.01' used in the function 'snap_upgrade'" + " has the same canonical form '5.4.0.1' as another migration")]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_wrapper_failures(mock_file_util, plugin_config_file, test_src_dir, + expected_error): + mock_file_util.return_value = test_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + validator = PluginValidator(plugin_config_file, + util_classes.PLUGIN_CONFIG_SCHEMA, + ValidationMode.ERROR, True) + validator.validate() + + message = err_info.value.message + assert expected_error in message + assert '0 Warning(s). 1 Error(s).' 
in message + + @staticmethod + @pytest.mark.parametrize('entry_point', ['arbitrary_error:plugin']) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_sdk_error(mock_file_util, plugin_config_file, test_src_dir): + mock_file_util.return_value = test_src_dir + + with pytest.raises(exceptions.SDKToolingError) as err_info: + validator = PluginValidator(plugin_config_file, + util_classes.PLUGIN_CONFIG_SCHEMA, + ValidationMode.ERROR, True) + validator.validate() + + message = err_info.value.message + assert ('SDK Error: Got an arbitrary non-platforms error for testing.' + in message) + assert '0 Warning(s). 1 Error(s).' in message From 7ecbb52ea8bfa7f838a160e6cccbac1f4c2f3742 Mon Sep 17 00:00:00 2001 From: Lindsey Nguyen Date: Thu, 5 Dec 2019 13:10:06 -0800 Subject: [PATCH 04/25] PYT-1019 Write shell operations to read protobuf upgrade requests PYT-973 PluginImporter/Validator does not fail when an unexpected error is raised inside the Process Reviewed at: http://reviews.delphix.com/r/54495/ --- build.gradle | 2 +- dvp/Pipfile.lock | 26 ++-- dvp/lock.dev-requirements.txt | 4 +- dvp/lock.requirements.txt | 8 +- libs/Pipfile.lock | 14 +-- libs/lock.dev-requirements.txt | 4 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 12 +- platform/lock.dev-requirements.txt | 2 +- platform/lock.requirements.txt | 2 +- .../dlpx/virtualization/platform/_upgrade.py | 78 +++++++++++- .../virtualization/platform/exceptions.py | 21 ++++ .../dlpx/virtualization/test_upgrade.py | 116 ++++++++++++++++++ tools/Pipfile.lock | 74 ++++++----- tools/build.gradle | 2 +- tools/lock.dev-requirements.txt | 16 +-- tools/lock.requirements.txt | 12 +- .../_internal/commands/build.py | 2 +- .../_internal/plugin_importer.py | 7 +- .../virtualization/_internal/settings.cfg | 2 +- .../dlpx/virtualization/_internal/conftest.py | 15 +-- .../_internal/test_package_util.py | 2 +- .../_internal/test_plugin_validator.py | 22 ++-- 23 files changed, 324 insertions(+), 121 deletions(-) diff --git a/build.gradle b/build.gradle index 8bf3044d..d484c87e 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-003" + version = "1.1.0-internal-upgrade-004" } def binDir = "${rootProject.projectDir}/bin" diff --git a/dvp/Pipfile.lock b/dvp/Pipfile.lock index 1a0ac583..49c8d13b 100644 --- a/dvp/Pipfile.lock +++ b/dvp/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "61dd68fa84db728d2f9604993189481aae3d7e04d53f605627960c0cbdfdc396" + "sha256": "29cd8bea844d56f1e9296ac2e2d28a4eff66f99b5d4f9f4201d9b6290e6a8c8d" }, "pipfile-spec": 6, "requires": {}, @@ -15,20 +15,20 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "dvp-tools": { - 
"path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" } }, "develop": { @@ -72,11 +72,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", - "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" + "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", + "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" ], "markers": "python_version < '3.8'", - "version": "==0.23" + "version": "==1.1.0" }, "more-itertools": { "hashes": [ @@ -99,7 +99,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version == '3.4.*' or python_version < '3'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { diff --git a/dvp/lock.dev-requirements.txt b/dvp/lock.dev-requirements.txt index 2213b9d0..7413fd8b 100644 --- a/dvp/lock.dev-requirements.txt +++ b/dvp/lock.dev-requirements.txt @@ -4,10 +4,10 @@ attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==0.23 ; python_version < '3.8' +importlib-metadata==1.1.0 ; python_version < '3.8' more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 -pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 diff --git a/dvp/lock.requirements.txt b/dvp/lock.requirements.txt index 8d3524c6..84933c21 100644 --- a/dvp/lock.requirements.txt +++ b/dvp/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz -./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-003.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz +./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-004.tar.gz diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index c493ea8f..5f0c6ef2 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9a2bcb4aeddbdaeff8279b2b83dbb2962c24bc8e914492222abec9801b59c0aa" + "sha256": "5c8e2ff7197cb394e29f939d09c754f8cb20cf8b48efce7907989959c96ad869" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "protobuf": { "hashes": [ @@ -89,11 +89,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", - "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" + 
"sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", + "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" ], "markers": "python_version < '3.8'", - "version": "==0.23" + "version": "==1.1.0" }, "mock": { "hashes": [ @@ -124,7 +124,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version == '3.4.*' or python_version < '3'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 653450e6..abb51707 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -4,11 +4,11 @@ attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==0.23 ; python_version < '3.8' +importlib-metadata==1.1.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 -pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index b551f4d1..f6971b29 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 3c8a7492..09103a51 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "4c4a012ee9b986d7e3e36cde9675de9009302596cb7581b4d4b5165fe1457c46" + "sha256": "c3c19d04feabea6b24dd0f817fb8263bfafdfec8b9c5a4b0d2086f1fb5c571e0" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "enum34": { "hashes": [ @@ -100,11 +100,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", - "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" + "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", + "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" ], "markers": "python_version < '3.8'", - "version": "==0.23" + "version": "==1.1.0" }, "mock": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index f57d38a7..97d78fb9 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -4,7 +4,7 @@ attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==0.23 ; python_version < '3.8' +importlib-metadata==1.1.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 9f737cb7..2bae3f9f 100644 --- 
a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.13.0 diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py index 35bec54e..390bf121 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -13,9 +13,15 @@ operation of the same schema, the key will be the migration id, and the value will be the function that was implemented. """ +import logging +from dlpx.virtualization import platform_pb2 from dlpx.virtualization.platform import MigrationIdSet from dlpx.virtualization.platform import validation_util as v from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectUpgradeObjectTypeError) + +logger = logging.getLogger(__name__) __all__ = ['UpgradeOperations'] @@ -81,7 +87,73 @@ def snapshot_decorator(snapshot_impl): def migration_id_list(self): return self.__migration_id_set.get_sorted_ids() - def _internal_upgrade(self, request): - """Upgrade Wrapper for plugins. + @staticmethod + def _success_upgrade_response(upgraded_dict): + upgrade_response = platform_pb2.UpgradeResponse( + return_value=platform_pb2.UpgradeResult( + post_upgrade_parameters=upgraded_dict)) + return upgrade_response + + def _internal_repository(self, request): + """Upgrade repositories for plugins. + """ + if request.type != platform_pb2.UpgradeRequest.REPOSITORY: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.REPOSITORY) + # + # Then loop through each object and upgrade the object reference's + # payload through all migrations. For now we just want to print + # all object references passed in. + # + logger.debug('Upgrade repositories [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self._success_upgrade_response(request.pre_upgrade_parameters) + + def _internal_source_config(self, request): + """Upgrade source configs for plugins. """ - # TODO + if request.type != platform_pb2.UpgradeRequest.SOURCECONFIG: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.SOURCECONFIG) + + logger.debug('Upgrade source configs [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self._success_upgrade_response(request.pre_upgrade_parameters) + + def _internal_linked_source(self, request): + """Upgrade linked source for plugins. + """ + if request.type != platform_pb2.UpgradeRequest.LINKEDSOURCE: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.LINKEDSOURCE) + + logger.debug('Upgrade linked sources [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self._success_upgrade_response(request.pre_upgrade_parameters) + + def _internal_virtual_source(self, request): + """Upgrade virtual sources for plugins. 
+ """ + if request.type != platform_pb2.UpgradeRequest.VIRTUALSOURCE: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.VIRTUALSOURCE) + + logger.debug('Upgrade virtual sources [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self._success_upgrade_response(request.pre_upgrade_parameters) + + def _internal_snapshot(self, request): + """Upgrade snapshots for plugins. + """ + if request.type != platform_pb2.UpgradeRequest.SNAPSHOT: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.SNAPSHOT) + + logger.debug('Upgrade snapshots [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self._success_upgrade_response(request.pre_upgrade_parameters) diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py index 4047a50c..4f508ea6 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py +++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py @@ -57,6 +57,27 @@ def __init__(self, operation, actual_type, expected_type): super(IncorrectReturnTypeError, self).__init__(message) +class IncorrectUpgradeObjectTypeError(PluginRuntimeError): + """IncorrectUpgradeObjectTypeError gets thrown when an upgrade workflow was + called with the incorrect object type to upgrade. + + Args: + actual type (platform_pb2.UpgradeRequest.Type): type that was passed in + expected_type (platform_pb2.UpgradeRequest.Type): expected type + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + + """ + + def __init__(self, actual_type, expected_type): + message = ( + 'The upgrade operation received objects with {} type but should' + ' have had type {}.'.format(actual_type, expected_type)) + super(IncorrectUpgradeObjectTypeError, self).__init__(message) + + class OperationAlreadyDefinedError(PlatformError): """OperationAlreadyDefinedError gets thrown when the plugin writer tries to define an operation more than ones. 
diff --git a/platform/src/test/python/dlpx/virtualization/test_upgrade.py b/platform/src/test/python/dlpx/virtualization/test_upgrade.py index 570b9a71..edaba017 100755 --- a/platform/src/test/python/dlpx/virtualization/test_upgrade.py +++ b/platform/src/test/python/dlpx/virtualization/test_upgrade.py @@ -3,6 +3,8 @@ # import pytest +import logging +from dlpx.virtualization import platform_pb2 from dlpx.virtualization.platform.exceptions import ( DecoratorNotFunctionError, MigrationIdAlreadyUsedError) from dlpx.virtualization.platform.operation import Operation as Op @@ -189,3 +191,117 @@ def snap_upgrade_two(): "The migration id '2019.010.001' used in the function" " 'snap_upgrade_one' has the same canonical form '2019.10.1'" " as another migration.") + + @staticmethod + @pytest.fixture + def caplog(caplog): + caplog.set_level(logging.DEBUG) + return caplog + + @staticmethod + @pytest.fixture + def upgrade_request(fake_map_param, upgrade_type): + return platform_pb2.UpgradeRequest( + pre_upgrade_parameters=fake_map_param, + type=upgrade_type, + migration_ids=[] + ) + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_REPOSITORY-1': '{}', + 'APPDATA_REPOSITORY-2': '{}', + 'APPDATA_REPOSITORY-3': '{}' + }, platform_pb2.UpgradeRequest.REPOSITORY, + )]) + def test_repository(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_repository( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade repositories [APPDATA_REPOSITORY-1,' + ' APPDATA_REPOSITORY-2, APPDATA_REPOSITORY-3]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_SOURCE_CONFIG-1': '{}', + 'APPDATA_SOURCE_CONFIG-2': '{}', + 'APPDATA_SOURCE_CONFIG-3': '{}', + 'APPDATA_SOURCE_CONFIG-4': '{}' + }, platform_pb2.UpgradeRequest.SOURCECONFIG, + )]) + def test_source_config(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_source_config( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade source configs [APPDATA_SOURCE_CONFIG-1,' + ' APPDATA_SOURCE_CONFIG-2, APPDATA_SOURCE_CONFIG-3,' + ' APPDATA_SOURCE_CONFIG-4]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_STAGED_SOURCE-1': '{}', + 'APPDATA_STAGED_SOURCE-2': '{}', + 'APPDATA_STAGED_SOURCE-3': '{}' + }, platform_pb2.UpgradeRequest.LINKEDSOURCE, + )]) + def test_linked_source(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_linked_source( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade linked sources [APPDATA_STAGED_SOURCE-1,' + ' APPDATA_STAGED_SOURCE-2, APPDATA_STAGED_SOURCE-3]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_VIRTUAL_SOURCE-1': 
'{}', + 'APPDATA_VIRTUAL_SOURCE-2': '{}' + }, platform_pb2.UpgradeRequest.VIRTUALSOURCE, + )]) + def test_virtual_source( + my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_virtual_source( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade virtual sources [APPDATA_VIRTUAL_SOURCE-1,' + ' APPDATA_VIRTUAL_SOURCE-2]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_SNAPSHOT-1': '{}' + }, platform_pb2.UpgradeRequest.SNAPSHOT, + )]) + def test_snapshot(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_snapshot( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade snapshots [APPDATA_SNAPSHOT-1]') diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index acd9249f..8226a11d 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "4d04a3aa2ea7ed3a60c5625c9204c9a75487c6ce189cffd6707d3d2fe101fe6e" + "sha256": "aee49e3288687d35ae9f52a95793d6db11da79e4e65a1dca2986c1248e912457" }, "pipfile-spec": 6, "requires": {}, @@ -23,10 +23,10 @@ }, "certifi": { "hashes": [ - "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", - "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" + "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3", + "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f" ], - "version": "==2019.9.11" + "version": "==2019.11.28" }, "chardet": { "hashes": [ @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version == '2.7'", "version": "==4.0.2" }, "contextlib2": { @@ -110,11 +110,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", - "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" + "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", + "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" ], "markers": "python_version < '3.8'", - "version": "==0.23" + "version": "==1.1.0" }, "jinja2": { "hashes": [ @@ -185,7 +185,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version == '3.4.*' or python_version < '3'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "protobuf": { @@ -226,28 +226,26 @@ }, "pyrsistent": { "hashes": [ - "sha256:eb6545dbeb1aa69ab1fb4809bfbf5a8705e44d92ef8fc7c2361682a47c46c778" + "sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b" ], - "version": "==0.15.5" + "version": "==0.15.6" }, "pyyaml": { "hashes": [ - "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", - 
"sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", - "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", - "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", - "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", - "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", - "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", - "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", - "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", - "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", - "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", - "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", - "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" + "sha256:0e7f69397d53155e55d10ff68fdfb2cf630a35e6daf65cf0bdeaf04f127c09dc", + "sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803", + "sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc", + "sha256:38a4f0d114101c58c0f3a88aeaa44d63efd588845c5a2df5290b73db8f246d15", + "sha256:483eb6a33b671408c8529106df3707270bfacb2447bf8ad856a4b4f57f6e3075", + "sha256:4b6be5edb9f6bb73680f5bf4ee08ff25416d1400fbd4535fe0069b2994da07cd", + "sha256:7f38e35c00e160db592091751d385cd7b3046d6d51f578b29943225178257b31", + "sha256:8100c896ecb361794d8bfdb9c11fce618c7cf83d624d73d5ab38aef3bc82d43f", + "sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c", + "sha256:e4c015484ff0ff197564917b4b4246ca03f411b9bd7f16e02a2f586eb48b6d04", + "sha256:ebc4ed52dcc93eeebeae5cf5deb2ae4347b3a81c3fa12b0b8c976544829396a4" ], "index": "delphix", - "version": "==5.1.2" + "version": "==5.2" }, "requests": { "hashes": [ @@ -333,7 +331,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version == '2.7'", "version": "==4.0.2" }, "contextlib2": { @@ -383,16 +381,16 @@ "version": "==4.5.4" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz", - "version": "== 1.1.0-internal-upgrade-003" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz", + "version": "== 1.1.0-internal-upgrade-004" }, "entrypoints": { "hashes": [ @@ -425,7 +423,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "functools32": { @@ -453,11 +451,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", - 
"sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" + "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", + "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" ], "markers": "python_version < '3.8'", - "version": "==0.23" + "version": "==1.1.0" }, "isort": { "hashes": [ @@ -502,7 +500,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version == '3.4.*' or python_version < '3'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -602,7 +600,7 @@ "sha256:6f94b6a176a7c114cfa6bad86d40f259bbe0f10cf2fa7f2f4b3596fc5802a41b" ], "index": "delphix", - "version": "==0.28.0" + "version": "==0.28" }, "zipp": { "hashes": [ diff --git a/tools/build.gradle b/tools/build.gradle index d0d717b4..6b0172b5 100644 --- a/tools/build.gradle +++ b/tools/build.gradle @@ -214,7 +214,7 @@ dlpxPython { } yapf { - version = ">= 0.25" + version = "== 0.28" } /* diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index cdf9ed1c..c374c676 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,27 +1,27 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-003.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-003.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-003.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version == '2.7' contextlib2==0.6.0.post1 ; python_version < '3' coverage==4.5.4 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 -importlib-metadata==0.23 ; python_version < '3.8' +importlib-metadata==1.1.0 ; python_version < '3.8' isort==4.3.21 mccabe==0.6.1 mock==3.0.5 more-itertools==5.0.0 packaging==19.2 -pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.0 pycodestyle==2.5.0 @@ -33,5 +33,5 @@ scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' wcwidth==0.1.7 -yapf==0.28.0 +yapf==0.28 zipp==0.6.0 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 191e9c2e..70e4c53f 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,28 +1,28 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ attrs==19.3.0 -certifi==2019.9.11 +certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version == '2.7' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 functools32==3.2.3.post2 ; python_version < '3.2' idna==2.8 -importlib-metadata==0.23 ; python_version < '3.8' +importlib-metadata==1.1.0 ; python_version < '3.8' jinja2==2.10.3 jsonschema==3.2.0 
markupsafe==1.1.1 mccabe==0.6.1 more-itertools==5.0.0 -pathlib2==2.3.5 ; python_version == '3.4.*' or python_version < '3' +pathlib2==2.3.5 ; python_version < '3' protobuf==3.6.1 pycodestyle==2.5.0 pyflakes==2.1.1 -pyrsistent==0.15.5 -pyyaml==5.1.2 +pyrsistent==0.15.6 +pyyaml==5.2 requests==2.22.0 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py index d4079f5e..27085f79 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py @@ -113,7 +113,7 @@ def build(plugin_config, plugin_config_content, not generate_only, skip_id_validation) - except exceptions.UserError as err: + except (exceptions.UserError, exceptions.SDKToolingError) as err: raise exceptions.BuildFailedError(err) plugin_manifest = {} diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 1b7edee1..e55d42a0 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -269,7 +269,12 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): error = exceptions.UserError(err.message) queue.put({'exception': error}) else: - error = exceptions.SDKToolingError(err.message) + # + # Because we don't know if the output of the err is actually in the + # message, we just cast the exception to a string and hope to get + # the most information possible. + # + error = exceptions.SDKToolingError(str(err)) queue.put({'sdk exception': error}) finally: sys.path.remove(src_dir) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 541a1066..2b6edb36 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -26,7 +26,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-003 +package_version = 1.1.0-internal-upgrade-004 virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 907dc74a..1dd7d81e 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -169,18 +169,9 @@ def artifact_file_created(): @pytest.fixture -def plugin_config_content( - plugin_id, - plugin_name, - version, - language, - host_types, - plugin_type, - entry_point, - src_dir, - schema_file, - manual_discovery, -): +def plugin_config_content(plugin_id, plugin_name, version, language, + host_types, plugin_type, entry_point, src_dir, + schema_file, manual_discovery): """ This fixutre creates the dict expected in the properties yaml file the customer must provide for the build and compile commands. 
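For reference, the dict produced by the plugin_config_content fixture above corresponds one-to-one to the plugin config YAML a plugin author supplies to the build and compile commands; a representative value (field values illustrative, mirroring the defaults used by the plugin id tests earlier in this series) looks like:

    plugin_config_content = {
        'id': 'e3b69c61-4c30-44f7-92c0-504c8388b91e',
        'name': 'python_vfiles',
        'version': '1.0.0',
        'language': 'PYTHON27',
        'hostTypes': ['UNIX'],
        'pluginType': 'STAGED',
        'manualDiscovery': True,
        'entryPoint': 'staged_plugin:staged',
        'srcDir': 'src/',            # resolved to the plugin source directory in tests
        'schemaFile': 'schema.json',
    }
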
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index bdea58bc..42777729 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -8,7 +8,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-003' + assert package_util.get_version() == '1.1.0-internal-upgrade-004' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 1067da7c..a04b6de1 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -13,7 +13,7 @@ @pytest.fixture -def test_src_dir(plugin_type): +def fake_src_dir(plugin_type): """ This fixture gets the path of the fake plugin src files used for testing """ @@ -202,8 +202,8 @@ def test_plugin_info_warn_mode(plugin_config_file, plugin_config_content, ('successful:direct', 'DIRECT')]) @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') def test_successful_validation(mock_file_util, plugin_config_file, - test_src_dir): - mock_file_util.return_value = test_src_dir + fake_src_dir): + mock_file_util.return_value = fake_src_dir validator = PluginValidator(plugin_config_file, util_classes.PLUGIN_CONFIG_SCHEMA, @@ -228,8 +228,8 @@ def test_successful_validation(mock_file_util, plugin_config_file, ])]) @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') def test_multiple_warnings(mock_file_util, plugin_config_file, - test_src_dir, expected_errors): - mock_file_util.return_value = test_src_dir + fake_src_dir, expected_errors): + mock_file_util.return_value = fake_src_dir with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, @@ -251,9 +251,9 @@ def test_multiple_warnings(mock_file_util, plugin_config_file, '0 Warning(s). 4 Error(s).' 
])]) @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_upgrade_warnings(mock_file_util, plugin_config_file, test_src_dir, + def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, expected_errors): - mock_file_util.return_value = test_src_dir + mock_file_util.return_value = fake_src_dir with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, @@ -279,9 +279,9 @@ def test_upgrade_warnings(mock_file_util, plugin_config_file, test_src_dir, ('id_used:plugin', "'5.04.000.01' used in the function 'snap_upgrade'" " has the same canonical form '5.4.0.1' as another migration")]) @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_wrapper_failures(mock_file_util, plugin_config_file, test_src_dir, + def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, expected_error): - mock_file_util.return_value = test_src_dir + mock_file_util.return_value = fake_src_dir with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, @@ -296,8 +296,8 @@ def test_wrapper_failures(mock_file_util, plugin_config_file, test_src_dir, @staticmethod @pytest.mark.parametrize('entry_point', ['arbitrary_error:plugin']) @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_sdk_error(mock_file_util, plugin_config_file, test_src_dir): - mock_file_util.return_value = test_src_dir + def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): + mock_file_util.return_value = fake_src_dir with pytest.raises(exceptions.SDKToolingError) as err_info: validator = PluginValidator(plugin_config_file, From 94541895f695f868f94ebd7482bc8668e186f7a1 Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Wed, 18 Dec 2019 19:38:08 -0800 Subject: [PATCH 05/25] PYT-655 Cleanup plugin_validator and schema_validator modules Reviewed at: http://reviews.delphix.com/r/54770/ --- build.gradle | 4 +- libs/Pipfile.lock | 6 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 6 +- platform/lock.requirements.txt | 2 +- tools/Pipfile.lock | 87 +++++++++---------- tools/lock.dev-requirements.txt | 10 +-- tools/lock.requirements.txt | 6 +- .../_internal/commands/build.py | 20 ++--- .../_internal/commands/initialize.py | 5 +- .../_internal/plugin_importer.py | 22 +++-- .../virtualization/_internal/plugin_util.py | 52 +++++++---- .../_internal/plugin_validator.py | 72 +++++---------- .../_internal/schema_validator.py | 36 ++------ .../virtualization/_internal/settings.cfg | 2 +- .../_internal/commands/test_build.py | 2 +- .../_internal/commands/test_initialize.py | 37 +++++--- .../_internal/test_package_util.py | 2 +- .../_internal/test_plugin_importer.py | 20 ++--- .../_internal/test_plugin_validator.py | 63 ++++++-------- .../_internal/test_schema_validator.py | 53 ++++------- 21 files changed, 221 insertions(+), 288 deletions(-) diff --git a/build.gradle b/build.gradle index c71f7963..cd8764fe 100644 --- a/build.gradle +++ b/build.gradle @@ -12,11 +12,9 @@ subprojects { * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. 
*/ project.ext.dvpApiVersion = "1.1.0-master-003" - version = "1.1.0-internal-004" + version = "1.1.0-internal-005" } - - def binDir = "${rootProject.projectDir}/bin" /* diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index c0390242..141a20ed 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "14088559f45826a15935708b470b33e951715b88ba52502c21c2522009c8778c" + "sha256": "6ce0b104acdd7af1e177100807866ee7176d5da58f6feb55b3a2aa5bbc8c4c8c" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-004.tar.gz", - "version": "== 1.1.0-internal-004" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz", + "version": "== 1.1.0-internal-005" }, "protobuf": { "hashes": [ diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 1e278929..772c601f 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-004.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz dvp-api==1.1.0-master-003 protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index bc7c15be..702a4b73 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e369c56ef70fdbd57d2e664028343db11cae9ee5d0d648167c0a9cc1671a06fa" + "sha256": "fc15781d03ed3f3d9e8ff4f588f90cab291c9899ef4339d82656e5d3aaa1aa68" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-004.tar.gz", - "version": "== 1.1.0-internal-004" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz", + "version": "== 1.1.0-internal-005" }, "enum34": { "hashes": [ diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index b2dfb9fe..1032f9d4 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-004.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz dvp-api==1.1.0-master-003 enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index a914a05a..71bcea2a 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b5e602378bf639088b9259b4250b9177bc5840339b86805a0ecd55d884c801e2" + "sha256": "68f9a4de5023d37cdcef8e799d69437a07b7ea17980210634090a5e146d21faa" }, "pipfile-spec": 6, "requires": {}, @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -68,8 +68,8 @@ "version": "==0.6.0.post1" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-004.tar.gz", - "version": "== 1.1.0-internal-004" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-005.tar.gz", + "version": "== 1.1.0-internal-005" }, "entrypoints": { "hashes": [ @@ -102,7 +102,7 @@ 
"sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "idna": { @@ -313,7 +313,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -326,49 +326,48 @@ }, "coverage": { "hashes": [ - "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", - "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", - "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", - "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", - "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", - "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", - "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", - "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", - "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", - "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", - "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", - "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", - "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", - "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", - "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", - "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", - "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", - "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", - "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", - "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", - "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", - "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", - "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", - "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", - "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", - "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", - "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", - "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", - "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", - "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", - "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", - "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025" + "sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351", + "sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd", + "sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde", + "sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898", + "sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070", + "sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e", + 
"sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8", + "sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0", + "sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02", + "sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798", + "sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466", + "sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be", + "sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d", + "sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6", + "sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207", + "sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d", + "sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b", + "sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a", + "sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b", + "sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be", + "sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72", + "sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d", + "sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864", + "sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f", + "sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f", + "sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e", + "sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1", + "sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c", + "sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca", + "sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db", + "sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c" ], "index": "delphix", - "version": "==4.5.4" + "version": "==5.0" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-004.tar.gz", - "version": "== 1.1.0-internal-004" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz", + "version": "== 1.1.0-internal-005" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-004.tar.gz", - "version": "== 1.1.0-internal-004" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-005.tar.gz", + "version": "== 1.1.0-internal-005" }, "entrypoints": { "hashes": [ @@ -409,7 +408,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index df16938a..e9b41c27 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,17 +1,17 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-004.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-004.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-005.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version < '3.2' 
contextlib2==0.6.0.post1 ; python_version < '3' -coverage==4.5.4 +coverage==5.0 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==1.3.0 ; python_version < '3.8' diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index c18bc17b..6dc11593 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,16 +1,16 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../platform/build/python-dist/dvp-platform-1.1.0-internal-004.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-005.tar.gz attrs==19.3.0 certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version < '3.2' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' idna==2.8 importlib-metadata==1.3.0 ; python_version < '3.8' jinja2==2.10.3 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py index d4079f5e..dddfa3a1 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py @@ -13,8 +13,7 @@ from dlpx.virtualization._internal import (codegen, exceptions, file_util, package_util, - plugin_dependency_util, plugin_util, - util_classes) + plugin_dependency_util, plugin_util) logger = logging.getLogger(__name__) @@ -54,10 +53,10 @@ def build(plugin_config, local_vsdk_root = os.path.expanduser(local_vsdk_root) # Read content of the plugin config file provided and perform validations - logger.info('Reading and validating plugin config file %s', plugin_config) + logger.info('Validating plugin config file %s', plugin_config) try: - result = plugin_util.read_and_validate_plugin_config_file( - plugin_config, not generate_only, False, skip_id_validation) + result = plugin_util.validate_plugin_config_file( + plugin_config, not generate_only, skip_id_validation) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -69,11 +68,11 @@ def build(plugin_config, plugin_config, plugin_config_content['schemaFile']) # Read schemas from the file provided in the config and validate them - logger.info('Reading and validating schemas from %s', schema_file) + logger.info('Validating schemas from %s', schema_file) try: - result = plugin_util.read_and_validate_schema_file( - schema_file, not generate_only) + result = plugin_util.validate_schema_file(schema_file, + not generate_only) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -119,11 +118,6 @@ def build(plugin_config, plugin_manifest = {} if result: plugin_manifest = result.plugin_manifest - if result.warnings: - warning_msg = util_classes.MessageUtils.warning_msg( - result.warnings) - logger.warn('{}\n{} Warning(s). {} Error(s).'.format( - warning_msg, len(result.warnings['warning']), 0)) # # Setup a build directory for the plugin in its root. 
Dependencies are diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py index 459dd3a4..9e805c8b 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py @@ -103,9 +103,8 @@ def init(root, ingestion_strategy, name, host_type): logger.info('Writing schema file at %s.', schema_file_path) shutil.copyfile(SCHEMA_TEMPLATE_PATH, schema_file_path) - # Read and valida the schema file - result = plugin_util.read_and_validate_schema_file( - schema_file_path, False) + # Validate the schema file. + result = plugin_util.validate_schema_file(schema_file_path, False) # Generate the definitions based on the schema file codegen.generate_python(name, src_dir_path, diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 0df69610..37ca0250 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -64,9 +64,13 @@ def import_plugin(self): Imports the plugin module, does basic validation. Returns: plugin manifest - dict describing methods implemented in the plugin - Note: - warnings - dict containing a list of errors or warnings can be - obtained by the caller via warnings property. + NOTE: + Importing module in the current context pollutes the runtime of + the caller, in this case dvp. If the module being imported, for + e.g. contains code that adds a handler to the root logger at + import time, this can cause issues with logging in this code and + callers of validator. To avoid such issues, perform the import in + in a sub-process and on completion return the output. """ logger.debug('Importing plugin module : %s', self.__plugin_module) @@ -74,25 +78,25 @@ def import_plugin(self): plugin_manifest, warnings = self.__import_plugin() self.__post_import_checks(plugin_manifest, warnings) - return plugin_manifest, warnings + return plugin_manifest def __pre_import_checks(self): """ Performs checks of the plugin code that should take place prior to importing. """ - warnings = PluginImporter.__check_for_undefined_names(self.__src_dir) - PluginImporter.__report_warnings_and_exceptions(warnings) + warnings = self.__check_for_undefined_names(self.__src_dir) + self.__report_warnings_and_exceptions(warnings) def __import_plugin(self): """ - Imports the module to check for errors or issues. Also does an eval on - the entry point. + Imports the module in a sub-process to check for errors or issues. + Also does an eval on the entry point. 
""" plugin_manifest = {} warnings = defaultdict(list) try: - plugin_manifest, warnings = (PluginImporter.__import_in_subprocess( + plugin_manifest, warnings = (self.__import_in_subprocess( self.__src_dir, self.__plugin_module, self.__plugin_entry_point, self.__plugin_type, self.__validate)) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 8ef97776..98c648cc 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -4,6 +4,7 @@ import logging import os +from contextlib import contextmanager from dlpx.virtualization._internal import exceptions, util_classes from dlpx.virtualization._internal.plugin_validator import PluginValidator @@ -13,10 +14,24 @@ logger = logging.getLogger(__name__) -def read_and_validate_plugin_config_file(plugin_config, - stop_build, - run_all_validations, - skip_id_validation=False): +@contextmanager +def validate_error_handler(plugin_file, validation_mode): + try: + yield + except Exception as e: + if validation_mode is ValidationMode.INFO: + logger.info('Validation failed on plugin file %s : %s', + plugin_file, e) + elif validation_mode is ValidationMode.WARNING: + logger.warning('Validation failed on plugin file %s : %s', + plugin_file, e) + else: + raise e + + +def validate_plugin_config_file(plugin_config, + stop_build, + skip_id_validation=False): """ Reads a plugin config file and validates the contents using a pre-defined schema. If stop_build is True, will report exception @@ -30,9 +45,11 @@ def read_and_validate_plugin_config_file(plugin_config, plugin_config_schema_file = ( util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator(plugin_config, plugin_config_schema_file, - validation_mode, run_all_validations) - validator.validate() + validator = PluginValidator(plugin_config, plugin_config_schema_file) + + with validate_error_handler(plugin_config, validation_mode): + validator.validate_plugin_config() + return validator.result @@ -53,13 +70,15 @@ def get_plugin_manifest(plugin_config_file, if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) validator = PluginValidator.from_config_content(plugin_config_file, plugin_config_content, - plugin_config_schema_file, - validation_mode) - validator.validate() + plugin_config_schema_file) + + with validate_error_handler(plugin_config_file, validation_mode): + validator.validate_plugin_module() + return validator.result -def read_and_validate_schema_file(schema_file, stop_build): +def validate_schema_file(schema_file, stop_build): """ Reads a plugin schema file and validates the contents using a pre-defined schema. If stop_build is True, will report exception @@ -69,9 +88,11 @@ def read_and_validate_schema_file(schema_file, stop_build): """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + + with validate_error_handler(schema_file, validation_mode): + validator.validate() + return validator.result @@ -79,8 +100,7 @@ def get_plugin_config_property(plugin_config_path, prop): """ Returns the value for a specific property from the plugin config file. 
""" - result = read_and_validate_plugin_config_file(plugin_config_path, False, - False) + result = validate_plugin_config_file(plugin_config_path, False, False) return result.plugin_config_content[prop] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py index 09c0e0b2..e4a1d572 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py @@ -5,19 +5,17 @@ import json import logging import os -from collections import defaultdict, namedtuple +from collections import namedtuple import yaml from dlpx.virtualization._internal import (exceptions, file_util, plugin_importer) -from dlpx.virtualization._internal.util_classes import ValidationMode from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple( - 'validation_result', - ['plugin_config_content', 'plugin_manifest', 'warnings']) +validation_result = namedtuple('validation_result', + ['plugin_config_content', 'plugin_manifest']) class PluginValidator: @@ -29,33 +27,26 @@ class PluginValidator: config, content of the python module specified in in the pluginEntryPoint and also name of the plugin entry point in the module. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back. """ def __init__(self, plugin_config, plugin_config_schema, - validation_mode, - run_all_validations, plugin_config_content=None): self.__plugin_config = plugin_config self.__plugin_config_schema = plugin_config_schema - self.__validation_mode = validation_mode - self.__run_all_validations = run_all_validations self.__plugin_config_content = plugin_config_content self.__plugin_manifest = None - self.__warnings = defaultdict(list) @property def result(self): return validation_result( plugin_config_content=self.__plugin_config_content, - plugin_manifest=self.__plugin_manifest, - warnings=self.__warnings) + plugin_manifest=self.__plugin_manifest) @classmethod def from_config_content(cls, plugin_config_file, plugin_config_content, - plugin_config_schema, validation_mode): + plugin_config_schema): """ Instantiates the validator with given plugin config content. plugin_config_file path is not read but used to get the absolute @@ -63,26 +54,10 @@ def from_config_content(cls, plugin_config_file, plugin_config_content, Returns: PluginValidator """ - return cls(plugin_config_file, plugin_config_schema, validation_mode, - True, plugin_config_content) + return cls(plugin_config_file, plugin_config_schema, + plugin_config_content) - def validate(self): - """ - Validates the plugin config file. - """ - logger.debug('Run config validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin config file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin config file : %s', - e) - else: - raise e - - def __run_validations(self): + def validate_plugin_config(self): """ Reads a plugin config file and validates the contents using a pre-defined schema. 
If validation is successful, tries to import @@ -97,9 +72,12 @@ def __run_validations(self): self.__plugin_config_content) self.__validate_plugin_config_content() - if not self.__run_all_validations: - logger.debug('Plugin config file schema validation is done') - return + def validate_plugin_module(self): + """ + Tries to import the plugin module and validates the entry point + specified. + """ + self.validate_plugin_config() src_dir = file_util.get_src_dir_path( self.__plugin_config, self.__plugin_config_content['srcDir']) @@ -208,10 +186,8 @@ def __validate_plugin_entry_point(self, src_dir): plugin_type = self.__plugin_config_content['pluginType'] try: - self.__plugin_manifest, self.__warnings = ( - PluginValidator.__import_plugin(src_dir, entry_point_module, - entry_point_object, - plugin_type)) + self.__plugin_manifest = (self.__import_plugin( + src_dir, entry_point_module, entry_point_object, plugin_type)) except ImportError as err: raise exceptions.UserError( 'Unable to load module \'{}\' specified in ' @@ -225,18 +201,12 @@ def __validate_plugin_entry_point(self, src_dir): def __import_plugin(src_dir, entry_point_module, entry_point_object, plugin_type): """ - Imports the given python module. - NOTE: - Importing module in the current context pollutes the runtime of - the caller, in this case dvp. If the module being imported, for - e.g. contains code that adds a handler to the root logger at - import time, this can cause issues with logging in this code and - callers of validator. To avoid such issues, perform the import in - in a sub-process and on completion return the output. + Imports the given python module, does some validations ans returns the + manifest describing implemented plugin operations. """ importer = plugin_importer.PluginImporter(src_dir, entry_point_module, entry_point_object, plugin_type, True) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() - return manifest, warnings + return manifest diff --git a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py index e66241c7..46354fce 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py @@ -5,16 +5,14 @@ import json import logging import os -from collections import defaultdict, namedtuple +from collections import namedtuple from dlpx.virtualization._internal import exceptions -from dlpx.virtualization._internal.util_classes import ValidationMode from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple('validation_result', - ['plugin_schemas', 'warnings']) +validation_result = namedtuple('validation_result', ['plugin_schemas']) class SchemaValidator: @@ -24,42 +22,18 @@ class SchemaValidator: Returns: On successful validation, callers can get the content of the plugin schemas. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back. 
""" - def __init__(self, - schema_file, - plugin_meta_schema, - validation_mode, - schemas=None): + def __init__(self, schema_file, plugin_meta_schema, schemas=None): self.__schema_file = schema_file self.__plugin_meta_schema = plugin_meta_schema - self.__validation_mode = validation_mode self.__plugin_schemas = schemas - self.__warnings = defaultdict(list) @property def result(self): - return validation_result(plugin_schemas=self.__plugin_schemas, - warnings=self.__warnings) + return validation_result(plugin_schemas=self.__plugin_schemas) def validate(self): - """ - Validates the plugin schema file. - """ - logger.debug('Run schema validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin schema file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin schema file : %s', - e) - else: - raise e - - def __run_validations(self): """ Reads a plugin schema file and validates the contents using a pre-defined schema. diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 6675c17b..23e7e676 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -21,7 +21,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-004 +package_version = 1.1.0-internal-005 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 62d72c83..44c7f4db 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -59,7 +59,7 @@ def test_build_success(mock_relative_path, mock_install_deps, @pytest.mark.parametrize('artifact_filename', ['somefile.json']) @mock.patch.object(PluginValidator, '_PluginValidator__import_plugin', - return_value=({}, None)) + return_value={}) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') @mock.patch( 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 11d6bb9c..e208108b 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -85,9 +85,9 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, init.init(tmpdir.strpath, ingestion_strategy, plugin_name, host_type) # Validate the config file is as we expect. 
- result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content @@ -121,9 +121,9 @@ def test_init_without_plugin_name(tmpdir): init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, "", util_classes.UNIX_HOST_TYPE) - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content @@ -134,9 +134,9 @@ def test_init_without_plugin_name(tmpdir): def test_init_windows_plugin(tmpdir, plugin_name): init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, plugin_name, util_classes.WINDOWS_HOST_TYPE) - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content # Validate that the host type is WINDOWS @@ -157,10 +157,26 @@ def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, init.DEFAULT_PLUGIN_CONFIG_FILE) schema_file = os.path.join(tmpdir.strpath, init.DEFAULT_SCHEMA_FILE) validator = plugin_validator.PluginValidator(plugin_config_file, - schema_file, True, True) - validator.validate() + schema_file) + + # Assert config file and import validations are not done. + assert not validator.result.plugin_config_content + assert not validator.result.plugin_manifest + + validator.validate_plugin_config() + + # Assert config file is validated and import validation is not done. + assert validator.result.plugin_config_content + assert not validator.result.plugin_manifest - assert not validator.result.warnings + validator.validate_plugin_module() + + # + # Assert both config content and import validation are done and result + # tuple has both set to valid values. + # + assert validator.result.plugin_config_content + assert validator.result.plugin_manifest @staticmethod def test_invalid_with_config_file(plugin_config_file): @@ -203,8 +219,7 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, @staticmethod def test_default_schema_definition(schema_template): validator = schema_validator.SchemaValidator( - None, util_classes.PLUGIN_SCHEMA, - util_classes.ValidationMode.ERROR, schema_template) + None, util_classes.PLUGIN_SCHEMA, schema_template) validator.validate() # Validate the repository schema only has the 'name' property. 
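A minimal sketch of the staged validation flow these changes introduce, assuming the dvp tools package is importable; the config path 'plugin_config.yml' is a placeholder, while the class, method and helper names are taken from the diffs above:

# Sketch only: exercises the new split validation API.
from dlpx.virtualization._internal import plugin_util, util_classes
from dlpx.virtualization._internal.plugin_validator import PluginValidator

# Stage 1: schema-only validation of the plugin config file; raises on failure.
validator = PluginValidator('plugin_config.yml',
                            util_classes.PLUGIN_CONFIG_SCHEMA)
validator.validate_plugin_config()
assert validator.result.plugin_config_content   # config content is populated
assert not validator.result.plugin_manifest     # module has not been imported yet

# Stage 2: re-validates the config, then imports the plugin module and
# fills in the manifest.
validator.validate_plugin_module()
assert validator.result.plugin_manifest

# Alternatively, go through plugin_util; it maps stop_build to a
# ValidationMode and, when stop_build is False, logs failures via
# validate_error_handler instead of raising them.
result = plugin_util.validate_plugin_config_file('plugin_config.yml',
                                                 stop_build=False)
print(result.plugin_config_content)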
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 17b2b4d0..65cf614b 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-004' + assert package_util.get_version() == '1.1.0-internal-005' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 37a64f50..74f26ed8 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -2,7 +2,6 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # import exceptions -from collections import defaultdict import mock import pytest @@ -18,9 +17,8 @@ def test_get_plugin_manifest(mock_import, src_dir, plugin_type, mock_import.return_value = plugin_module_content importer = PluginImporter(src_dir, plugin_name, plugin_entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() - assert not warnings assert manifest == plugin_manifest @staticmethod @@ -29,16 +27,14 @@ def test_plugin_module_content_none(mock_import, src_dir, plugin_type, plugin_name, plugin_entry_point_name): mock_import.return_value = None manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, plugin_entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = str(err_info) - assert warnings.items() > 0 assert manifest == {} assert 'Plugin module content is None.' in message @@ -48,15 +44,13 @@ def test_plugin_entry_object_none(mock_import, src_dir, plugin_type, plugin_name, plugin_module_content): mock_import.return_value = plugin_module_content manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, None, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = str(err_info) - assert warnings.items() > 0 assert manifest == {} assert 'Plugin entry point object is None.' 
in message @@ -68,15 +62,13 @@ def test_plugin_entry_point_nonexistent(mock_import, src_dir, plugin_type, entry_point_name = "nonexistent entry point" mock_import.return_value = plugin_module_content manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = err_info.value.message - assert warnings.items() > 0 assert manifest == {} assert ('\'{}\' is not a symbol in module'.format(entry_point_name) in message) @@ -90,15 +82,13 @@ def test_plugin_object_none(mock_import, src_dir, plugin_type, plugin_name, mock_import.return_value = plugin_module_content manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, none_entry_point, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = err_info.value.message - assert warnings.items() > 0 assert manifest == {} assert ('Plugin object retrieved from the entry point {} is' ' None'.format(none_entry_point)) in message diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 2d1fe09b..aa04f17d 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -9,9 +9,8 @@ import mock import pytest -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import exceptions, plugin_util, util_classes from dlpx.virtualization._internal.plugin_validator import PluginValidator -from dlpx.virtualization._internal.util_classes import ValidationMode @pytest.fixture @@ -42,9 +41,8 @@ def test_plugin_bad_schema(plugin_config_file, schema_file): ]) with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator.from_config_content( - plugin_config_file, plugin_config_content, schema_file, - ValidationMode.ERROR) - validator.validate() + plugin_config_file, plugin_config_content, schema_file) + validator.validate_plugin_config() message = err_info.value.message assert ('Failed to load schemas because {} is not a valid json file.' 
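The NOTE added to PluginImporter.import_plugin describes why the plugin module is imported in a sub-process. The helper below is only a generic sketch of that isolation technique, not the actual __import_in_subprocess implementation; import_isolated and _import_worker are hypothetical names introduced here for illustration:

import importlib
import multiprocessing
import sys


def _import_worker(src_dir, module_name, queue):
    # Runs in the child process: anything the plugin does at import time
    # (e.g. adding a handler to the root logger) stays in this process.
    sys.path.insert(0, src_dir)
    try:
        module = importlib.import_module(module_name)
        # Hand back something picklable that describes the module.
        queue.put([name for name in dir(module) if not name.startswith('_')])
    except BaseException as err:
        queue.put(err)


def import_isolated(src_dir, module_name):
    # Import in a sub-process and return the result to the caller, leaving
    # the caller's runtime (logging config, sys.modules) untouched.
    queue = multiprocessing.Queue()
    child = multiprocessing.Process(target=_import_worker,
                                    args=(src_dir, module_name, queue))
    child.start()
    outcome = queue.get()
    child.join()
    if isinstance(outcome, BaseException):
        raise outcome
    return outcome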
@@ -55,9 +53,8 @@ def test_plugin_bad_schema(plugin_config_file, schema_file): def test_plugin_bad_config_file(plugin_config_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert message == ("Unable to read plugin config file '{}'" @@ -82,8 +79,8 @@ def test_plugin_valid_content(mock_import_plugin, mock_relative_path, validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() @@ -101,8 +98,8 @@ def test_plugin_missing_field(plugin_config_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @@ -131,8 +128,8 @@ def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: message = err_info.message @@ -168,8 +165,8 @@ def test_plugin_entry_point(mock_import_plugin, mock_relative_path, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: message = err_info.message @@ -190,8 +187,8 @@ def test_plugin_additional_properties(src_dir, plugin_config_file): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert "Additional properties are not allowed " \ @@ -211,8 +208,8 @@ def test_multiple_validation_errors(plugin_config_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message assert "'xxx' is not one of ['UNIX', 'WINDOWS']" in message @@ -225,12 +222,10 @@ def test_staged_plugin(mock_file_util, fake_staged_plugin_config): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(fake_staged_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + 
util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() message = err_info.value.message - assert validator.result.warnings.items() > 0 assert 'Named argument mismatch in method' in message assert 'Number of arguments do not match' in message assert 'Implementation missing for required method' in message @@ -243,12 +238,10 @@ def test_direct_plugin(mock_file_util, fake_direct_plugin_config): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(fake_direct_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() message = err_info.value.message - assert validator.result.warnings.items() > 0 assert 'Named argument mismatch in method' in message assert 'Number of arguments do not match' in message assert 'Implementation missing for required method' in message @@ -279,17 +272,15 @@ def test_plugin_id(mock_import_plugin, mock_relative_path, src_dir, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod - @pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, ValidationMode.WARNING]) - def test_plugin_info_warn_mode(plugin_config_file, validation_mode): + def test_plugin_info_warn_mode(plugin_config_file): plugin_config_content = OrderedDict([ ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), ('version', '0.1.0'), ('language', 'PYTHON27'), @@ -300,10 +291,8 @@ def test_plugin_info_warn_mode(plugin_config_file, validation_mode): ]) err_info = None try: - validator = PluginValidator.from_config_content( - plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, validation_mode) - validator.validate() + plugin_util.get_plugin_manifest(plugin_config_file, + plugin_config_content, False) except Exception as e: err_info = e diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py index 00be8fe4..99ea6761 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py @@ -6,9 +6,8 @@ import os import pytest -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import exceptions, plugin_util, util_classes from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode class TestSchemaValidator: @@ -18,7 +17,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): f = tmpdir.join(schema_filename) f.write(meta_schema) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, f, ValidationMode.ERROR) + validator = SchemaValidator(schema_file, f) validator.validate() message = err_info.value.message @@ -31,8 +30,7 @@ def test_bad_schema_file(schema_file): os.remove(schema_file) with pytest.raises(exceptions.UserError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + 
util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -42,8 +40,7 @@ def test_bad_schema_file(schema_file): @staticmethod def test_valid_schema(schema_file): - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -68,8 +65,7 @@ def test_missing_root_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. # - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -92,8 +88,7 @@ def test_missing_root_type(schema_file): def test_bad_root_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -119,8 +114,7 @@ def test_bad_root_type_num(schema_file): def test_bad_root_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -145,8 +139,7 @@ def test_bad_root_type(schema_file): def test_missing_identity_fields(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -171,8 +164,7 @@ def test_missing_identity_fields(schema_file): def test_missing_name_field(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -199,8 +191,7 @@ def test_missing_sub_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -223,8 +214,7 @@ def test_missing_sub_type(schema_file): def test_bad_sub_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -250,8 +240,7 @@ def test_bad_sub_type(schema_file): def test_bad_sub_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -279,8 +268,7 @@ def test_missing_required_field(schema_file): pytest.skip("required fields validation is not working yet") with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -305,8 +293,7 @@ def test_missing_required_field(schema_file): def test_multiple_validation_errors(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -314,8 +301,6 @@ def test_multiple_validation_errors(schema_file): assert "'identityFields' is a required property" in message @staticmethod - @pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, ValidationMode.WARNING]) @pytest.mark.parametrize('source_config_definition', [{ 'type': 'object', @@ -332,13 +317,10 @@ def test_multiple_validation_errors(schema_file): 'nameField': 'name', 'identityFields': ['name'] }]) - def test_bad_sub_type_info_warn_mode(schema_file, validation_mode): + def test_bad_sub_type_info_warn_mode(schema_file): err_info = None try: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + plugin_util.validate_schema_file(schema_file, False) except Exception as e: err_info = e @@ -378,8 +360,7 @@ def test_bad_sub_type_info_warn_mode(schema_file, validation_mode): def test_bad_type_in_array(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message From 8ccba459815bb0ac7a385931da33872a63bb56e7 Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Thu, 19 Dec 2019 07:13:14 -0800 Subject: [PATCH 06/25] PYT-1031 [Backport of Issue PYT-655] Cleanup plugin_validator and schema_validator modules Reviewed at: http://reviews.delphix.com/r/54844/ --- build.gradle | 2 +- libs/Pipfile.lock | 22 ++-- libs/lock.dev-requirements.txt | 8 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 20 ++-- platform/lock.dev-requirements.txt | 6 +- platform/lock.requirements.txt | 2 +- tools/Pipfile.lock | 105 +++++++++--------- tools/lock.dev-requirements.txt | 16 +-- tools/lock.requirements.txt | 6 +- .../_internal/commands/build.py | 20 ++-- .../_internal/commands/initialize.py | 5 +- .../_internal/plugin_importer.py | 22 
++-- .../virtualization/_internal/plugin_util.py | 52 ++++++--- .../_internal/plugin_validator.py | 72 ++++-------- .../_internal/schema_validator.py | 36 +----- .../virtualization/_internal/settings.cfg | 2 +- .../_internal/commands/test_build.py | 2 +- .../_internal/commands/test_initialize.py | 37 ++++-- .../_internal/test_package_util.py | 2 +- .../_internal/test_plugin_importer.py | 20 +--- .../_internal/test_plugin_validator.py | 77 ++++++------- .../_internal/test_schema_validator.py | 53 +++------ 23 files changed, 261 insertions(+), 328 deletions(-) diff --git a/build.gradle b/build.gradle index d484c87e..0146a1a3 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-004" + version = "1.1.0-internal-upgrade-005" } def binDir = "${rootProject.projectDir}/bin" diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 5f0c6ef2..3c0fb470 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "5c8e2ff7197cb394e29f939d09c754f8cb20cf8b48efce7907989959c96ad869" + "sha256": "db763774d6ae530ab99cff6f933166d3d201eab1927161a9441cdad48db99a65" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz", + "version": "== 1.1.0-internal-upgrade-005" }, "protobuf": { "hashes": [ @@ -84,16 +84,16 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { "hashes": [ - "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", - "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" + "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", + "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" ], "markers": "python_version < '3.8'", - "version": "==1.1.0" + "version": "==1.3.0" }, "mock": { "hashes": [ @@ -124,7 +124,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.6'", "version": "==2.3.5" }, "pluggy": { @@ -150,11 +150,11 @@ }, "pytest": { "hashes": [ - "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", - "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" + "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", + "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" ], "index": "delphix", - "version": "==4.6.6" + "version": "==4.6.7" }, "scandir": { "hashes": [ diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index abb51707..e52d3dd1 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,16 +3,16 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==1.1.0 ; python_version < '3.8' +funcsigs==1.0.2 ; python_version < '3.0' +importlib-metadata==1.3.0 ; python_version < '3.8' mock==3.0.5 
more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 -pathlib2==2.3.5 ; python_version < '3' +pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 -pytest==4.6.6 +pytest==4.6.7 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 wcwidth==0.1.7 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index f6971b29..62dd0d32 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 09103a51..8335c2e6 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "c3c19d04feabea6b24dd0f817fb8263bfafdfec8b9c5a4b0d2086f1fb5c571e0" + "sha256": "91499a72181105c414d96700acae980dcaa952887ec4e4e9417f679c76c83c91" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz", + "version": "== 1.1.0-internal-upgrade-005" }, "enum34": { "hashes": [ @@ -95,16 +95,16 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { "hashes": [ - "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", - "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" + "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", + "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" ], "markers": "python_version < '3.8'", - "version": "==1.1.0" + "version": "==1.3.0" }, "mock": { "hashes": [ @@ -161,11 +161,11 @@ }, "pytest": { "hashes": [ - "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", - "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" + "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", + "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" ], "index": "delphix", - "version": "==4.6.6" + "version": "==4.6.7" }, "scandir": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 97d78fb9..e65a7a1b 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -3,8 +3,8 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==1.1.0 ; python_version < '3.8' +funcsigs==1.0.2 ; python_version < '3.3' +importlib-metadata==1.3.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 @@ -12,7 +12,7 @@ pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 -pytest==4.6.6 +pytest==4.6.7 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 wcwidth==0.1.7 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 2bae3f9f..0c6004db 100644 --- 
a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.13.0 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index 8226a11d..4c83fc13 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "aee49e3288687d35ae9f52a95793d6db11da79e4e65a1dca2986c1248e912457" + "sha256": "22e7e3329355eeacfe7f924851cc185ee70c5c2d3017ce51b7f09ef468cc4328" }, "pipfile-spec": 6, "requires": {}, @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -98,7 +98,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "idna": { @@ -110,11 +110,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", - "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" + "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", + "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" ], "markers": "python_version < '3.8'", - "version": "==1.1.0" + "version": "==1.3.0" }, "jinja2": { "hashes": [ @@ -331,7 +331,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -344,53 +344,52 @@ }, "coverage": { "hashes": [ - "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", - "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", - "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", - "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", - "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", - "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", - "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", - "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", - "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", - "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", - "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", - "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", - "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", - "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", - "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", - "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", - "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", - "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", - 
"sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", - "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", - "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", - "sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", - "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", - "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", - "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", - "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", - "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", - "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", - "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", - "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", - "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", - "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025" + "sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351", + "sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd", + "sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde", + "sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898", + "sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070", + "sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e", + "sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8", + "sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0", + "sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02", + "sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798", + "sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466", + "sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be", + "sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d", + "sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6", + "sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207", + "sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d", + "sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b", + "sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a", + "sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b", + "sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be", + "sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72", + "sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d", + "sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864", + "sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f", + "sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f", + "sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e", + "sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1", + "sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c", + "sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca", + "sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db", + "sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c" ], "index": "delphix", - "version": "==4.5.4" + "version": "==5.0" }, "dvp-common": { - 
"path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz", + "version": "== 1.1.0-internal-upgrade-005" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-005.tar.gz", + "version": "== 1.1.0-internal-upgrade-005" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-005.tar.gz", + "version": "== 1.1.0-internal-upgrade-005" }, "entrypoints": { "hashes": [ @@ -431,7 +430,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "futures": { @@ -451,11 +450,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", - "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" + "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", + "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" ], "markers": "python_version < '3.8'", - "version": "==1.1.0" + "version": "==1.3.0" }, "isort": { "hashes": [ @@ -540,11 +539,11 @@ }, "pytest": { "hashes": [ - "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", - "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" + "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", + "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" ], "index": "delphix", - "version": "==4.6.6" + "version": "==4.6.7" }, "pytest-cov": { "hashes": [ diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index c374c676..1372f14e 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,21 +1,21 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-005.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-005.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version == '2.7' +configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -coverage==4.5.4 +coverage==5.0 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3.2' +functools32==3.2.3.post2 ; python_version < '3' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 -importlib-metadata==1.1.0 ; python_version < '3.8' +importlib-metadata==1.3.0 ; python_version < '3.8' isort==4.3.21 mccabe==0.6.1 mock==3.0.5 @@ -28,7 +28,7 @@ pycodestyle==2.5.0 pyflakes==2.1.1 pyparsing==2.4.5 
pytest-cov==2.8.1 -pytest==4.6.6 +pytest==4.6.7 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 70e4c53f..2cef9f26 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -4,14 +4,14 @@ certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version == '2.7' +configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3.2' +functools32==3.2.3.post2 ; python_version < '3' idna==2.8 -importlib-metadata==1.1.0 ; python_version < '3.8' +importlib-metadata==1.3.0 ; python_version < '3.8' jinja2==2.10.3 jsonschema==3.2.0 markupsafe==1.1.1 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py index 27085f79..87aec7f8 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py @@ -13,8 +13,7 @@ from dlpx.virtualization._internal import (codegen, exceptions, file_util, package_util, - plugin_dependency_util, plugin_util, - util_classes) + plugin_dependency_util, plugin_util) logger = logging.getLogger(__name__) @@ -54,10 +53,10 @@ def build(plugin_config, local_vsdk_root = os.path.expanduser(local_vsdk_root) # Read content of the plugin config file provided and perform validations - logger.info('Reading and validating plugin config file %s', plugin_config) + logger.info('Validating plugin config file %s', plugin_config) try: - result = plugin_util.read_and_validate_plugin_config_file( - plugin_config, not generate_only, False, skip_id_validation) + result = plugin_util.validate_plugin_config_file( + plugin_config, not generate_only, skip_id_validation) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -69,11 +68,11 @@ def build(plugin_config, plugin_config, plugin_config_content['schemaFile']) # Read schemas from the file provided in the config and validate them - logger.info('Reading and validating schemas from %s', schema_file) + logger.info('Validating schemas from %s', schema_file) try: - result = plugin_util.read_and_validate_schema_file( - schema_file, not generate_only) + result = plugin_util.validate_schema_file(schema_file, + not generate_only) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -119,11 +118,6 @@ def build(plugin_config, plugin_manifest = {} if result: plugin_manifest = result.plugin_manifest - if result.warnings: - warning_msg = util_classes.MessageUtils.warning_msg( - result.warnings) - logger.warn('{}\n{} Warning(s). {} Error(s).'.format( - warning_msg, len(result.warnings['warning']), 0)) # # Setup a build directory for the plugin in its root. 
Dependencies are diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py index 459dd3a4..9e805c8b 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py @@ -103,9 +103,8 @@ def init(root, ingestion_strategy, name, host_type): logger.info('Writing schema file at %s.', schema_file_path) shutil.copyfile(SCHEMA_TEMPLATE_PATH, schema_file_path) - # Read and valida the schema file - result = plugin_util.read_and_validate_schema_file( - schema_file_path, False) + # Validate the schema file. + result = plugin_util.validate_schema_file(schema_file_path, False) # Generate the definitions based on the schema file codegen.generate_python(name, src_dir_path, diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index e55d42a0..f684ee36 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -63,9 +63,13 @@ def import_plugin(self): Imports the plugin module, does basic validation. Returns: plugin manifest - dict describing methods implemented in the plugin - Note: - warnings - dict containing a list of errors or warnings can be - obtained by the caller via warnings property. + NOTE: + Importing module in the current context pollutes the runtime of + the caller, in this case dvp. If the module being imported, for + e.g. contains code that adds a handler to the root logger at + import time, this can cause issues with logging in this code and + callers of validator. To avoid such issues, perform the import in + in a sub-process and on completion return the output. """ logger.debug('Importing plugin module : %s', self.__plugin_module) @@ -73,25 +77,25 @@ def import_plugin(self): plugin_manifest, warnings = self.__import_plugin() self.__post_import_checks(plugin_manifest, warnings) - return plugin_manifest, warnings + return plugin_manifest def __pre_import_checks(self): """ Performs checks of the plugin code that should take place prior to importing. """ - warnings = PluginImporter.__check_for_undefined_names(self.__src_dir) - PluginImporter.__report_warnings_and_exceptions(warnings) + warnings = self.__check_for_undefined_names(self.__src_dir) + self.__report_warnings_and_exceptions(warnings) def __import_plugin(self): """ - Imports the module to check for errors or issues. Also does an eval on - the entry point. + Imports the module in a sub-process to check for errors or issues. + Also does an eval on the entry point. 
""" plugin_manifest = {} warnings = defaultdict(list) try: - plugin_manifest, warnings = (PluginImporter.__import_in_subprocess( + plugin_manifest, warnings = (self.__import_in_subprocess( self.__src_dir, self.__plugin_module, self.__plugin_entry_point, self.__plugin_type, self.__validate)) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 8ef97776..98c648cc 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -4,6 +4,7 @@ import logging import os +from contextlib import contextmanager from dlpx.virtualization._internal import exceptions, util_classes from dlpx.virtualization._internal.plugin_validator import PluginValidator @@ -13,10 +14,24 @@ logger = logging.getLogger(__name__) -def read_and_validate_plugin_config_file(plugin_config, - stop_build, - run_all_validations, - skip_id_validation=False): +@contextmanager +def validate_error_handler(plugin_file, validation_mode): + try: + yield + except Exception as e: + if validation_mode is ValidationMode.INFO: + logger.info('Validation failed on plugin file %s : %s', + plugin_file, e) + elif validation_mode is ValidationMode.WARNING: + logger.warning('Validation failed on plugin file %s : %s', + plugin_file, e) + else: + raise e + + +def validate_plugin_config_file(plugin_config, + stop_build, + skip_id_validation=False): """ Reads a plugin config file and validates the contents using a pre-defined schema. If stop_build is True, will report exception @@ -30,9 +45,11 @@ def read_and_validate_plugin_config_file(plugin_config, plugin_config_schema_file = ( util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator(plugin_config, plugin_config_schema_file, - validation_mode, run_all_validations) - validator.validate() + validator = PluginValidator(plugin_config, plugin_config_schema_file) + + with validate_error_handler(plugin_config, validation_mode): + validator.validate_plugin_config() + return validator.result @@ -53,13 +70,15 @@ def get_plugin_manifest(plugin_config_file, if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) validator = PluginValidator.from_config_content(plugin_config_file, plugin_config_content, - plugin_config_schema_file, - validation_mode) - validator.validate() + plugin_config_schema_file) + + with validate_error_handler(plugin_config_file, validation_mode): + validator.validate_plugin_module() + return validator.result -def read_and_validate_schema_file(schema_file, stop_build): +def validate_schema_file(schema_file, stop_build): """ Reads a plugin schema file and validates the contents using a pre-defined schema. If stop_build is True, will report exception @@ -69,9 +88,11 @@ def read_and_validate_schema_file(schema_file, stop_build): """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + + with validate_error_handler(schema_file, validation_mode): + validator.validate() + return validator.result @@ -79,8 +100,7 @@ def get_plugin_config_property(plugin_config_path, prop): """ Returns the value for a specific property from the plugin config file. 
""" - result = read_and_validate_plugin_config_file(plugin_config_path, False, - False) + result = validate_plugin_config_file(plugin_config_path, False, False) return result.plugin_config_content[prop] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py index 09c0e0b2..e4a1d572 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py @@ -5,19 +5,17 @@ import json import logging import os -from collections import defaultdict, namedtuple +from collections import namedtuple import yaml from dlpx.virtualization._internal import (exceptions, file_util, plugin_importer) -from dlpx.virtualization._internal.util_classes import ValidationMode from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple( - 'validation_result', - ['plugin_config_content', 'plugin_manifest', 'warnings']) +validation_result = namedtuple('validation_result', + ['plugin_config_content', 'plugin_manifest']) class PluginValidator: @@ -29,33 +27,26 @@ class PluginValidator: config, content of the python module specified in in the pluginEntryPoint and also name of the plugin entry point in the module. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back. """ def __init__(self, plugin_config, plugin_config_schema, - validation_mode, - run_all_validations, plugin_config_content=None): self.__plugin_config = plugin_config self.__plugin_config_schema = plugin_config_schema - self.__validation_mode = validation_mode - self.__run_all_validations = run_all_validations self.__plugin_config_content = plugin_config_content self.__plugin_manifest = None - self.__warnings = defaultdict(list) @property def result(self): return validation_result( plugin_config_content=self.__plugin_config_content, - plugin_manifest=self.__plugin_manifest, - warnings=self.__warnings) + plugin_manifest=self.__plugin_manifest) @classmethod def from_config_content(cls, plugin_config_file, plugin_config_content, - plugin_config_schema, validation_mode): + plugin_config_schema): """ Instantiates the validator with given plugin config content. plugin_config_file path is not read but used to get the absolute @@ -63,26 +54,10 @@ def from_config_content(cls, plugin_config_file, plugin_config_content, Returns: PluginValidator """ - return cls(plugin_config_file, plugin_config_schema, validation_mode, - True, plugin_config_content) + return cls(plugin_config_file, plugin_config_schema, + plugin_config_content) - def validate(self): - """ - Validates the plugin config file. - """ - logger.debug('Run config validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin config file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin config file : %s', - e) - else: - raise e - - def __run_validations(self): + def validate_plugin_config(self): """ Reads a plugin config file and validates the contents using a pre-defined schema. 
If validation is successful, tries to import @@ -97,9 +72,12 @@ def __run_validations(self): self.__plugin_config_content) self.__validate_plugin_config_content() - if not self.__run_all_validations: - logger.debug('Plugin config file schema validation is done') - return + def validate_plugin_module(self): + """ + Tries to import the plugin module and validates the entry point + specified. + """ + self.validate_plugin_config() src_dir = file_util.get_src_dir_path( self.__plugin_config, self.__plugin_config_content['srcDir']) @@ -208,10 +186,8 @@ def __validate_plugin_entry_point(self, src_dir): plugin_type = self.__plugin_config_content['pluginType'] try: - self.__plugin_manifest, self.__warnings = ( - PluginValidator.__import_plugin(src_dir, entry_point_module, - entry_point_object, - plugin_type)) + self.__plugin_manifest = (self.__import_plugin( + src_dir, entry_point_module, entry_point_object, plugin_type)) except ImportError as err: raise exceptions.UserError( 'Unable to load module \'{}\' specified in ' @@ -225,18 +201,12 @@ def __import_plugin(src_dir, entry_point_module, entry_point_object, plugin_type): """ - Imports the given python module. - NOTE: - Importing module in the current context pollutes the runtime of - the caller, in this case dvp. If the module being imported, for - e.g. contains code that adds a handler to the root logger at - import time, this can cause issues with logging in this code and - callers of validator. To avoid such issues, perform the import in - in a sub-process and on completion return the output. + Imports the given python module, does some validations and returns the + manifest describing implemented plugin operations. """ importer = plugin_importer.PluginImporter(src_dir, entry_point_module, entry_point_object, plugin_type, True) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() - return manifest, warnings + return manifest diff --git a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py index e66241c7..46354fce 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py @@ -5,16 +5,14 @@ import json import logging import os -from collections import defaultdict, namedtuple +from collections import namedtuple from dlpx.virtualization._internal import exceptions -from dlpx.virtualization._internal.util_classes import ValidationMode from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple('validation_result', - ['plugin_schemas', 'warnings']) +validation_result = namedtuple('validation_result', ['plugin_schemas']) class SchemaValidator: @@ -24,42 +22,18 @@ class SchemaValidator: Returns: On successful validation, callers can get the content of the plugin schemas. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back.
""" - def __init__(self, - schema_file, - plugin_meta_schema, - validation_mode, - schemas=None): + def __init__(self, schema_file, plugin_meta_schema, schemas=None): self.__schema_file = schema_file self.__plugin_meta_schema = plugin_meta_schema - self.__validation_mode = validation_mode self.__plugin_schemas = schemas - self.__warnings = defaultdict(list) @property def result(self): - return validation_result(plugin_schemas=self.__plugin_schemas, - warnings=self.__warnings) + return validation_result(plugin_schemas=self.__plugin_schemas) def validate(self): - """ - Validates the plugin schema file. - """ - logger.debug('Run schema validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin schema file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin schema file : %s', - e) - else: - raise e - - def __run_validations(self): """ Reads a plugin schema file and validates the contents using a pre-defined schema. diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 2b6edb36..45c16bd5 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -26,7 +26,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-004 +package_version = 1.1.0-internal-upgrade-005 virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 62d72c83..44c7f4db 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -59,7 +59,7 @@ def test_build_success(mock_relative_path, mock_install_deps, @pytest.mark.parametrize('artifact_filename', ['somefile.json']) @mock.patch.object(PluginValidator, '_PluginValidator__import_plugin', - return_value=({}, None)) + return_value={}) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') @mock.patch( 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 11d6bb9c..e208108b 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -85,9 +85,9 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, init.init(tmpdir.strpath, ingestion_strategy, plugin_name, host_type) # Validate the config file is as we expect. 
- result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content @@ -121,9 +121,9 @@ def test_init_without_plugin_name(tmpdir): init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, "", util_classes.UNIX_HOST_TYPE) - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content @@ -134,9 +134,9 @@ def test_init_without_plugin_name(tmpdir): def test_init_windows_plugin(tmpdir, plugin_name): init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, plugin_name, util_classes.WINDOWS_HOST_TYPE) - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content # Validate that the host type is WINDOWS @@ -157,10 +157,26 @@ def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, init.DEFAULT_PLUGIN_CONFIG_FILE) schema_file = os.path.join(tmpdir.strpath, init.DEFAULT_SCHEMA_FILE) validator = plugin_validator.PluginValidator(plugin_config_file, - schema_file, True, True) - validator.validate() + schema_file) + + # Assert config file and import validations are not done. + assert not validator.result.plugin_config_content + assert not validator.result.plugin_manifest + + validator.validate_plugin_config() + + # Assert config file is validated and import validation is not done. + assert validator.result.plugin_config_content + assert not validator.result.plugin_manifest - assert not validator.result.warnings + validator.validate_plugin_module() + + # + # Assert both config content and import validation are done and result + # tuple has both set to valid values. + # + assert validator.result.plugin_config_content + assert validator.result.plugin_manifest @staticmethod def test_invalid_with_config_file(plugin_config_file): @@ -203,8 +219,7 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, @staticmethod def test_default_schema_definition(schema_template): validator = schema_validator.SchemaValidator( - None, util_classes.PLUGIN_SCHEMA, - util_classes.ValidationMode.ERROR, schema_template) + None, util_classes.PLUGIN_SCHEMA, schema_template) validator.validate() # Validate the repository schema only has the 'name' property. 
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 42777729..f535ed92 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -8,7 +8,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-004' + assert package_util.get_version() == '1.1.0-internal-upgrade-005' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 104af635..4136e990 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -2,7 +2,6 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # import exceptions -from collections import defaultdict import mock import pytest @@ -18,9 +17,8 @@ def test_get_plugin_manifest(mock_import, src_dir, plugin_type, mock_import.return_value = plugin_module_content importer = PluginImporter(src_dir, entry_point_module, entry_point_object, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() - assert not warnings assert manifest == plugin_manifest @staticmethod @@ -30,15 +28,13 @@ def test_plugin_module_content_none(mock_import, src_dir, plugin_type, entry_point_object): mock_import.return_value = None manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, entry_point_module, entry_point_object, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = str(err_info) - assert warnings.items() > 0 assert manifest == {} assert 'Plugin module content is None.' in message @@ -48,15 +44,13 @@ def test_plugin_entry_object_none(mock_import, src_dir, plugin_type, plugin_name, plugin_module_content): mock_import.return_value = plugin_module_content manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, None, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = str(err_info) - assert warnings.items() > 0 assert manifest == {} assert 'Plugin entry point object is None.' 
in message @@ -68,15 +62,13 @@ def test_plugin_entry_point_nonexistent(mock_import, src_dir, plugin_type, entry_point_name = "nonexistent entry point" mock_import.return_value = plugin_module_content manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = err_info.value.message - assert warnings.items() > 0 assert manifest == {} assert ('\'{}\' is not a symbol in module'.format(entry_point_name) in message) @@ -90,15 +82,13 @@ def test_plugin_object_none(mock_import, src_dir, plugin_type, plugin_name, mock_import.return_value = plugin_module_content manifest = {} - warnings = defaultdict(list) with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, none_entry_point, plugin_type, False) - manifest, warnings = importer.import_plugin() + manifest = importer.import_plugin() message = err_info.value.message - assert warnings.items() > 0 assert manifest == {} assert ('Plugin object retrieved from the entry point {} is' ' None'.format(none_entry_point)) in message diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index a04b6de1..43c37915 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -7,9 +7,8 @@ import mock import pytest -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import exceptions, plugin_util, util_classes from dlpx.virtualization._internal.plugin_validator import PluginValidator -from dlpx.virtualization._internal.util_classes import ValidationMode @pytest.fixture @@ -30,9 +29,8 @@ def test_plugin_bad_schema(plugin_config_file, plugin_config_content, schema_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator.from_config_content( - plugin_config_file, plugin_config_content, schema_file, - ValidationMode.ERROR) - validator.validate() + plugin_config_file, plugin_config_content, schema_file) + validator.validate_plugin_config() message = err_info.value.message assert ('Failed to load schemas because {} is not a valid json file.' 
@@ -44,9 +42,8 @@ def test_plugin_bad_schema(plugin_config_file, plugin_config_content, def test_plugin_bad_config_file(plugin_config_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert message == ("Unable to read plugin config file '{}'" @@ -62,8 +59,8 @@ def test_plugin_valid_content(mock_import_plugin, src_dir, plugin_config_file, plugin_config_content): validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() @@ -73,8 +70,8 @@ def test_plugin_missing_field(plugin_config_file, plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @@ -93,8 +90,8 @@ def test_plugin_version_format(mock_import_plugin, src_dir, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: message = err_info.message @@ -119,8 +116,8 @@ def test_plugin_entry_point(mock_import_plugin, src_dir, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: message = err_info.message @@ -135,8 +132,8 @@ def test_plugin_additional_properties(src_dir, plugin_config_file, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert ("Additional properties are not allowed" @@ -150,8 +147,8 @@ def test_multiple_validation_errors(plugin_config_file, with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message assert "'xxx' is not one of ['UNIX', 'WINDOWS']" in message @@ -173,24 +170,19 @@ def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - 
validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod - @pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, ValidationMode.WARNING]) - def test_plugin_info_warn_mode(plugin_config_file, plugin_config_content, - validation_mode): + def test_plugin_info_warn_mode(plugin_config_file, plugin_config_content): err_info = None try: - validator = PluginValidator.from_config_content( - plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, validation_mode) - validator.validate() + plugin_util.get_plugin_manifest(plugin_config_file, + plugin_config_content, False) except Exception as e: err_info = e @@ -206,9 +198,8 @@ def test_successful_validation(mock_file_util, plugin_config_file, mock_file_util.return_value = fake_src_dir validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() @staticmethod @pytest.mark.parametrize( @@ -233,9 +224,8 @@ def test_multiple_warnings(mock_file_util, plugin_config_file, with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() message = err_info.value.message for error in expected_errors: @@ -257,9 +247,8 @@ def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() message = err_info.value.message for error in expected_errors: @@ -285,9 +274,8 @@ def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() message = err_info.value.message assert expected_error in message @@ -301,9 +289,8 @@ def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): with pytest.raises(exceptions.SDKToolingError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + util_classes.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() message = err_info.value.message assert ('SDK Error: Got an arbitrary non-platforms error for testing.' 
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py index 00be8fe4..99ea6761 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py @@ -6,9 +6,8 @@ import os import pytest -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import exceptions, plugin_util, util_classes from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode class TestSchemaValidator: @@ -18,7 +17,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): f = tmpdir.join(schema_filename) f.write(meta_schema) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, f, ValidationMode.ERROR) + validator = SchemaValidator(schema_file, f) validator.validate() message = err_info.value.message @@ -31,8 +30,7 @@ def test_bad_schema_file(schema_file): os.remove(schema_file) with pytest.raises(exceptions.UserError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -42,8 +40,7 @@ def test_bad_schema_file(schema_file): @staticmethod def test_valid_schema(schema_file): - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -68,8 +65,7 @@ def test_missing_root_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -92,8 +88,7 @@ def test_missing_root_type(schema_file): def test_bad_root_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -119,8 +114,7 @@ def test_bad_root_type_num(schema_file): def test_bad_root_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -145,8 +139,7 @@ def test_bad_root_type(schema_file): def test_missing_identity_fields(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -171,8 +164,7 @@ def test_missing_identity_fields(schema_file): def test_missing_name_field(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -199,8 +191,7 @@ def test_missing_sub_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. # - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -223,8 +214,7 @@ def test_missing_sub_type(schema_file): def test_bad_sub_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -250,8 +240,7 @@ def test_bad_sub_type(schema_file): def test_bad_sub_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -279,8 +268,7 @@ def test_missing_required_field(schema_file): pytest.skip("required fields validation is not working yet") with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -305,8 +293,7 @@ def test_missing_required_field(schema_file): def test_multiple_validation_errors(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -314,8 +301,6 @@ def test_multiple_validation_errors(schema_file): assert "'identityFields' is a required property" in message @staticmethod - 
@pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, ValidationMode.WARNING]) @pytest.mark.parametrize('source_config_definition', [{ 'type': 'object', @@ -332,13 +317,10 @@ def test_multiple_validation_errors(schema_file): 'nameField': 'name', 'identityFields': ['name'] }]) - def test_bad_sub_type_info_warn_mode(schema_file, validation_mode): + def test_bad_sub_type_info_warn_mode(schema_file): err_info = None try: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + plugin_util.validate_schema_file(schema_file, False) except Exception as e: err_info = e @@ -378,8 +360,7 @@ def test_bad_sub_type_info_warn_mode(schema_file, validation_mode): def test_bad_type_in_array(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + util_classes.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message From 6e8eb99c67a168787a2914a0dc3965dba36c14dd Mon Sep 17 00:00:00 2001 From: murali Date: Wed, 8 Jan 2020 08:42:49 -0800 Subject: [PATCH 07/25] PYT-642 Cleanup util_classes.py and fix any circular dependencies in plugin validator/imported code Reviewed at: http://reviews.delphix.com/r/54864/ --- build.gradle | 2 +- libs/Pipfile.lock | 16 ++--- libs/lock.dev-requirements.txt | 6 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 12 ++-- platform/lock.dev-requirements.txt | 2 +- platform/lock.requirements.txt | 2 +- tools/Pipfile.lock | 30 ++++----- tools/lock.dev-requirements.txt | 12 ++-- tools/lock.requirements.txt | 6 +- .../dlpx/virtualization/_internal/cli.py | 18 +++--- .../dlpx/virtualization/_internal/codegen.py | 6 +- .../_internal/commands/initialize.py | 14 ++-- .../dlpx/virtualization/_internal/const.py | 21 ++++++ .../_internal/plugin_importer.py | 32 ++++++++-- .../virtualization/_internal/plugin_util.py | 32 +++++++--- .../virtualization/_internal/settings.cfg | 4 +- .../virtualization/_internal/util_classes.py | 63 ------------------ .../_internal/commands/test_build.py | 8 +-- .../_internal/commands/test_codegen.py | 9 ++- .../_internal/commands/test_initialize.py | 64 +++++++++---------- .../dlpx/virtualization/_internal/conftest.py | 7 +- .../dlpx/virtualization/_internal/test_cli.py | 47 ++++++-------- .../_internal/test_package_util.py | 4 +- .../_internal/test_plugin_validator.py | 24 +++---- .../_internal/test_schema_validator.py | 40 +++++------- 26 files changed, 222 insertions(+), 261 deletions(-) create mode 100644 tools/src/main/python/dlpx/virtualization/_internal/const.py delete mode 100644 tools/src/main/python/dlpx/virtualization/_internal/util_classes.py diff --git a/build.gradle b/build.gradle index cd8764fe..a4f17c4a 100644 --- a/build.gradle +++ b/build.gradle @@ -12,7 +12,7 @@ subprojects { * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. 
*/ project.ext.dvpApiVersion = "1.1.0-master-003" - version = "1.1.0-internal-005" + version = "1.1.0-internal-006" } def binDir = "${rootProject.projectDir}/bin" diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 141a20ed..ef4c8136 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "6ce0b104acdd7af1e177100807866ee7176d5da58f6feb55b3a2aa5bbc8c4c8c" + "sha256": "9fdae34cda2117051576372f70698033622d25c390672a83669f63d5ae206370" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz", - "version": "== 1.1.0-internal-005" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "protobuf": { "hashes": [ @@ -91,7 +91,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { @@ -131,7 +131,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -157,11 +157,11 @@ }, "pytest": { "hashes": [ - "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", - "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" + "sha256:6192875be8af57b694b7c4904e909680102befcb99e610ef3d9f786952f795aa", + "sha256:f8447ebf8fd3d362868a5d3f43a9df786dfdfe9608843bd9002a2d47a104808f" ], "index": "delphix", - "version": "==4.6.7" + "version": "==4.6.8" }, "scandir": { "hashes": [ diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 4bb2799e..6ddfb04e 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,16 +3,16 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==1.3.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==19.2 -pathlib2==2.3.5 ; python_version < '3.6' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 -pytest==4.6.7 +pytest==4.6.8 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 wcwidth==0.1.7 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 772c601f..83fb69f1 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz dvp-api==1.1.0-master-003 protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 702a4b73..067d5491 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "fc15781d03ed3f3d9e8ff4f588f90cab291c9899ef4339d82656e5d3aaa1aa68" + "sha256": "486b00d11f451beda7c61fbb8a4b65f6aa8aec9866dfd4f460fb2034bdaac806" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - 
"path": "../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz", - "version": "== 1.1.0-internal-005" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "enum34": { "hashes": [ @@ -168,11 +168,11 @@ }, "pytest": { "hashes": [ - "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", - "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" + "sha256:6192875be8af57b694b7c4904e909680102befcb99e610ef3d9f786952f795aa", + "sha256:f8447ebf8fd3d362868a5d3f43a9df786dfdfe9608843bd9002a2d47a104808f" ], "index": "delphix", - "version": "==4.6.7" + "version": "==4.6.8" }, "scandir": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index da55f105..6ddfb04e 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -12,7 +12,7 @@ pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.0 pyparsing==2.4.5 -pytest==4.6.7 +pytest==4.6.8 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 wcwidth==0.1.7 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 1032f9d4..16d5c83c 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz dvp-api==1.1.0-master-003 enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index 71bcea2a..f5c98751 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "68f9a4de5023d37cdcef8e799d69437a07b7ea17980210634090a5e146d21faa" + "sha256": "cc67692aa51c06b35e89165038a8e0789edc38dd91b0e60f0f932d631739931b" }, "pipfile-spec": 6, "requires": {}, @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -68,8 +68,8 @@ "version": "==0.6.0.post1" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-005.tar.gz", - "version": "== 1.1.0-internal-005" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "entrypoints": { "hashes": [ @@ -102,7 +102,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "idna": { @@ -313,7 +313,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -362,12 +362,12 @@ "version": "==5.0" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz", - "version": "== 1.1.0-internal-005" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-005.tar.gz", - 
"version": "== 1.1.0-internal-005" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "entrypoints": { "hashes": [ @@ -400,7 +400,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "functools32": { @@ -408,7 +408,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "futures": { @@ -517,11 +517,11 @@ }, "pytest": { "hashes": [ - "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", - "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" + "sha256:6192875be8af57b694b7c4904e909680102befcb99e610ef3d9f786952f795aa", + "sha256:f8447ebf8fd3d362868a5d3f43a9df786dfdfe9608843bd9002a2d47a104808f" ], "index": "delphix", - "version": "==4.6.7" + "version": "==4.6.8" }, "pytest-cov": { "hashes": [ diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index e9b41c27..3f7c8477 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,17 +1,17 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-005.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-005.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-006.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3.2' +configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' coverage==5.0 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3.2' +funcsigs==1.0.2 ; python_version < '3.0' +functools32==3.2.3.post2 ; python_version < '3' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==1.3.0 ; python_version < '3.8' @@ -27,7 +27,7 @@ pycodestyle==2.5.0 pyflakes==2.1.1 pyparsing==2.4.5 pytest-cov==2.8.1 -pytest==4.6.7 +pytest==4.6.8 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 6dc11593..55df3adb 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,16 +1,16 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../platform/build/python-dist/dvp-platform-1.1.0-internal-005.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-006.tar.gz attrs==19.3.0 certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version < '3.2' +configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3.2' +functools32==3.2.3.post2 ; python_version < '3' idna==2.8 importlib-metadata==1.3.0 ; python_version < '3.8' jinja2==2.10.3 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/cli.py b/tools/src/main/python/dlpx/virtualization/_internal/cli.py 
index aef8caf9..a71c6343 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/cli.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/cli.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -9,9 +9,8 @@ from contextlib import contextmanager import click -from dlpx.virtualization._internal import (click_util, exceptions, - logging_util, package_util, - util_classes) +from dlpx.virtualization._internal import (click_util, const, exceptions, + logging_util, package_util) from dlpx.virtualization._internal.commands import build as build_internal from dlpx.virtualization._internal.commands import \ download_logs as download_logs_internal @@ -109,20 +108,19 @@ def delphix_sdk(verbose, quiet): @click.option( '-s', '--ingestion-strategy', - default=util_classes.DIRECT_TYPE, + default=const.DIRECT_TYPE, show_default=True, - type=click.Choice([util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE], + type=click.Choice([const.DIRECT_TYPE, const.STAGED_TYPE], case_sensitive=False), help=('Set the ingestion strategy of the plugin. A "direct" plugin ' 'ingests without a staging server while a "staged" plugin ' 'requires a staging server.')) @click.option('-t', '--host-type', - default=util_classes.UNIX_HOST_TYPE, + default=const.UNIX_HOST_TYPE, show_default=True, - type=click.Choice([ - util_classes.UNIX_HOST_TYPE, util_classes.WINDOWS_HOST_TYPE - ]), + type=click.Choice( + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]), help='Set the host platform supported by the plugin.') def init(root, ingestion_strategy, name, host_type): """ diff --git a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py index c8b11d0f..9eda39b7 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import copy @@ -10,7 +10,7 @@ import shutil import subprocess -from dlpx.virtualization._internal import exceptions, file_util, util_classes +from dlpx.virtualization._internal import const, exceptions, file_util logger = logging.getLogger(__name__) UNKNOWN_ERR = 'UNKNOWN_ERR' @@ -77,7 +77,7 @@ def generate_python(name, source_dir, plugin_config_dir, schema_content): # relevant to the plugin writer. We want to always force this to be # recreated. # - output_dir = os.path.join(plugin_config_dir, util_classes.OUTPUT_DIR_NAME) + output_dir = os.path.join(plugin_config_dir, const.OUTPUT_DIR_NAME) logger.info('Creating new output directory: {}'.format(output_dir)) file_util.make_dir(output_dir, True) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py index 9e805c8b..05b8eff4 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import logging @@ -10,8 +10,8 @@ import jinja2 import yaml -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - plugin_util, util_classes) +from dlpx.virtualization._internal import (codegen, const, exceptions, + file_util, plugin_util) logger = logging.getLogger(__name__) @@ -163,15 +163,15 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy, host_type): template = env.get_template(ENTRY_POINT_TEMPLATE_NAME) - if host_type == util_classes.WINDOWS_HOST_TYPE: + if host_type == const.WINDOWS_HOST_TYPE: default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" - elif host_type == util_classes.UNIX_HOST_TYPE: + elif host_type == const.UNIX_HOST_TYPE: default_mount_path = "/tmp/dlpx_staged_mounts/{}" - if ingestion_strategy == util_classes.DIRECT_TYPE: + if ingestion_strategy == const.DIRECT_TYPE: linked_operations = env.get_template( DIRECT_OPERATIONS_TEMPLATE_NAME).render() - elif ingestion_strategy == util_classes.STAGED_TYPE: + elif ingestion_strategy == const.STAGED_TYPE: linked_operations = env.get_template( STAGED_OPERATIONS_TEMPLATE_NAME).render( default_mount_path=default_mount_path) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/const.py b/tools/src/main/python/dlpx/virtualization/_internal/const.py new file mode 100644 index 00000000..2022af51 --- /dev/null +++ b/tools/src/main/python/dlpx/virtualization/_internal/const.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# + +import os + +UNIX_HOST_TYPE = 'UNIX' +WINDOWS_HOST_TYPE = 'WINDOWS' +STAGED_TYPE = 'STAGED' +DIRECT_TYPE = 'DIRECT' + +OUTPUT_DIR_NAME = '.dvp-gen-output' +PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), + 'validation_schemas') +PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, + 'plugin_config_schema.json') + +PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( + PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') + +PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 37ca0250..540b0830 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import importlib import inspect @@ -10,17 +10,39 @@ from multiprocessing import Process, Queue import yaml -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE -from dlpx.virtualization._internal.util_classes import MessageUtils from flake8.api import legacy as flake8 logger = logging.getLogger(__name__) -PLUGIN_IMPORTER_YAML = os.path.join(util_classes.PLUGIN_SCHEMAS_DIR, +PLUGIN_IMPORTER_YAML = os.path.join(const.PLUGIN_SCHEMAS_DIR, 'plugin_importer.yaml') +class MessageUtils: + """ + Defines helper methods to format warning and exception messages.
+ """ + @classmethod + def exception_msg(cls, exceptions): + exception_msg = '\n'.join( + cls.__format_msg('Error', ex) for ex in exceptions['exception']) + return exception_msg + + @classmethod + def warning_msg(cls, warnings): + warning_msg = '\n'.join( + cls.__format_msg('Warning', warning) + for warning in warnings['warning']) + return warning_msg + + @staticmethod + def __format_msg(msg_type, msg): + msg_str = "{}: {}".format(msg_type, msg) + return msg_str + + def load_validation_maps(): """ Reads a plugin config file and raises UserError if there is an issue @@ -393,7 +415,7 @@ def _check_args(method_name, expected_args, actual_args): def _lookup_expected_args(plugin_type, plugin_op_type, plugin_op_name): - if plugin_type == util_classes.DIRECT_TYPE: + if plugin_type == const.DIRECT_TYPE: return PluginImporter.expected_direct_args_by_op[plugin_op_type][ plugin_op_name] else: diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 98c648cc..06497cbd 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -1,19 +1,31 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # +import enum import logging import os from contextlib import contextmanager -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.plugin_validator import PluginValidator from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode logger = logging.getLogger(__name__) +class ValidationMode(enum.Enum): + """ + Defines the validation mode that validator uses. + INFO - validator will give out info messages if validation fails. + WARNING - validator will log a warning if validation fails. + ERROR - validator will raise an exception if validation fails. 
+ """ + INFO = 1 + WARNING = 2 + ERROR = 3 + + @contextmanager def validate_error_handler(plugin_file, validation_mode): try: @@ -42,9 +54,9 @@ def validate_plugin_config_file(plugin_config, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) + plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION + if skip_id_validation else + const.PLUGIN_CONFIG_SCHEMA) validator = PluginValidator(plugin_config, plugin_config_schema_file) with validate_error_handler(plugin_config, validation_mode): @@ -65,9 +77,9 @@ def get_plugin_manifest(plugin_config_file, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) + plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION + if skip_id_validation else + const.PLUGIN_CONFIG_SCHEMA) validator = PluginValidator.from_config_content(plugin_config_file, plugin_config_content, plugin_config_schema_file) @@ -88,7 +100,7 @@ def validate_schema_file(schema_file, stop_build): """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) with validate_error_handler(schema_file, validation_mode): validator.validate() diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 23e7e676..e51ab54d 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # # @@ -21,7 +21,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-005 +package_version = 1.1.0-internal-006 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py b/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py deleted file mode 100644 index deb2da64..00000000 --- a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -import enum -import os - -UNIX_HOST_TYPE = 'UNIX' -WINDOWS_HOST_TYPE = 'WINDOWS' -STAGED_TYPE = 'STAGED' -DIRECT_TYPE = 'DIRECT' - -OUTPUT_DIR_NAME = '.dvp-gen-output' -PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), - 'validation_schemas') -PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, - 'plugin_config_schema.json') - -# -# This is a temporary file. Once blackbox has made the transition to 'id' -# instead of 'name' and uses UUIDs for the id, this, and everything -# associated with it can be removed. -# -PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( - PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') - -PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') - - -class ValidationMode(enum.Enum): - """ - Defines the validation mode that validator uses. - INFO - validator will give out info messages if validation fails. 
- WARNING - validator will log a warning if validation fails. - ERROR - validator will raise an exception if validation fails. - """ - INFO = 1 - WARNING = 2 - ERROR = 3 - - -class MessageUtils: - """ - Defines helpers methods to format warning and exception messages. - """ - @staticmethod - def exception_msg(exceptions): - exception_msg = '\n'.join( - MessageUtils.__format_msg('Error', ex) - for ex in exceptions['exception']) - return exception_msg - - @staticmethod - def warning_msg(warnings): - warning_msg = '\n'.join( - MessageUtils.__format_msg('Warning', warning) - for warning in warnings['warning']) - return warning_msg - - @staticmethod - def __format_msg(msg_type, msg): - msg_str = "{}: {}".format(msg_type, msg) - return msg_str diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 44c7f4db..8478827e 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import json @@ -8,7 +8,7 @@ import mock import pytest import yaml -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.commands import build from dlpx.virtualization._internal.plugin_validator import PluginValidator @@ -627,10 +627,10 @@ def test_manual_discovery_parameter(plugin_config_content, src_dir, @staticmethod def test_plugin_config_schemas_diff(): - with open(util_classes.PLUGIN_CONFIG_SCHEMA) as f: + with open(const.PLUGIN_CONFIG_SCHEMA) as f: config_schema = json.load(f) - with open(util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: + with open(const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: config_schema_no_id = json.load(f) # Only the id's pattern should be different so remove it. diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py index 30c64df4..adca36ce 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import errno @@ -8,8 +8,7 @@ import subprocess import pytest -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - util_classes) +from dlpx.virtualization._internal import codegen, const, exceptions, file_util class TestCodegen: @@ -139,7 +138,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): assert popen_helper.package_name == codegen.CODEGEN_PACKAGE assert popen_helper.module_name == codegen.CODEGEN_MODULE expected_output_dir = os.path.join(gen_py.plugin_content_dir, - util_classes.OUTPUT_DIR_NAME) + const.OUTPUT_DIR_NAME) assert popen_helper.output_dir == expected_output_dir # Validate that the "generated" file were copied. 
@@ -158,7 +157,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): @staticmethod def test_get_build_dir_success(tmpdir): - testdir = os.path.join(tmpdir.strpath, util_classes.OUTPUT_DIR_NAME) + testdir = os.path.join(tmpdir.strpath, const.OUTPUT_DIR_NAME) file_util.make_dir(testdir, True) assert os.path.exists(testdir) assert os.path.isdir(testdir) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index e208108b..633596e6 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import ast @@ -9,9 +9,8 @@ import jinja2 import mock import pytest -from dlpx.virtualization._internal import (exceptions, plugin_util, - plugin_validator, schema_validator, - util_classes) +from dlpx.virtualization._internal import (const, exceptions, plugin_util, + plugin_validator, schema_validator) from dlpx.virtualization._internal.commands import initialize as init @@ -48,14 +47,14 @@ def format_entry_point_template(entry_point_template): template = jinja2.Environment().from_string(entry_point_template) def format_template(plugin_name, ingestion_strategy, host_type): - if host_type == util_classes.WINDOWS_HOST_TYPE: + if host_type == const.WINDOWS_HOST_TYPE: default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" - elif host_type == util_classes.UNIX_HOST_TYPE: + elif host_type == const.UNIX_HOST_TYPE: default_mount_path = "/tmp/dlpx_staged_mounts/{}" - if ingestion_strategy == util_classes.DIRECT_TYPE: + if ingestion_strategy == const.DIRECT_TYPE: operations = direct_operations_template() - elif ingestion_strategy == util_classes.STAGED_TYPE: + elif ingestion_strategy == const.STAGED_TYPE: operations = jinja2.Environment().from_string( staged_operations_template()) operations = operations.render( @@ -73,12 +72,10 @@ def format_template(plugin_name, ingestion_strategy, host_type): class TestInitialize: @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) - @pytest.mark.parametrize( - 'host_type', - [util_classes.UNIX_HOST_TYPE, util_classes.WINDOWS_HOST_TYPE]) + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) + @pytest.mark.parametrize('host_type', + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]) def test_init(tmpdir, ingestion_strategy, host_type, schema_template, plugin_name, format_entry_point_template): # Initialize an empty directory. 
@@ -118,8 +115,7 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, @staticmethod def test_init_without_plugin_name(tmpdir): - init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, "", - util_classes.UNIX_HOST_TYPE) + init.init(tmpdir.strpath, const.DIRECT_TYPE, "", const.UNIX_HOST_TYPE) result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), @@ -132,8 +128,8 @@ def test_init_without_plugin_name(tmpdir): @staticmethod def test_init_windows_plugin(tmpdir, plugin_name): - init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, plugin_name, - util_classes.WINDOWS_HOST_TYPE) + init.init(tmpdir.strpath, const.DIRECT_TYPE, plugin_name, + const.WINDOWS_HOST_TYPE) result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), True) @@ -142,16 +138,15 @@ def test_init_windows_plugin(tmpdir, plugin_name): # Validate that the host type is WINDOWS host_types = config['hostTypes'] assert len(host_types) == 1 - assert host_types[0] == util_classes.WINDOWS_HOST_TYPE + assert host_types[0] == const.WINDOWS_HOST_TYPE @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, plugin_name): init.init(tmpdir.strpath, ingestion_strategy, plugin_name, - util_classes.UNIX_HOST_TYPE) + const.UNIX_HOST_TYPE) plugin_config_file = os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE) @@ -181,21 +176,20 @@ def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, @staticmethod def test_invalid_with_config_file(plugin_config_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(plugin_config_file), - util_classes.DIRECT_TYPE, None, - util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(plugin_config_file), const.DIRECT_TYPE, + None, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_schema_file(schema_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(schema_file), util_classes.DIRECT_TYPE, - None, util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(schema_file), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_src_dir(src_dir): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(src_dir), util_classes.DIRECT_TYPE, None, - util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(src_dir), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod @mock.patch('yaml.dump') @@ -204,8 +198,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, tmpdir, plugin_name): mock_yaml_dump.side_effect = RuntimeError() with pytest.raises(exceptions.UserError): - init.init(tmpdir.strpath, util_classes.STAGED_TYPE, plugin_name, - util_classes.UNIX_HOST_TYPE) + init.init(tmpdir.strpath, const.STAGED_TYPE, plugin_name, + const.UNIX_HOST_TYPE) src_dir_path = os.path.join(tmpdir.strpath, init.DEFAULT_SRC_DIRECTORY) config_file_path = os.path.join(tmpdir.strpath, @@ -218,8 +212,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, @staticmethod def test_default_schema_definition(schema_template): - validator = schema_validator.SchemaValidator( - None, util_classes.PLUGIN_SCHEMA, schema_template) + validator = schema_validator.SchemaValidator(None, const.PLUGIN_SCHEMA, + schema_template) validator.validate() # 
Validate the repository schema only has the 'name' property. @@ -254,7 +248,7 @@ def test_default_schema_definition(schema_template): @staticmethod def test_default_entry_point(plugin_id): entry_point_contents = init._get_entry_point_contents( - plugin_id, util_classes.DIRECT_TYPE, util_classes.UNIX_HOST_TYPE) + plugin_id, const.DIRECT_TYPE, const.UNIX_HOST_TYPE) tree = ast.parse(entry_point_contents) for stmt in ast.walk(tree): if isinstance(stmt, ast.Assign): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 084c6dda..e2ee9c6d 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import configparser @@ -9,8 +9,7 @@ import pytest import yaml -from dlpx.virtualization._internal import (cli, click_util, package_util, - util_classes) +from dlpx.virtualization._internal import cli, click_util, const, package_util # # conftest.py is used to share fixtures among multiple tests files. pytest will @@ -320,7 +319,7 @@ def artifact_manual_discovery(): @pytest.fixture def plugin_type(): - return util_classes.DIRECT_TYPE + return const.DIRECT_TYPE @pytest.fixture diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py index 85123600..5afc609b 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import os @@ -8,7 +8,7 @@ import mock import pytest import yaml -from dlpx.virtualization._internal import cli, exceptions, util_classes +from dlpx.virtualization._internal import cli, const, exceptions class TestCli: @@ -106,10 +106,8 @@ def test_command_user_error(mock_init, plugin_name): assert result.output == 'codegen_error\n' # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.initialize.init') @@ -123,10 +121,8 @@ def test_command_non_user_error(mock_init, plugin_name): assert 'Internal error, please contact Delphix.\n' in result.output # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) class TestInitCli: @@ -140,26 +136,21 @@ def test_default_params(mock_init, plugin_name): assert result.exit_code == 0, 'Output: {}'.format(result.output) # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.initialize.init') def test_non_default_params(mock_init, plugin_name): runner = click_testing.CliRunner() - result = runner.invoke(cli.delphix_sdk, [ - 'init', '-s', util_classes.STAGED_TYPE, '-r', '.', '-n', - plugin_name - ]) + result = runner.invoke( + cli.delphix_sdk, + ['init', '-s', const.STAGED_TYPE, '-r', '.', '-n', plugin_name]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_init.assert_called_once_with(os.getcwd(), - util_classes.STAGED_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.STAGED_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_ingestion_strategy(plugin_name): @@ -184,8 +175,8 @@ def test_multiple_host_types(): runner = click_testing.CliRunner() result = runner.invoke(cli.delphix_sdk, [ - 'init', '-t', '{},{}'.format(util_classes.UNIX_HOST_TYPE, - util_classes.WINDOWS_HOST_TYPE) + 'init', '-t', '{},{}'.format(const.UNIX_HOST_TYPE, + const.WINDOWS_HOST_TYPE) ]) assert result.exit_code != 0 @@ -198,12 +189,10 @@ def test_windows_host_type(mock_init, plugin_name): result = runner.invoke( cli.delphix_sdk, - ['init', '-n', plugin_name, '-t', util_classes.WINDOWS_HOST_TYPE]) + ['init', '-n', plugin_name, '-t', const.WINDOWS_HOST_TYPE]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.WINDOWS_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.WINDOWS_HOST_TYPE) @staticmethod def test_invalid_host_type(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 65cf614b..c35d4510 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ 
b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import pytest @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-005' + assert package_util.get_version() == '1.1.0-internal-006' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index aa04f17d..afea9f1b 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import json @@ -9,7 +9,7 @@ import mock import pytest -from dlpx.virtualization._internal import exceptions, plugin_util, util_classes +from dlpx.virtualization._internal import const, exceptions, plugin_util from dlpx.virtualization._internal.plugin_validator import PluginValidator @@ -53,7 +53,7 @@ def test_plugin_bad_schema(plugin_config_file, schema_file): def test_plugin_bad_config_file(plugin_config_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() message = err_info.value.message @@ -79,7 +79,7 @@ def test_plugin_valid_content(mock_import_plugin, mock_relative_path, validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() @@ -98,7 +98,7 @@ def test_plugin_missing_field(plugin_config_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @@ -128,7 +128,7 @@ def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: @@ -165,7 +165,7 @@ def test_plugin_entry_point(mock_import_plugin, mock_relative_path, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: @@ -187,7 +187,7 @@ def test_plugin_additional_properties(src_dir, plugin_config_file): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message @@ -208,7 +208,7 @@ def 
test_multiple_validation_errors(plugin_config_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @@ -222,7 +222,7 @@ def test_staged_plugin(mock_file_util, fake_staged_plugin_config): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(fake_staged_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() message = err_info.value.message @@ -238,7 +238,7 @@ def test_direct_plugin(mock_file_util, fake_direct_plugin_config): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(fake_direct_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() message = err_info.value.message @@ -272,7 +272,7 @@ def test_plugin_id(mock_import_plugin, mock_relative_path, src_dir, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py index 99ea6761..2b064b57 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py @@ -1,12 +1,12 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import json import os import pytest -from dlpx.virtualization._internal import exceptions, plugin_util, util_classes +from dlpx.virtualization._internal import const, exceptions, plugin_util from dlpx.virtualization._internal.schema_validator import SchemaValidator @@ -29,8 +29,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): def test_bad_schema_file(schema_file): os.remove(schema_file) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -40,7 +39,7 @@ def test_bad_schema_file(schema_file): @staticmethod def test_valid_schema(schema_file): - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -65,7 +64,7 @@ def test_missing_root_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -87,8 +86,7 @@ def test_missing_root_type(schema_file): }]) def test_bad_root_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -113,8 +111,7 @@ def test_bad_root_type_num(schema_file): }]) def test_bad_root_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -138,8 +135,7 @@ def test_bad_root_type(schema_file): }]) def test_missing_identity_fields(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -163,8 +159,7 @@ def test_missing_identity_fields(schema_file): }]) def test_missing_name_field(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -191,7 +186,7 @@ def test_missing_sub_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -213,8 +208,7 @@ def test_missing_sub_type(schema_file): }]) def test_bad_sub_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -239,8 +233,7 @@ def test_bad_sub_type(schema_file): }]) def test_bad_sub_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -267,8 +260,7 @@ def test_missing_required_field(schema_file): # pytest.skip("required fields validation is not working yet") with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -292,8 +284,7 @@ def test_missing_required_field(schema_file): }]) def test_multiple_validation_errors(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -359,8 +350,7 @@ def test_bad_sub_type_info_warn_mode(schema_file): }]) def test_bad_type_in_array(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message From 42292d3054cf69501c2d22a5d6b11c489f7bdb11 Mon Sep 17 00:00:00 2001 From: murali Date: Fri, 10 Jan 2020 15:11:20 -0800 Subject: [PATCH 08/25] PYT-1048 [Backport of Issue PYT-642 to plugin-upgrade branch] Cleanup util_classes.py and fix any circular dependencies in plugin validator/imported code Reviewed at: http://reviews.delphix.com/r/55097/ --- build.gradle | 2 +- dvp/Pipfile.lock | 54 +++---- dvp/lock.dev-requirements.txt | 12 +- dvp/lock.requirements.txt | 8 +- libs/Pipfile.lock | 40 ++--- libs/lock.dev-requirements.txt | 14 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 38 ++--- platform/lock.dev-requirements.txt | 12 +- platform/lock.requirements.txt | 2 +- tools/Pipfile.lock | 144 +++++++++--------- tools/lock.dev-requirements.txt | 22 +-- tools/lock.requirements.txt | 8 +- .../dlpx/virtualization/_internal/cli.py | 18 +-- .../dlpx/virtualization/_internal/codegen.py | 6 +- .../_internal/commands/initialize.py | 14 +- .../dlpx/virtualization/_internal/const.py | 21 +++ .../_internal/plugin_importer.py | 40 ++++- .../virtualization/_internal/plugin_util.py | 32 ++-- .../virtualization/_internal/settings.cfg | 4 +- .../virtualization/_internal/util_classes.py | 73 --------- .../_internal/commands/test_build.py | 8 +- .../_internal/commands/test_codegen.py | 9 +- .../_internal/commands/test_initialize.py | 64 ++++---- .../dlpx/virtualization/_internal/conftest.py | 7 +- .../dlpx/virtualization/_internal/test_cli.py | 47 +++--- .../_internal/test_package_util.py | 4 +- 
.../_internal/test_plugin_validator.py | 30 ++-- .../_internal/test_schema_validator.py | 40 ++--- 29 files changed, 367 insertions(+), 408 deletions(-) create mode 100644 tools/src/main/python/dlpx/virtualization/_internal/const.py delete mode 100644 tools/src/main/python/dlpx/virtualization/_internal/util_classes.py diff --git a/build.gradle b/build.gradle index 0146a1a3..014334e7 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-005" + version = "1.1.0-internal-upgrade-006" } def binDir = "${rootProject.projectDir}/bin" diff --git a/dvp/Pipfile.lock b/dvp/Pipfile.lock index 49c8d13b..8aeab25d 100644 --- a/dvp/Pipfile.lock +++ b/dvp/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "29cd8bea844d56f1e9296ac2e2d28a4eff66f99b5d4f9f4201d9b6290e6a8c8d" + "sha256": "a25c21cda1d50cf93c86f2b819a8ebae28b1c907b4d98f557f8c0f8663b69049" }, "pipfile-spec": 6, "requires": {}, @@ -15,20 +15,20 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" }, "dvp-tools": { - "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-004.tar.gz", - "version": "== 1.1.0-internal-upgrade-004" + "path": "../tools/build/python-dist/dvp-tools-1.1.0-internal-006.tar.gz", + "version": "== 1.1.0-internal-006" } }, "develop": { @@ -72,11 +72,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21", - "sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742" + "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", + "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" ], "markers": "python_version < '3.8'", - "version": "==1.1.0" + "version": "==1.3.0" }, "more-itertools": { "hashes": [ @@ -89,10 +89,10 @@ }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", + "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" ], - "version": "==19.2" + "version": "==20.0" }, "pathlib2": { "hashes": [ @@ -111,25 +111,25 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pyparsing": { "hashes": [ - "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", - 
"sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" ], - "version": "==2.4.5" + "version": "==2.4.6" }, "pytest": { "hashes": [ - "sha256:5d0d20a9a66e39b5845ab14f8989f3463a7aa973700e6cdf02db69da9821e738", - "sha256:692d9351353ef709c1126266579edd4fd469dcf6b5f4f583050f72161d6f3592" + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], "index": "delphix", - "version": "==4.6.6" + "version": "==4.6.9" }, "scandir": { "hashes": [ @@ -157,10 +157,10 @@ }, "wcwidth": { "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "zipp": { "hashes": [ diff --git a/dvp/lock.dev-requirements.txt b/dvp/lock.dev-requirements.txt index 7413fd8b..934529f4 100644 --- a/dvp/lock.dev-requirements.txt +++ b/dvp/lock.dev-requirements.txt @@ -4,15 +4,15 @@ attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==1.1.0 ; python_version < '3.8' +importlib-metadata==1.3.0 ; python_version < '3.8' more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.2 +packaging==20.0 pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 -py==1.8.0 -pyparsing==2.4.5 -pytest==4.6.6 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.7 +wcwidth==0.1.8 zipp==0.6.0 diff --git a/dvp/lock.requirements.txt b/dvp/lock.requirements.txt index 84933c21..a3aa6671 100644 --- a/dvp/lock.requirements.txt +++ b/dvp/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-004.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-004.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-004.tar.gz -./../tools/build/python-dist/dvp-tools-1.1.0-internal-upgrade-004.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-006.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-006.tar.gz +./../tools/build/python-dist/dvp-tools-1.1.0-internal-006.tar.gz diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 3c0fb470..6bf7224d 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "db763774d6ae530ab99cff6f933166d3d201eab1927161a9441cdad48db99a65" + "sha256": "1d74a651ce57525f8698ea70fbc7adcf9df8ec915f091b4b42b592d7ed5a2b5f" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz", - "version": "== 1.1.0-internal-upgrade-005" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz", + "version": "== 1.1.0-internal-upgrade-006" }, "protobuf": { "hashes": [ @@ -84,7 +84,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", 
"sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { @@ -114,17 +114,17 @@ }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", + "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" ], - "version": "==19.2" + "version": "==20.0" }, "pathlib2": { "hashes": [ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -136,25 +136,25 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pyparsing": { "hashes": [ - "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", - "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" ], - "version": "==2.4.5" + "version": "==2.4.6" }, "pytest": { "hashes": [ - "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", - "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], "index": "delphix", - "version": "==4.6.7" + "version": "==4.6.9" }, "scandir": { "hashes": [ @@ -182,10 +182,10 @@ }, "wcwidth": { "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "zipp": { "hashes": [ diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index e52d3dd1..97327492 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,17 +3,17 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==1.3.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.2 -pathlib2==2.3.5 ; python_version < '3.6' +packaging==20.0 +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 -py==1.8.0 -pyparsing==2.4.5 -pytest==4.6.7 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.7 +wcwidth==0.1.8 zipp==0.6.0 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 62dd0d32..94057d8b 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ 
-1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 8335c2e6..a93f32a3 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "91499a72181105c414d96700acae980dcaa952887ec4e4e9417f679c76c83c91" + "sha256": "d74f43c96bc68d83b527b9f2b93e47005e2e2d68a68ee4a2350dabf535defb04" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz", - "version": "== 1.1.0-internal-upgrade-005" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz", + "version": "== 1.1.0-internal-upgrade-006" }, "enum34": { "hashes": [ @@ -125,17 +125,17 @@ }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", + "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" ], - "version": "==19.2" + "version": "==20.0" }, "pathlib2": { "hashes": [ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -147,25 +147,25 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pyparsing": { "hashes": [ - "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", - "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" ], - "version": "==2.4.5" + "version": "==2.4.6" }, "pytest": { "hashes": [ - "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", - "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], "index": "delphix", - "version": "==4.6.7" + "version": "==4.6.9" }, "scandir": { "hashes": [ @@ -193,10 +193,10 @@ }, "wcwidth": { "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "zipp": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index e65a7a1b..97327492 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -7,13 
+7,13 @@ funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==1.3.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.2 -pathlib2==2.3.5 ; python_version < '3.6' +packaging==20.0 +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 -py==1.8.0 -pyparsing==2.4.5 -pytest==4.6.7 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.7 +wcwidth==0.1.8 zipp==0.6.0 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 0c6004db..f3af65a9 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.13.0 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index 4c83fc13..d4ce5ecc 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "22e7e3329355eeacfe7f924851cc185ee70c5c2d3017ce51b7f09ef468cc4328" + "sha256": "42064f94b54676d45d53d5dc8c364b5ff7cf3a53b98dd357be5e83f8df7ace5a" }, "pipfile-spec": 6, "requires": {}, @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -98,7 +98,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "idna": { @@ -226,26 +226,26 @@ }, "pyrsistent": { "hashes": [ - "sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b" + "sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280" ], - "version": "==0.15.6" + "version": "==0.15.7" }, "pyyaml": { "hashes": [ - "sha256:0e7f69397d53155e55d10ff68fdfb2cf630a35e6daf65cf0bdeaf04f127c09dc", - "sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803", - "sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc", - "sha256:38a4f0d114101c58c0f3a88aeaa44d63efd588845c5a2df5290b73db8f246d15", - "sha256:483eb6a33b671408c8529106df3707270bfacb2447bf8ad856a4b4f57f6e3075", - "sha256:4b6be5edb9f6bb73680f5bf4ee08ff25416d1400fbd4535fe0069b2994da07cd", - "sha256:7f38e35c00e160db592091751d385cd7b3046d6d51f578b29943225178257b31", - "sha256:8100c896ecb361794d8bfdb9c11fce618c7cf83d624d73d5ab38aef3bc82d43f", - "sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c", - "sha256:e4c015484ff0ff197564917b4b4246ca03f411b9bd7f16e02a2f586eb48b6d04", - "sha256:ebc4ed52dcc93eeebeae5cf5deb2ae4347b3a81c3fa12b0b8c976544829396a4" + "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6", + "sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf", + "sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5", + "sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e", + "sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811", + "sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e", + 
"sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d", + "sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20", + "sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689", + "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994", + "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615" ], "index": "delphix", - "version": "==5.2" + "version": "==5.3" }, "requests": { "hashes": [ @@ -331,7 +331,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -344,52 +344,52 @@ }, "coverage": { "hashes": [ - "sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351", - "sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd", - "sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde", - "sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898", - "sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070", - "sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e", - "sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8", - "sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0", - "sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02", - "sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798", - "sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466", - "sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be", - "sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d", - "sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6", - "sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207", - "sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d", - "sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b", - "sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a", - "sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b", - "sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be", - "sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72", - "sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d", - "sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864", - "sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f", - "sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f", - "sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e", - "sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1", - "sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c", - "sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca", - "sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db", - "sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c" + "sha256:189aac76d6e0d7af15572c51892e7326ee451c076c5a50a9d266406cd6c49708", + "sha256:1bf7ba2af1d373a1750888724f84cffdfc697738f29a353c98195f98fc011509", + "sha256:1f4ee8e2e4243971618bc16fcc4478317405205f135e95226c2496e2a3b8dbbf", + "sha256:225e79a5d485bc1642cb7ba02281419c633c216cdc6b26c26494ba959f09e69f", + 
"sha256:23688ff75adfa8bfa2a67254d889f9bdf9302c27241d746e17547c42c732d3f4", + "sha256:28f7f73b34a05e23758e860a89a7f649b85c6749e252eff60ebb05532d180e86", + "sha256:2d0cb9b1fe6ad0d915d45ad3d87f03a38e979093a98597e755930db1f897afae", + "sha256:47874b4711c5aeb295c31b228a758ce3d096be83dc37bd56da48ed99efb8813b", + "sha256:511ec0c00840e12fb4e852e4db58fa6a01ca4da72f36a9766fae344c3d502033", + "sha256:53e7438fef0c97bc248f88ba1edd10268cd94d5609970aaf87abbe493691af87", + "sha256:569f9ee3025682afda6e9b0f5bb14897c0db03f1a1dc088b083dd36e743f92bb", + "sha256:593853aa1ac6dcc6405324d877544c596c9d948ef20d2e9512a0f5d2d3202356", + "sha256:5b0a07158360d22492f9abd02a0f2ee7981b33f0646bf796598b7673f6bbab14", + "sha256:7ca3db38a61f3655a2613ee2c190d63639215a7a736d3c64cc7bbdb002ce6310", + "sha256:7d1cc7acc9ce55179616cf72154f9e648136ea55987edf84addbcd9886ffeba2", + "sha256:88b51153657612aea68fa684a5b88037597925260392b7bb4509d4f9b0bdd889", + "sha256:955ec084f549128fa2702f0b2dc696392001d986b71acd8fd47424f28289a9c3", + "sha256:b251c7092cbb6d789d62dc9c9e7c4fb448c9138b51285c36aeb72462cad3600e", + "sha256:bd82b684bb498c60ef47bb1541a50e6d006dde8579934dcbdbc61d67d1ea70d9", + "sha256:bfe102659e2ec13b86c7f3b1db6c9a4e7beea4255058d006351339e6b342d5d2", + "sha256:c1e4e39e43057396a5e9d069bfbb6ffeee892e40c5d2effbd8cd71f34ee66c4d", + "sha256:cb2b74c123f65e8166f7e1265829a6c8ed755c3cd16d7f50e75a83456a5f3fd7", + "sha256:cca38ded59105f7705ef6ffe1e960b8db6c7d8279c1e71654a4775ab4454ca15", + "sha256:cf908840896f7aa62d0ec693beb53264b154f972eb8226fb864ac38975590c4f", + "sha256:d095a7b473f8a95f7efe821f92058c8a2ecfb18f8db6677ae3819e15dc11aaae", + "sha256:d22b4297e7e4225ccf01f1aa55e7a96412ea0796b532dd614c3fcbafa341128e", + "sha256:d4a2b578a7a70e0c71f662705262f87a456f1e6c1e40ada7ea699abaf070a76d", + "sha256:ddeb42a3d5419434742bf4cc71c9eaa22df3b76808e23a82bd0b0bd360f1a9f1", + "sha256:e65a5aa1670db6263f19fdc03daee1d7dbbadb5cb67fd0a1f16033659db13c1d", + "sha256:eaad65bd20955131bcdb3967a4dea66b4e4d4ca488efed7c00d91ee0173387e8", + "sha256:f45fba420b94165c17896861bb0e8b27fb7abdcedfeb154895d8553df90b7b00" ], "index": "delphix", - "version": "==5.0" + "version": "==5.0.2" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz", - "version": "== 1.1.0-internal-upgrade-005" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz", + "version": "== 1.1.0-internal-upgrade-006" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-005.tar.gz", - "version": "== 1.1.0-internal-upgrade-005" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-006.tar.gz", + "version": "== 1.1.0-internal-upgrade-006" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-005.tar.gz", - "version": "== 1.1.0-internal-upgrade-005" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-006.tar.gz", + "version": "== 1.1.0-internal-upgrade-006" }, "entrypoints": { "hashes": [ @@ -430,7 +430,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { @@ -489,10 +489,10 @@ }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + 
"sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", + "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" ], - "version": "==19.2" + "version": "==20.0" }, "pathlib2": { "hashes": [ @@ -511,10 +511,10 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pycodestyle": { "hashes": [ @@ -532,18 +532,18 @@ }, "pyparsing": { "hashes": [ - "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", - "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" ], - "version": "==2.4.5" + "version": "==2.4.6" }, "pytest": { "hashes": [ - "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", - "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], "index": "delphix", - "version": "==4.6.7" + "version": "==4.6.9" }, "pytest-cov": { "hashes": [ @@ -588,10 +588,10 @@ }, "wcwidth": { "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "yapf": { "hashes": [ diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 1372f14e..50f7fbfd 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,18 +1,18 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-005.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-005.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-005.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-006.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-006.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version < '3.2' contextlib2==0.6.0.post1 ; python_version < '3' -coverage==5.0 +coverage==5.0.2 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==1.3.0 ; python_version < '3.8' @@ -20,18 +20,18 @@ isort==4.3.21 mccabe==0.6.1 mock==3.0.5 more-itertools==5.0.0 -packaging==19.2 +packaging==20.0 pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 -py==1.8.0 +py==1.8.1 pycodestyle==2.5.0 pyflakes==2.1.1 -pyparsing==2.4.5 +pyparsing==2.4.6 pytest-cov==2.8.1 -pytest==4.6.7 +pytest==4.6.9 
scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' -wcwidth==0.1.7 +wcwidth==0.1.8 yapf==0.28 zipp==0.6.0 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 2cef9f26..a4ba8740 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -4,12 +4,12 @@ certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version < '3' +configparser==4.0.2 ; python_version < '3.2' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' idna==2.8 importlib-metadata==1.3.0 ; python_version < '3.8' jinja2==2.10.3 @@ -21,8 +21,8 @@ pathlib2==2.3.5 ; python_version < '3' protobuf==3.6.1 pycodestyle==2.5.0 pyflakes==2.1.1 -pyrsistent==0.15.6 -pyyaml==5.2 +pyrsistent==0.15.7 +pyyaml==5.3 requests==2.22.0 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/cli.py b/tools/src/main/python/dlpx/virtualization/_internal/cli.py index aef8caf9..a71c6343 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/cli.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/cli.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -9,9 +9,8 @@ from contextlib import contextmanager import click -from dlpx.virtualization._internal import (click_util, exceptions, - logging_util, package_util, - util_classes) +from dlpx.virtualization._internal import (click_util, const, exceptions, + logging_util, package_util) from dlpx.virtualization._internal.commands import build as build_internal from dlpx.virtualization._internal.commands import \ download_logs as download_logs_internal @@ -109,20 +108,19 @@ def delphix_sdk(verbose, quiet): @click.option( '-s', '--ingestion-strategy', - default=util_classes.DIRECT_TYPE, + default=const.DIRECT_TYPE, show_default=True, - type=click.Choice([util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE], + type=click.Choice([const.DIRECT_TYPE, const.STAGED_TYPE], case_sensitive=False), help=('Set the ingestion strategy of the plugin. A "direct" plugin ' 'ingests without a staging server while a "staged" plugin ' 'requires a staging server.')) @click.option('-t', '--host-type', - default=util_classes.UNIX_HOST_TYPE, + default=const.UNIX_HOST_TYPE, show_default=True, - type=click.Choice([ - util_classes.UNIX_HOST_TYPE, util_classes.WINDOWS_HOST_TYPE - ]), + type=click.Choice( + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]), help='Set the host platform supported by the plugin.') def init(root, ingestion_strategy, name, host_type): """ diff --git a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py index c8b11d0f..9eda39b7 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import copy @@ -10,7 +10,7 @@ import shutil import subprocess -from dlpx.virtualization._internal import exceptions, file_util, util_classes +from dlpx.virtualization._internal import const, exceptions, file_util logger = logging.getLogger(__name__) UNKNOWN_ERR = 'UNKNOWN_ERR' @@ -77,7 +77,7 @@ def generate_python(name, source_dir, plugin_config_dir, schema_content): # relevant to the plugin writer. We want to always force this to be # recreated. # - output_dir = os.path.join(plugin_config_dir, util_classes.OUTPUT_DIR_NAME) + output_dir = os.path.join(plugin_config_dir, const.OUTPUT_DIR_NAME) logger.info('Creating new output directory: {}'.format(output_dir)) file_util.make_dir(output_dir, True) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py index 9e805c8b..05b8eff4 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -10,8 +10,8 @@ import jinja2 import yaml -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - plugin_util, util_classes) +from dlpx.virtualization._internal import (codegen, const, exceptions, + file_util, plugin_util) logger = logging.getLogger(__name__) @@ -163,15 +163,15 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy, host_type): template = env.get_template(ENTRY_POINT_TEMPLATE_NAME) - if host_type == util_classes.WINDOWS_HOST_TYPE: + if host_type == const.WINDOWS_HOST_TYPE: default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" - elif host_type == util_classes.UNIX_HOST_TYPE: + elif host_type == const.UNIX_HOST_TYPE: default_mount_path = "/tmp/dlpx_staged_mounts/{}" - if ingestion_strategy == util_classes.DIRECT_TYPE: + if ingestion_strategy == const.DIRECT_TYPE: linked_operations = env.get_template( DIRECT_OPERATIONS_TEMPLATE_NAME).render() - elif ingestion_strategy == util_classes.STAGED_TYPE: + elif ingestion_strategy == const.STAGED_TYPE: linked_operations = env.get_template( STAGED_OPERATIONS_TEMPLATE_NAME).render( default_mount_path=default_mount_path) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/const.py b/tools/src/main/python/dlpx/virtualization/_internal/const.py new file mode 100644 index 00000000..2022af51 --- /dev/null +++ b/tools/src/main/python/dlpx/virtualization/_internal/const.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. 
+# + +import os + +UNIX_HOST_TYPE = 'UNIX' +WINDOWS_HOST_TYPE = 'WINDOWS' +STAGED_TYPE = 'STAGED' +DIRECT_TYPE = 'DIRECT' + +OUTPUT_DIR_NAME = '.dvp-gen-output' +PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), + 'validation_schemas') +PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, + 'plugin_config_schema.json') + +PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( + PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') + +PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index f684ee36..8924de04 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import importlib import inspect @@ -10,17 +10,47 @@ from multiprocessing import Process, Queue import yaml -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE -from dlpx.virtualization._internal.util_classes import MessageUtils from flake8.api import legacy as flake8 logger = logging.getLogger(__name__) -PLUGIN_IMPORTER_YAML = os.path.join(util_classes.PLUGIN_SCHEMAS_DIR, +PLUGIN_IMPORTER_YAML = os.path.join(const.PLUGIN_SCHEMAS_DIR, 'plugin_importer.yaml') +class MessageUtils: + """ + Defines helpers methods to format warning and exception messages. + """ + @classmethod + def sdk_exception_msg(cls, warnings): + sdk_exception_msg = '\n'.join([ + cls.__format_msg('SDK Error', ex) + for ex in warnings['sdk exception'] + ]) + return sdk_exception_msg + + @classmethod + def exception_msg(cls, exceptions): + exception_msg = '\n'.join( + cls.__format_msg('Error', ex) for ex in exceptions['exception']) + return exception_msg + + @classmethod + def warning_msg(cls, warnings): + warning_msg = '\n'.join( + cls.__format_msg('Warning', warning) + for warning in warnings['warning']) + return warning_msg + + @staticmethod + def __format_msg(msg_type, msg): + msg_str = "{}: {}".format(msg_type, msg) + return msg_str + + def load_validation_maps(): """ Reads a plugin config file and raises UserError if there is an issue @@ -462,7 +492,7 @@ def _check_args(method_name, expected_args, actual_args): def _lookup_expected_args(plugin_type, plugin_op_type, plugin_op_name): - if plugin_type == util_classes.DIRECT_TYPE: + if plugin_type == const.DIRECT_TYPE: return PluginImporter.expected_direct_args_by_op[plugin_op_type][ plugin_op_name] else: diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 98c648cc..06497cbd 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -1,19 +1,31 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# +import enum import logging import os from contextlib import contextmanager -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.plugin_validator import PluginValidator from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode logger = logging.getLogger(__name__) +class ValidationMode(enum.Enum): + """ + Defines the validation mode that validator uses. + INFO - validator will give out info messages if validation fails. + WARNING - validator will log a warning if validation fails. + ERROR - validator will raise an exception if validation fails. + """ + INFO = 1 + WARNING = 2 + ERROR = 3 + + @contextmanager def validate_error_handler(plugin_file, validation_mode): try: @@ -42,9 +54,9 @@ def validate_plugin_config_file(plugin_config, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) + plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION + if skip_id_validation else + const.PLUGIN_CONFIG_SCHEMA) validator = PluginValidator(plugin_config, plugin_config_schema_file) with validate_error_handler(plugin_config, validation_mode): @@ -65,9 +77,9 @@ def get_plugin_manifest(plugin_config_file, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) + plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION + if skip_id_validation else + const.PLUGIN_CONFIG_SCHEMA) validator = PluginValidator.from_config_content(plugin_config_file, plugin_config_content, plugin_config_schema_file) @@ -88,7 +100,7 @@ def validate_schema_file(schema_file, stop_build): """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) with validate_error_handler(schema_file, validation_mode): validator.validate() diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 45c16bd5..1bd7106f 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # # @@ -26,7 +26,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-005 +package_version = 1.1.0-internal-upgrade-006 virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py b/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py deleted file mode 100644 index ef2132ad..00000000 --- a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. 
-# - -import enum -import os - -UNIX_HOST_TYPE = 'UNIX' -WINDOWS_HOST_TYPE = 'WINDOWS' -STAGED_TYPE = 'STAGED' -DIRECT_TYPE = 'DIRECT' - -OUTPUT_DIR_NAME = '.dvp-gen-output' -PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), - 'validation_schemas') -PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, - 'plugin_config_schema.json') - -# -# This is a temporary file. Once blackbox has made the transition to 'id' -# instead of 'name' and uses UUIDs for the id, this, and everything -# associated with it can be removed. -# -PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( - PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') - -PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') - - -class ValidationMode(enum.Enum): - """ - Defines the validation mode that validator uses. - INFO - validator will give out info messages if validation fails. - WARNING - validator will log a warning if validation fails. - ERROR - validator will raise an exception if validation fails. - """ - INFO = 1 - WARNING = 2 - ERROR = 3 - - -class MessageUtils: - """ - Defines helpers methods to format warning and exception messages. - """ - @staticmethod - def sdk_exception_msg(warnings): - sdk_exception_msg = '\n'.join([ - MessageUtils.__format_msg('SDK Error', ex) - for ex in warnings['sdk exception'] - ]) - return sdk_exception_msg - - @staticmethod - def exception_msg(warnings): - exception_msg = '\n'.join([ - MessageUtils.__format_msg('Error', ex) - for ex in warnings['exception'] - ]) - return exception_msg - - @staticmethod - def warning_msg(warnings): - warning_msg = '\n'.join([ - MessageUtils.__format_msg('Warning', warning) - for warning in warnings['warning'] - ]) - return warning_msg - - @staticmethod - def __format_msg(msg_type, msg): - msg_str = "{}: {}".format(msg_type, msg) - return msg_str diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 44c7f4db..8478827e 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import json @@ -8,7 +8,7 @@ import mock import pytest import yaml -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.commands import build from dlpx.virtualization._internal.plugin_validator import PluginValidator @@ -627,10 +627,10 @@ def test_manual_discovery_parameter(plugin_config_content, src_dir, @staticmethod def test_plugin_config_schemas_diff(): - with open(util_classes.PLUGIN_CONFIG_SCHEMA) as f: + with open(const.PLUGIN_CONFIG_SCHEMA) as f: config_schema = json.load(f) - with open(util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: + with open(const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: config_schema_no_id = json.load(f) # Only the id's pattern should be different so remove it. 
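For context on the refactor above: the old util_classes module is split into a plain const module (constants and schema paths) while MessageUtils moves into plugin_importer.py as classmethods. A rough, standalone sketch of what the relocated helpers produce — the dictionary below is made-up sample data, not output from the SDK; real callers pass the importer's collected result map:

    from dlpx.virtualization._internal.plugin_importer import MessageUtils

    # Hypothetical result map; the keys mirror the ones the class reads above.
    results = {
        'warning': ['named operation discovery.repository() is missing'],
        'exception': [ValueError('schema could not be parsed')],
        'sdk exception': [RuntimeError('wrapper call failed')],
    }

    print(MessageUtils.warning_msg(results))        # Warning: named operation discovery.repository() is missing
    print(MessageUtils.exception_msg(results))      # Error: schema could not be parsed
    print(MessageUtils.sdk_exception_msg(results))  # SDK Error: wrapper call failed

Each helper simply prefixes every entry in its category and joins them with newlines, so multi-entry maps come back as one multi-line message.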
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py index 30c64df4..adca36ce 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import errno @@ -8,8 +8,7 @@ import subprocess import pytest -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - util_classes) +from dlpx.virtualization._internal import codegen, const, exceptions, file_util class TestCodegen: @@ -139,7 +138,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): assert popen_helper.package_name == codegen.CODEGEN_PACKAGE assert popen_helper.module_name == codegen.CODEGEN_MODULE expected_output_dir = os.path.join(gen_py.plugin_content_dir, - util_classes.OUTPUT_DIR_NAME) + const.OUTPUT_DIR_NAME) assert popen_helper.output_dir == expected_output_dir # Validate that the "generated" file were copied. @@ -158,7 +157,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): @staticmethod def test_get_build_dir_success(tmpdir): - testdir = os.path.join(tmpdir.strpath, util_classes.OUTPUT_DIR_NAME) + testdir = os.path.join(tmpdir.strpath, const.OUTPUT_DIR_NAME) file_util.make_dir(testdir, True) assert os.path.exists(testdir) assert os.path.isdir(testdir) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index e208108b..633596e6 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import ast @@ -9,9 +9,8 @@ import jinja2 import mock import pytest -from dlpx.virtualization._internal import (exceptions, plugin_util, - plugin_validator, schema_validator, - util_classes) +from dlpx.virtualization._internal import (const, exceptions, plugin_util, + plugin_validator, schema_validator) from dlpx.virtualization._internal.commands import initialize as init @@ -48,14 +47,14 @@ def format_entry_point_template(entry_point_template): template = jinja2.Environment().from_string(entry_point_template) def format_template(plugin_name, ingestion_strategy, host_type): - if host_type == util_classes.WINDOWS_HOST_TYPE: + if host_type == const.WINDOWS_HOST_TYPE: default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" - elif host_type == util_classes.UNIX_HOST_TYPE: + elif host_type == const.UNIX_HOST_TYPE: default_mount_path = "/tmp/dlpx_staged_mounts/{}" - if ingestion_strategy == util_classes.DIRECT_TYPE: + if ingestion_strategy == const.DIRECT_TYPE: operations = direct_operations_template() - elif ingestion_strategy == util_classes.STAGED_TYPE: + elif ingestion_strategy == const.STAGED_TYPE: operations = jinja2.Environment().from_string( staged_operations_template()) operations = operations.render( @@ -73,12 +72,10 @@ def format_template(plugin_name, ingestion_strategy, host_type): class TestInitialize: @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) - @pytest.mark.parametrize( - 'host_type', - [util_classes.UNIX_HOST_TYPE, util_classes.WINDOWS_HOST_TYPE]) + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) + @pytest.mark.parametrize('host_type', + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]) def test_init(tmpdir, ingestion_strategy, host_type, schema_template, plugin_name, format_entry_point_template): # Initialize an empty directory. 
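The tests in this file call initialize.init directly with the new const values; the CLI's init command ends up in the same function. A minimal sketch, assuming an empty, writable target directory (the path below is illustrative only — the PathExistsError tests later in this file cover directories that already contain plugin files):

    from dlpx.virtualization._internal import const
    from dlpx.virtualization._internal.commands import initialize as init

    # Scaffold a DIRECT-ingestion, UNIX-host plugin named 'my_plugin' into an empty directory.
    init.init('/tmp/my_plugin_dir', const.DIRECT_TYPE, 'my_plugin', const.UNIX_HOST_TYPE)

After this call the directory holds the default plugin config, schema file, and src directory that the assertions below validate.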
@@ -118,8 +115,7 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, @staticmethod def test_init_without_plugin_name(tmpdir): - init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, "", - util_classes.UNIX_HOST_TYPE) + init.init(tmpdir.strpath, const.DIRECT_TYPE, "", const.UNIX_HOST_TYPE) result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), @@ -132,8 +128,8 @@ def test_init_without_plugin_name(tmpdir): @staticmethod def test_init_windows_plugin(tmpdir, plugin_name): - init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, plugin_name, - util_classes.WINDOWS_HOST_TYPE) + init.init(tmpdir.strpath, const.DIRECT_TYPE, plugin_name, + const.WINDOWS_HOST_TYPE) result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), True) @@ -142,16 +138,15 @@ def test_init_windows_plugin(tmpdir, plugin_name): # Validate that the host type is WINDOWS host_types = config['hostTypes'] assert len(host_types) == 1 - assert host_types[0] == util_classes.WINDOWS_HOST_TYPE + assert host_types[0] == const.WINDOWS_HOST_TYPE @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, plugin_name): init.init(tmpdir.strpath, ingestion_strategy, plugin_name, - util_classes.UNIX_HOST_TYPE) + const.UNIX_HOST_TYPE) plugin_config_file = os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE) @@ -181,21 +176,20 @@ def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, @staticmethod def test_invalid_with_config_file(plugin_config_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(plugin_config_file), - util_classes.DIRECT_TYPE, None, - util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(plugin_config_file), const.DIRECT_TYPE, + None, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_schema_file(schema_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(schema_file), util_classes.DIRECT_TYPE, - None, util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(schema_file), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_src_dir(src_dir): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(src_dir), util_classes.DIRECT_TYPE, None, - util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(src_dir), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod @mock.patch('yaml.dump') @@ -204,8 +198,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, tmpdir, plugin_name): mock_yaml_dump.side_effect = RuntimeError() with pytest.raises(exceptions.UserError): - init.init(tmpdir.strpath, util_classes.STAGED_TYPE, plugin_name, - util_classes.UNIX_HOST_TYPE) + init.init(tmpdir.strpath, const.STAGED_TYPE, plugin_name, + const.UNIX_HOST_TYPE) src_dir_path = os.path.join(tmpdir.strpath, init.DEFAULT_SRC_DIRECTORY) config_file_path = os.path.join(tmpdir.strpath, @@ -218,8 +212,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, @staticmethod def test_default_schema_definition(schema_template): - validator = schema_validator.SchemaValidator( - None, util_classes.PLUGIN_SCHEMA, schema_template) + validator = schema_validator.SchemaValidator(None, const.PLUGIN_SCHEMA, + schema_template) validator.validate() # 
Validate the repository schema only has the 'name' property. @@ -254,7 +248,7 @@ def test_default_schema_definition(schema_template): @staticmethod def test_default_entry_point(plugin_id): entry_point_contents = init._get_entry_point_contents( - plugin_id, util_classes.DIRECT_TYPE, util_classes.UNIX_HOST_TYPE) + plugin_id, const.DIRECT_TYPE, const.UNIX_HOST_TYPE) tree = ast.parse(entry_point_contents) for stmt in ast.walk(tree): if isinstance(stmt, ast.Assign): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 1dd7d81e..0cc8c0af 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import configparser @@ -9,8 +9,7 @@ import pytest import yaml -from dlpx.virtualization._internal import (cli, click_util, package_util, - util_classes) +from dlpx.virtualization._internal import cli, click_util, const, package_util # # conftest.py is used to share fixtures among multiple tests files. pytest will @@ -243,7 +242,7 @@ def host_types(): @pytest.fixture def plugin_type(): - return util_classes.DIRECT_TYPE + return const.DIRECT_TYPE @pytest.fixture diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py index 85123600..5afc609b 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import os @@ -8,7 +8,7 @@ import mock import pytest import yaml -from dlpx.virtualization._internal import cli, exceptions, util_classes +from dlpx.virtualization._internal import cli, const, exceptions class TestCli: @@ -106,10 +106,8 @@ def test_command_user_error(mock_init, plugin_name): assert result.output == 'codegen_error\n' # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.initialize.init') @@ -123,10 +121,8 @@ def test_command_non_user_error(mock_init, plugin_name): assert 'Internal error, please contact Delphix.\n' in result.output # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) class TestInitCli: @@ -140,26 +136,21 @@ def test_default_params(mock_init, plugin_name): assert result.exit_code == 0, 'Output: {}'.format(result.output) # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.initialize.init') def test_non_default_params(mock_init, plugin_name): runner = click_testing.CliRunner() - result = runner.invoke(cli.delphix_sdk, [ - 'init', '-s', util_classes.STAGED_TYPE, '-r', '.', '-n', - plugin_name - ]) + result = runner.invoke( + cli.delphix_sdk, + ['init', '-s', const.STAGED_TYPE, '-r', '.', '-n', plugin_name]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_init.assert_called_once_with(os.getcwd(), - util_classes.STAGED_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.STAGED_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_ingestion_strategy(plugin_name): @@ -184,8 +175,8 @@ def test_multiple_host_types(): runner = click_testing.CliRunner() result = runner.invoke(cli.delphix_sdk, [ - 'init', '-t', '{},{}'.format(util_classes.UNIX_HOST_TYPE, - util_classes.WINDOWS_HOST_TYPE) + 'init', '-t', '{},{}'.format(const.UNIX_HOST_TYPE, + const.WINDOWS_HOST_TYPE) ]) assert result.exit_code != 0 @@ -198,12 +189,10 @@ def test_windows_host_type(mock_init, plugin_name): result = runner.invoke( cli.delphix_sdk, - ['init', '-n', plugin_name, '-t', util_classes.WINDOWS_HOST_TYPE]) + ['init', '-n', plugin_name, '-t', const.WINDOWS_HOST_TYPE]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.WINDOWS_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.WINDOWS_HOST_TYPE) @staticmethod def test_invalid_host_type(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index f535ed92..09d23d0a 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ 
b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # from dlpx.virtualization._internal import package_util @@ -8,7 +8,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-005' + assert package_util.get_version() == '1.1.0-internal-upgrade-006' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 43c37915..ce988f99 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import json @@ -7,7 +7,7 @@ import mock import pytest -from dlpx.virtualization._internal import exceptions, plugin_util, util_classes +from dlpx.virtualization._internal import const, exceptions, plugin_util from dlpx.virtualization._internal.plugin_validator import PluginValidator @@ -42,7 +42,7 @@ def test_plugin_bad_schema(plugin_config_file, plugin_config_content, def test_plugin_bad_config_file(plugin_config_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() message = err_info.value.message @@ -59,7 +59,7 @@ def test_plugin_valid_content(mock_import_plugin, src_dir, plugin_config_file, plugin_config_content): validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() @@ -70,7 +70,7 @@ def test_plugin_missing_field(plugin_config_file, plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @@ -90,7 +90,7 @@ def test_plugin_version_format(mock_import_plugin, src_dir, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: @@ -116,7 +116,7 @@ def test_plugin_entry_point(mock_import_plugin, src_dir, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: @@ -132,7 +132,7 @@ def test_plugin_additional_properties(src_dir, plugin_config_file, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() except exceptions.SchemaValidationError as 
err_info: message = err_info.message @@ -147,7 +147,7 @@ def test_multiple_validation_errors(plugin_config_file, with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @@ -170,7 +170,7 @@ def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() mock_import_plugin.assert_called() except exceptions.SchemaValidationError as err_info: @@ -198,7 +198,7 @@ def test_successful_validation(mock_file_util, plugin_config_file, mock_file_util.return_value = fake_src_dir validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() @staticmethod @@ -224,7 +224,7 @@ def test_multiple_warnings(mock_file_util, plugin_config_file, with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() message = err_info.value.message @@ -247,7 +247,7 @@ def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() message = err_info.value.message @@ -274,7 +274,7 @@ def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() message = err_info.value.message @@ -289,7 +289,7 @@ def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): with pytest.raises(exceptions.SDKToolingError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA) + const.PLUGIN_CONFIG_SCHEMA) validator.validate_plugin_module() message = err_info.value.message diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py index 99ea6761..2b064b57 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py @@ -1,12 +1,12 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import json import os import pytest -from dlpx.virtualization._internal import exceptions, plugin_util, util_classes +from dlpx.virtualization._internal import const, exceptions, plugin_util from dlpx.virtualization._internal.schema_validator import SchemaValidator @@ -29,8 +29,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): def test_bad_schema_file(schema_file): os.remove(schema_file) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -40,7 +39,7 @@ def test_bad_schema_file(schema_file): @staticmethod def test_valid_schema(schema_file): - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -65,7 +64,7 @@ def test_missing_root_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. # - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -87,8 +86,7 @@ def test_missing_root_type(schema_file): }]) def test_bad_root_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -113,8 +111,7 @@ def test_bad_root_type_num(schema_file): }]) def test_bad_root_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -138,8 +135,7 @@ def test_bad_root_type(schema_file): }]) def test_missing_identity_fields(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -163,8 +159,7 @@ def test_missing_identity_fields(schema_file): }]) def test_missing_name_field(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -191,7 +186,7 @@ def test_missing_sub_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -213,8 +208,7 @@ def test_missing_sub_type(schema_file): }]) def test_bad_sub_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -239,8 +233,7 @@ def test_bad_sub_type(schema_file): }]) def test_bad_sub_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -267,8 +260,7 @@ def test_missing_required_field(schema_file): # pytest.skip("required fields validation is not working yet") with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -292,8 +284,7 @@ def test_missing_required_field(schema_file): }]) def test_multiple_validation_errors(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -359,8 +350,7 @@ def test_bad_sub_type_info_warn_mode(schema_file): }]) def test_bad_type_in_array(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message From b9ab61e6e4da6e2679821fa3deb95769f5ddddfa Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Mon, 13 Jan 2020 21:55:07 -0800 Subject: [PATCH 09/25] PYT-836 Plugin Versioning - Add build number in plugin config PYT-837 Plugin Versioning - New build number field should be added on init Reviewed at: http://reviews.delphix.com/r/55151/ --- build.gradle | 2 +- libs/Pipfile.lock | 12 +- libs/lock.dev-requirements.txt | 2 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 12 +- platform/lock.dev-requirements.txt | 2 +- platform/lock.requirements.txt | 2 +- tools/Pipfile.lock | 110 +++++++++--------- tools/lock.dev-requirements.txt | 16 +-- tools/lock.requirements.txt | 8 +- .../_internal/commands/build.py | 3 + .../_internal/commands/initialize.py | 12 +- .../virtualization/_internal/plugin_util.py | 20 ++++ .../virtualization/_internal/settings.cfg | 2 +- .../plugin_config_schema.json | 6 +- ...plugin_config_schema_no_id_validation.json | 6 +- .../_internal/commands/test_build.py | 14 +++ .../_internal/commands/test_initialize.py | 2 + .../dlpx/virtualization/_internal/conftest.py | 12 +- .../_internal/test_package_util.py | 2 +- .../_internal/test_plugin_validator.py | 29 +++++ 21 files changed, 183 insertions(+), 93 deletions(-) diff --git a/build.gradle b/build.gradle index 014334e7..8b586807 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-006" + version = "1.1.0-internal-upgrade-007" } def binDir = 
"${rootProject.projectDir}/bin" diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 6bf7224d..764d494a 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "1d74a651ce57525f8698ea70fbc7adcf9df8ec915f091b4b42b592d7ed5a2b5f" + "sha256": "34adfd1e70f92441f7d38a1dda2721eccceaea68cd5a93253880ba113c5fa659" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz", - "version": "== 1.1.0-internal-upgrade-006" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz", + "version": "== 1.1.0-internal-upgrade-007" }, "protobuf": { "hashes": [ @@ -89,11 +89,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "mock": { "hashes": [ diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 97327492..3ae6dd54 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -4,7 +4,7 @@ attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==1.3.0 ; python_version < '3.8' +importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.0 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 94057d8b..46376d01 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index a93f32a3..c59b09d2 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "d74f43c96bc68d83b527b9f2b93e47005e2e2d68a68ee4a2350dabf535defb04" + "sha256": "0f136bc5b2db4e615922803c9c64db51d15393994f702cb8ae85ce002b759f6e" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz", - "version": "== 1.1.0-internal-upgrade-006" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz", + "version": "== 1.1.0-internal-upgrade-007" }, "enum34": { "hashes": [ @@ -100,11 +100,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "mock": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 97327492..3ae6dd54 100644 --- a/platform/lock.dev-requirements.txt +++ 
b/platform/lock.dev-requirements.txt @@ -4,7 +4,7 @@ attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==1.3.0 ; python_version < '3.8' +importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.0 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index f3af65a9..7483e8f5 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.13.0 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index d4ce5ecc..20f5c530 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "42064f94b54676d45d53d5dc8c364b5ff7cf3a53b98dd357be5e83f8df7ace5a" + "sha256": "5f2c350939cd9d95c60e40e233d28425a054d87b2c5116b5b51aab4d532b7e06" }, "pipfile-spec": 6, "requires": {}, @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version == '2.7'", "version": "==4.0.2" }, "contextlib2": { @@ -98,7 +98,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "idna": { @@ -110,11 +110,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "jinja2": { "hashes": [ @@ -297,10 +297,10 @@ }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } }, "develop": { @@ -331,7 +331,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version == '2.7'", "version": "==4.0.2" }, "contextlib2": { @@ -344,52 +344,52 @@ }, "coverage": { "hashes": [ - "sha256:189aac76d6e0d7af15572c51892e7326ee451c076c5a50a9d266406cd6c49708", - "sha256:1bf7ba2af1d373a1750888724f84cffdfc697738f29a353c98195f98fc011509", - "sha256:1f4ee8e2e4243971618bc16fcc4478317405205f135e95226c2496e2a3b8dbbf", - "sha256:225e79a5d485bc1642cb7ba02281419c633c216cdc6b26c26494ba959f09e69f", - "sha256:23688ff75adfa8bfa2a67254d889f9bdf9302c27241d746e17547c42c732d3f4", - "sha256:28f7f73b34a05e23758e860a89a7f649b85c6749e252eff60ebb05532d180e86", - 
"sha256:2d0cb9b1fe6ad0d915d45ad3d87f03a38e979093a98597e755930db1f897afae", - "sha256:47874b4711c5aeb295c31b228a758ce3d096be83dc37bd56da48ed99efb8813b", - "sha256:511ec0c00840e12fb4e852e4db58fa6a01ca4da72f36a9766fae344c3d502033", - "sha256:53e7438fef0c97bc248f88ba1edd10268cd94d5609970aaf87abbe493691af87", - "sha256:569f9ee3025682afda6e9b0f5bb14897c0db03f1a1dc088b083dd36e743f92bb", - "sha256:593853aa1ac6dcc6405324d877544c596c9d948ef20d2e9512a0f5d2d3202356", - "sha256:5b0a07158360d22492f9abd02a0f2ee7981b33f0646bf796598b7673f6bbab14", - "sha256:7ca3db38a61f3655a2613ee2c190d63639215a7a736d3c64cc7bbdb002ce6310", - "sha256:7d1cc7acc9ce55179616cf72154f9e648136ea55987edf84addbcd9886ffeba2", - "sha256:88b51153657612aea68fa684a5b88037597925260392b7bb4509d4f9b0bdd889", - "sha256:955ec084f549128fa2702f0b2dc696392001d986b71acd8fd47424f28289a9c3", - "sha256:b251c7092cbb6d789d62dc9c9e7c4fb448c9138b51285c36aeb72462cad3600e", - "sha256:bd82b684bb498c60ef47bb1541a50e6d006dde8579934dcbdbc61d67d1ea70d9", - "sha256:bfe102659e2ec13b86c7f3b1db6c9a4e7beea4255058d006351339e6b342d5d2", - "sha256:c1e4e39e43057396a5e9d069bfbb6ffeee892e40c5d2effbd8cd71f34ee66c4d", - "sha256:cb2b74c123f65e8166f7e1265829a6c8ed755c3cd16d7f50e75a83456a5f3fd7", - "sha256:cca38ded59105f7705ef6ffe1e960b8db6c7d8279c1e71654a4775ab4454ca15", - "sha256:cf908840896f7aa62d0ec693beb53264b154f972eb8226fb864ac38975590c4f", - "sha256:d095a7b473f8a95f7efe821f92058c8a2ecfb18f8db6677ae3819e15dc11aaae", - "sha256:d22b4297e7e4225ccf01f1aa55e7a96412ea0796b532dd614c3fcbafa341128e", - "sha256:d4a2b578a7a70e0c71f662705262f87a456f1e6c1e40ada7ea699abaf070a76d", - "sha256:ddeb42a3d5419434742bf4cc71c9eaa22df3b76808e23a82bd0b0bd360f1a9f1", - "sha256:e65a5aa1670db6263f19fdc03daee1d7dbbadb5cb67fd0a1f16033659db13c1d", - "sha256:eaad65bd20955131bcdb3967a4dea66b4e4d4ca488efed7c00d91ee0173387e8", - "sha256:f45fba420b94165c17896861bb0e8b27fb7abdcedfeb154895d8553df90b7b00" + "sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3", + "sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c", + "sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0", + "sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477", + "sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a", + "sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf", + "sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691", + "sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73", + "sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987", + "sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894", + "sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e", + "sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef", + "sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf", + "sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68", + "sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8", + "sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954", + "sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2", + "sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40", + "sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc", + "sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc", + "sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e", + 
"sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d", + "sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f", + "sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc", + "sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301", + "sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea", + "sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb", + "sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af", + "sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52", + "sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37", + "sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0" ], "index": "delphix", - "version": "==5.0.2" + "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz", - "version": "== 1.1.0-internal-upgrade-006" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz", + "version": "== 1.1.0-internal-upgrade-007" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-006.tar.gz", - "version": "== 1.1.0-internal-upgrade-006" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-007.tar.gz", + "version": "== 1.1.0-internal-upgrade-007" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-006.tar.gz", - "version": "== 1.1.0-internal-upgrade-006" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-007.tar.gz", + "version": "== 1.1.0-internal-upgrade-007" }, "entrypoints": { "hashes": [ @@ -430,7 +430,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "futures": { @@ -450,11 +450,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "isort": { "hashes": [ @@ -603,10 +603,10 @@ }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } } } diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 50f7fbfd..7d73c06f 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,21 +1,21 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-006.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-006.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-006.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-007.tar.gz 
+./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-007.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3.2' +configparser==4.0.2 ; python_version == '2.7' contextlib2==0.6.0.post1 ; python_version < '3' -coverage==5.0.2 +coverage==5.0.3 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3.2' +functools32==3.2.3.post2 ; python_version < '3' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 -importlib-metadata==1.3.0 ; python_version < '3.8' +importlib-metadata==1.4.0 ; python_version < '3.8' isort==4.3.21 mccabe==0.6.1 mock==3.0.5 @@ -34,4 +34,4 @@ six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' wcwidth==0.1.8 yapf==0.28 -zipp==0.6.0 +zipp==1.0.0 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index a4ba8740..8ac140fe 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -4,14 +4,14 @@ certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version < '3.2' +configparser==4.0.2 ; python_version == '2.7' contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3.2' +functools32==3.2.3.post2 ; python_version < '3' idna==2.8 -importlib-metadata==1.3.0 ; python_version < '3.8' +importlib-metadata==1.4.0 ; python_version < '3.8' jinja2==2.10.3 jsonschema==3.2.0 markupsafe==1.1.1 @@ -28,4 +28,4 @@ scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' urllib3==1.25.7 -zipp==0.6.0 +zipp==1.0.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py index 87aec7f8..41fc5ede 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py @@ -186,6 +186,9 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): plugin_config_content['hostTypes'], 'entryPoint': plugin_config_content['entryPoint'], + 'buildNumber': + plugin_util.get_standardized_build_number( + plugin_config_content['buildNumber']), 'buildApi': package_util.get_build_api_version(), 'engineApi': diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py index 05b8eff4..5bb1e955 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py @@ -23,6 +23,7 @@ DEFAULT_ENTRY_POINT_SYMBOL = 'plugin' DEFAULT_ENTRY_POINT = '{}:{}'.format(DEFAULT_ENTRY_POINT_FILE[:-3], DEFAULT_ENTRY_POINT_SYMBOL) +DEFAULT_VERSION = '0.1.0' # Internal constants for the template directory. 
ENTRY_POINT_TEMPLATE_NAME = 'entry_point.py.template' @@ -122,7 +123,8 @@ def init(root, ingestion_strategy, name, host_type): config = _get_default_plugin_config(plugin_id, ingestion_strategy, name, DEFAULT_ENTRY_POINT, DEFAULT_SRC_DIRECTORY, - DEFAULT_SCHEMA_FILE, host_type) + DEFAULT_SCHEMA_FILE, host_type, + DEFAULT_VERSION) yaml.dump(config, f, default_flow_style=False) # @@ -187,7 +189,7 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy, host_type): def _get_default_plugin_config(plugin_id, ingestion_strategy, name, entry_point, src_dir_path, schema_file_path, - host_type): + host_type, default_version): """ Returns a valid plugin configuration as an OrderedDict. @@ -208,12 +210,14 @@ def _get_default_plugin_config(plugin_id, ingestion_strategy, name, # Ensure values are type 'str'. If they are type unicode yaml prints # them with '!!python/unicode' prepended to the value. config = OrderedDict([('id', plugin_id.encode('utf-8')), - ('name', name.encode('utf-8')), ('version', '0.1.0'), + ('name', name.encode('utf-8')), + ('version', default_version.encode('utf-8')), ('language', 'PYTHON27'), ('hostTypes', ['UNIX']), ('pluginType', ingestion_strategy.encode('utf-8')), ('entryPoint', entry_point.encode('utf-8')), ('srcDir', src_dir_path.encode('utf-8')), ('schemaFile', schema_file_path.encode('utf-8')), - ('hostTypes', [host_type.encode('utf-8')])]) + ('hostTypes', [host_type.encode('utf-8')]), + ('buildNumber', default_version.encode('utf-8'))]) return config diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 06497cbd..5123a366 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -129,3 +129,23 @@ def get_schema_file_path(plugin_config, schema_file): if not os.path.isfile(schema_file): raise exceptions.PathTypeError(schema_file, 'file') return os.path.normpath(schema_file) + + +def get_standardized_build_number(build_number): + """ + Converts the build number the way back end expects it to be - without + leading or trailing zeros in each part of the multi part build number that + is separated by dots. 
+ """ + # Split on the period and convert to integer + array = [int(i) for i in build_number.split('.')] + + # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 + while array: + if not array[-1]: + # Remove the last element which is a zero from array + array.pop() + else: + break + + return '.'.join(str(i) for i in array) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 1bd7106f..490b4b78 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -26,7 +26,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-006 +package_version = 1.1.0-internal-upgrade-007 virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json index 35a6ba1b..dcd3e0da 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json @@ -47,8 +47,12 @@ "defaultLocale": { "type": "string", "default": "en-us" + }, + "buildNumber": { + "type": "string", + "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" } }, "additionalProperties": false, - "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language"] + "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"] } diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json index 504d3837..b54ac778 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json @@ -46,8 +46,12 @@ "defaultLocale": { "type": "string", "default": "en-us" + }, + "buildNumber": { + "type": "string", + "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" } }, "additionalProperties": false, - "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language"] + "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"] } diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 8478827e..31d2cb35 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -637,3 +637,17 @@ def test_plugin_config_schemas_diff(): config_schema['properties']['id'].pop('pattern') assert config_schema == config_schema_no_id + + @staticmethod + @pytest.mark.parametrize('build_number, expected', [ + pytest.param('0.0.1', '0.0.1'), + pytest.param('0.1.0', '0.1'), + pytest.param('1.0.01.0', '1.0.1') + ]) + def test_build_number_parameter(plugin_config_content, src_dir, + 
schema_content, expected): + + upload_artifact = build.prepare_upload_artifact( + plugin_config_content, src_dir, schema_content, {}) + + assert expected == upload_artifact['buildNumber'] diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 633596e6..9988c398 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -94,6 +94,8 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, assert config['entryPoint'] == init.DEFAULT_ENTRY_POINT assert config['srcDir'] == init.DEFAULT_SRC_DIRECTORY assert config['schemaFile'] == init.DEFAULT_SCHEMA_FILE + assert config['version'] == init.DEFAULT_VERSION + assert config['buildNumber'] == init.DEFAULT_VERSION # Validate the schema file is identical to the template. schema_file_path = os.path.join(tmpdir.strpath, config['schemaFile']) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 0cc8c0af..fd7aed87 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -170,7 +170,7 @@ def artifact_file_created(): @pytest.fixture def plugin_config_content(plugin_id, plugin_name, version, language, host_types, plugin_type, entry_point, src_dir, - schema_file, manual_discovery): + schema_file, manual_discovery, build_number): """ This fixutre creates the dict expected in the properties yaml file the customer must provide for the build and compile commands. @@ -212,6 +212,9 @@ def plugin_config_content(plugin_id, plugin_name, version, language, if manual_discovery is not None: config['manualDiscovery'] = manual_discovery + if build_number: + config['buildNumber'] = build_number + return config @@ -265,6 +268,11 @@ def manual_discovery(): return None +@pytest.fixture +def build_number(): + return '2.0.0' + + @pytest.fixture def artifact_manual_discovery(): return True @@ -551,6 +559,7 @@ def basic_artifact_content(engine_api, virtual_source_definition, 'buildApi': package_util.get_build_api_version(), 'engineApi': engine_api, 'rootSquashEnabled': True, + 'buildNumber': '2', 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'manifest': {} } @@ -597,6 +606,7 @@ def artifact_content(engine_api, virtual_source_definition, 'buildApi': package_util.get_build_api_version(), 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'rootSquashEnabled': True, + 'buildNumber': '2', 'manifest': {} } diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 09d23d0a..004a258f 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -8,7 +8,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-006' + assert package_util.get_version() == '1.1.0-internal-upgrade-007' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index ce988f99..ec9b60ac 100644 --- 
a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -296,3 +296,32 @@ def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): assert ('SDK Error: Got an arbitrary non-platforms error for testing.' in message) assert '0 Warning(s). 1 Error(s).' in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @mock.patch.object(PluginValidator, + '_PluginValidator__import_plugin', + return_value=({}, None)) + @pytest.mark.parametrize('build_number, expected', + [('xxx', "'xxx' does not match"), ('1', None), + ('1.x', "'1.x' does not match"), ('1.100', None), + ('0.1.2', None), ('02.5000', None), + (None, "'buildNumber' is a required property"), + ('1.0.0_HF', "'1.0.0_HF' does not match"), + ('0.0.0', "'0.0.0' does not match"), + ('0', "'0' does not match"), + ('0.0.00', "'0.0.00' does not match"), + ('0.1', None)]) + def test_plugin_build_number_format(mock_import_plugin, src_dir, + plugin_config_file, + plugin_config_content, expected): + + try: + validator = PluginValidator.from_config_content( + plugin_config_file, plugin_config_content, + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_module() + mock_import_plugin.assert_called() + except exceptions.SchemaValidationError as err_info: + message = err_info.message + assert expected in message From efa421b187521a31923e9ddd30c66c66894e19d1 Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Thu, 16 Jan 2020 12:39:26 -0800 Subject: [PATCH 10/25] PYT-1029 Cleanup plugin_importer module Reviewed at: http://reviews.delphix.com/r/55036/ --- build.gradle | 2 +- libs/Pipfile.lock | 52 +-- libs/lock.dev-requirements.txt | 18 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 52 +-- platform/lock.dev-requirements.txt | 18 +- platform/lock.requirements.txt | 2 +- .../dlpx/virtualization/platform/__init__.py | 4 +- .../virtualization/platform/exceptions.py | 17 + .../virtualization/platform/import_util.py | 132 ++++++++ .../platform/import_validations.py | 116 +++++++ tools/Pipfile.lock | 164 +++++----- tools/lock.dev-requirements.txt | 22 +- tools/lock.requirements.txt | 12 +- .../virtualization/_internal/exceptions.py | 41 +++ .../_internal/plugin_importer.py | 298 +++++------------- .../virtualization/_internal/plugin_util.py | 21 +- .../_internal/plugin_validator.py | 124 ++++---- .../virtualization/_internal/settings.cfg | 2 +- .../_internal/commands/test_build.py | 16 +- .../_internal/commands/test_initialize.py | 15 +- .../_internal/test_package_util.py | 2 +- .../_internal/test_plugin_importer.py | 104 +++++- .../_internal/test_plugin_validator.py | 96 +----- 24 files changed, 755 insertions(+), 577 deletions(-) create mode 100644 platform/src/main/python/dlpx/virtualization/platform/import_util.py create mode 100644 platform/src/main/python/dlpx/virtualization/platform/import_validations.py diff --git a/build.gradle b/build.gradle index a4f17c4a..0854ac65 100644 --- a/build.gradle +++ b/build.gradle @@ -12,7 +12,7 @@ subprojects { * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. 
*/ project.ext.dvpApiVersion = "1.1.0-master-003" - version = "1.1.0-internal-006" + version = "1.1.0-internal-007" } def binDir = "${rootProject.projectDir}/bin" diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index ef4c8136..43ac8270 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9fdae34cda2117051576372f70698033622d25c390672a83669f63d5ae206370" + "sha256": "5b8cc310478557abd479e28e9702a44e0c3eddf8ab89b1ebe4cc8b781a13fb03" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz", - "version": "== 1.1.0-internal-006" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz", + "version": "== 1.1.0-internal-007" }, "protobuf": { "hashes": [ @@ -91,16 +91,16 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "mock": { "hashes": [ @@ -121,17 +121,17 @@ }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", + "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" ], - "version": "==19.2" + "version": "==20.0" }, "pathlib2": { "hashes": [ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.6'", "version": "==2.3.5" }, "pluggy": { @@ -143,25 +143,25 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pyparsing": { "hashes": [ - "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", - "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" ], - "version": "==2.4.5" + "version": "==2.4.6" }, "pytest": { "hashes": [ - "sha256:6192875be8af57b694b7c4904e909680102befcb99e610ef3d9f786952f795aa", - "sha256:f8447ebf8fd3d362868a5d3f43a9df786dfdfe9608843bd9002a2d47a104808f" + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], "index": "delphix", - "version": "==4.6.8" + "version": "==4.6.9" }, "scandir": { "hashes": [ @@ -189,17 +189,17 @@ }, "wcwidth": { "hashes": [ - 
"sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } } } diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 6ddfb04e..2c26a313 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,17 +3,17 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==1.3.0 ; python_version < '3.8' +funcsigs==1.0.2 ; python_version < '3.0' +importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.2 -pathlib2==2.3.5 ; python_version < '3' +packaging==20.0 +pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 -py==1.8.0 -pyparsing==2.4.5 -pytest==4.6.8 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.7 -zipp==0.6.0 +wcwidth==0.1.8 +zipp==1.0.0 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 83fb69f1..83eed067 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz dvp-api==1.1.0-master-003 protobuf==3.6.1 six==1.13.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 067d5491..1bf66e19 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "486b00d11f451beda7c61fbb8a4b65f6aa8aec9866dfd4f460fb2034bdaac806" + "sha256": "9a160aba7064f75aa9de830acc856c9bce452ba43e4d0b21f6b21dcea5f8abf1" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz", - "version": "== 1.1.0-internal-006" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz", + "version": "== 1.1.0-internal-007" }, "enum34": { "hashes": [ @@ -102,16 +102,16 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "mock": { "hashes": [ @@ -132,17 +132,17 
@@ }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", + "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" ], - "version": "==19.2" + "version": "==20.0" }, "pathlib2": { "hashes": [ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.6'", "version": "==2.3.5" }, "pluggy": { @@ -154,25 +154,25 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pyparsing": { "hashes": [ - "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", - "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" ], - "version": "==2.4.5" + "version": "==2.4.6" }, "pytest": { "hashes": [ - "sha256:6192875be8af57b694b7c4904e909680102befcb99e610ef3d9f786952f795aa", - "sha256:f8447ebf8fd3d362868a5d3f43a9df786dfdfe9608843bd9002a2d47a104808f" + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], "index": "delphix", - "version": "==4.6.8" + "version": "==4.6.9" }, "scandir": { "hashes": [ @@ -200,17 +200,17 @@ }, "wcwidth": { "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } } } diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 6ddfb04e..2c26a313 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -3,17 +3,17 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' -importlib-metadata==1.3.0 ; python_version < '3.8' +funcsigs==1.0.2 ; python_version < '3.0' +importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.2 -pathlib2==2.3.5 ; python_version < '3' +packaging==20.0 +pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 -py==1.8.0 -pyparsing==2.4.5 -pytest==4.6.8 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 
-wcwidth==0.1.7 -zipp==0.6.0 +wcwidth==0.1.8 +zipp==1.0.0 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 16d5c83c..a8fb7978 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz dvp-api==1.1.0-master-003 enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index 51c5d6aa..718409b4 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -6,4 +6,6 @@ from dlpx.virtualization.platform._plugin_classes import * from dlpx.virtualization.platform._plugin import * -from dlpx.virtualization.platform.util import * \ No newline at end of file +from dlpx.virtualization.platform.util import * +from dlpx.virtualization.platform.import_util import * +from dlpx.virtualization.platform.import_validations import * \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py index ffaceb06..1f12161f 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py +++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py @@ -108,3 +108,20 @@ def __init__(self, reference): message = ("Reference '{}' is not a correctly formatted host environment reference.".format(reference)) super(IncorrectReferenceFormatError, self).__init__(message) +class IncorrectPluginCodeError(PluginRuntimeError): + """ + This gets thrown if the import validations come across invalid plugin + code that causes import to fail, or if the expected plugin entry point is + not found in the plugin code. + Args: + message (str): A user-readable message describing the exception. + + Attributes: + message (str): A user-readable message describing the exception. + """ + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(IncorrectPluginCodeError, self).__init__(message) \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_util.py b/platform/src/main/python/dlpx/virtualization/platform/import_util.py new file mode 100644 index 00000000..2d9e8235 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_util.py @@ -0,0 +1,132 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# +import inspect + +from dlpx.virtualization.platform import exceptions + + +_IMPORT_CHECKS = {} +_POST_IMPORT_CHECKS = {} + + +class PluginModule: + """ + Import helper class for the plugin. An instance of this class helps to pass + state of imported module and relevant info to all the validation methods. 
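+
+    A minimal construction sketch (the module and entry point names below
+    are illustrative only; the argument order matches __init__):
+
+        PluginModule(src_dir, 'plugin_runner', 'plugin', 'DIRECT',
+                     module_content, expected_direct_args_by_op,
+                     expected_staged_args_by_op, validate_args=True)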
+ """ + def __init__(self, + src_dir, + module, + entry_point, + plugin_type, + module_content, + expected_direct_args_by_op, + expected_staged_args_by_op, + validate_args=False): + self.__src_dir = src_dir + self.__module = module + self.__entry_point = entry_point + self.__type = plugin_type + self.__module_content = module_content + self.__expected_direct_args_by_op = expected_direct_args_by_op + self.__expected_staged_args_by_op = expected_staged_args_by_op + self.__validate_args = validate_args + + @property + def src_dir(self): + return self.__src_dir + + @property + def module(self): + return self.__module + + @property + def entry_point(self): + return self.__entry_point + + @property + def plugin_type(self): + return self.__type + + @property + def module_content(self): + return self.__module_content + + @property + def expected_direct_args_by_op(self): + return self.__expected_direct_args_by_op + + @property + def expected_staged_args_by_op(self): + return self.__expected_staged_args_by_op + + @property + def validate_args(self): + return self.__validate_args + + +def import_check(ordinal): + """ + This is the import check decorator. Ordinal here signifies the order in + which the checks are executed. + """ + def import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _IMPORT_CHECKS + + _IMPORT_CHECKS[ordinal] = f + + return f + + return import_check_decorator + + +def post_import_check(ordinal): + """ + This is the post import check decorator. Ordinal here signifies the order + in which the checks are executed. + """ + def post_import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _POST_IMPORT_CHECKS + + _POST_IMPORT_CHECKS[ordinal] = f + + return f + + return post_import_check_decorator + + +def validate_import(plugin_module): + """ + Runs validations on the module imported and checks if import was fine + and imported content is valid or not. + NOTE: Dependency checks are not handled well. A failure in one validation + should not impact the next one if each validation defines its dependencies + well. For now, any exception from one is considered failure of all + validations. This can be enhanced to define dependencies well. + """ + for key in sorted(_IMPORT_CHECKS.keys()): + try: + _IMPORT_CHECKS[key](plugin_module) + except exceptions.IncorrectPluginCodeError as plugin_err: + return [plugin_err.message] + except exceptions.UserError as user_err: + return [user_err.message] + return [] + + +def validate_post_import(plugin_module): + """ + Runs post import validations on the module content. + """ + warnings = [] + + # + # warnings.extend is used below since each import check returns a list of + # warnings. + # + for key in sorted(_POST_IMPORT_CHECKS.keys()): + warnings.extend(_POST_IMPORT_CHECKS[key](plugin_module)) + return warnings \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_validations.py b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py new file mode 100644 index 00000000..52f99c57 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py @@ -0,0 +1,116 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. 
+#
+import inspect
+
+from dlpx.virtualization.platform.import_util import (import_check,
+                                                       post_import_check,
+                                                       PluginModule)
+from dlpx.virtualization.platform import exceptions
+
+
+@import_check(ordinal=1)
+def validate_module_content(plugin_module):
+    # This should never happen and if it does, flag an error.
+    if plugin_module.module_content is None:
+        raise exceptions.IncorrectPluginCodeError(
+            'Plugin module content is None.')
+
+
+@import_check(ordinal=2)
+def validate_entry_point(plugin_module):
+    #
+    # Schema validation on the plugin config file ensures the entry point
+    # is a string and it should never be None, so raise an error if it is.
+    #
+    if plugin_module.entry_point is None:
+        raise exceptions.IncorrectPluginCodeError(
+            'Plugin entry point object is None.')
+
+    if not hasattr(plugin_module.module_content, plugin_module.entry_point):
+        raise exceptions.UserError(
+            'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol'
+            ' in module \'{}\'.'.format(plugin_module.module,
+                                        plugin_module.entry_point,
+                                        plugin_module.entry_point,
+                                        plugin_module.module))
+
+
+@import_check(ordinal=3)
+def validate_plugin_object(plugin_module):
+    plugin_object = getattr(plugin_module.module_content,
+                            plugin_module.entry_point,
+                            None)
+
+    if plugin_object is None:
+        raise exceptions.UserError('Plugin object retrieved from the entry'
+                                   ' point {} is None'.format
+                                   (plugin_module.entry_point))
+
+
+@post_import_check(ordinal=1)
+def validate_named_args(plugin_module):
+    """
+    Does named argument validation based on the plugin type.
+    """
+    warnings = []
+
+    if plugin_module.validate_args:
+
+        #
+        # Validate the plugin method args against the expected args and
+        # return any resulting warnings to the caller to process. These
+        # warnings should be treated as an exception to make sure the
+        # build fails.
+        #
+
+        plugin_object = getattr(plugin_module.module_content,
+                                plugin_module.entry_point)
+
+        # Iterate over the attribute objects of the Plugin object
+        for plugin_attrib in plugin_object.__dict__.values():
+            #
+            # For each plugin attribute object, its __dict__ keys give us the
+            # names of the plugin implementation methods. Those names are used
+            # to look up the expected named arguments and compare them with
+            # what is actually in the plugin code. plugin_op_type can be, e.g.,
+            # LinkedOperations, DiscoveryOperations, VirtualOperations
+            #
+            plugin_op_type = plugin_attrib.__class__.__name__
+            for op_name_key, op_name in plugin_attrib.__dict__.items():
+                if op_name is None:
+                    continue
+                actual_args = inspect.getargspec(op_name)
+                warnings.extend(
+                    _check_args(method_name=op_name.__name__,
+                                expected_args=_lookup_expected_args(
+                                    plugin_module, plugin_op_type,
+                                    op_name_key),
+                                actual_args=actual_args.args))
+
+    return warnings
+
+
+def _check_args(method_name, expected_args, actual_args):
+    warnings = []
+
+    if len(expected_args) != len(actual_args):
+        warnings.append('Number of arguments does not match in method {}.'
+                        ' Expected: {}, Found: {}.'.format(
+                            method_name, list(expected_args), actual_args))
+
+    if not all(arg in expected_args for arg in actual_args):
+        warnings.append('Named argument mismatch in method {}.'
+ ' Expected: {}, Found: {}.'.format( + method_name, list(expected_args), actual_args)) + + return warnings + + +def _lookup_expected_args(plugin_module, plugin_op_type, plugin_op_name): + if plugin_module.plugin_type == 'DIRECT': + return plugin_module.expected_direct_args_by_op[plugin_op_type][ + plugin_op_name] + else: + return plugin_module.expected_staged_args_by_op[plugin_op_type][ + plugin_op_name] \ No newline at end of file diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index f5c98751..18a6fa96 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "cc67692aa51c06b35e89165038a8e0789edc38dd91b0e60f0f932d631739931b" + "sha256": "a57907c06ad1cf10cb03ff7ea5d19bb160291a181fcc08137d75cfccb71d2751" }, "pipfile-spec": 6, "requires": {}, @@ -68,8 +68,8 @@ "version": "==0.6.0.post1" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-006.tar.gz", - "version": "== 1.1.0-internal-006" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-007.tar.gz", + "version": "== 1.1.0-internal-007" }, "entrypoints": { "hashes": [ @@ -102,7 +102,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "idna": { @@ -114,11 +114,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": "==1.4.0" }, "jinja2": { "hashes": [ @@ -208,26 +208,26 @@ }, "pyrsistent": { "hashes": [ - "sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b" + "sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280" ], - "version": "==0.15.6" + "version": "==0.15.7" }, "pyyaml": { "hashes": [ - "sha256:0e7f69397d53155e55d10ff68fdfb2cf630a35e6daf65cf0bdeaf04f127c09dc", - "sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803", - "sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc", - "sha256:38a4f0d114101c58c0f3a88aeaa44d63efd588845c5a2df5290b73db8f246d15", - "sha256:483eb6a33b671408c8529106df3707270bfacb2447bf8ad856a4b4f57f6e3075", - "sha256:4b6be5edb9f6bb73680f5bf4ee08ff25416d1400fbd4535fe0069b2994da07cd", - "sha256:7f38e35c00e160db592091751d385cd7b3046d6d51f578b29943225178257b31", - "sha256:8100c896ecb361794d8bfdb9c11fce618c7cf83d624d73d5ab38aef3bc82d43f", - "sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c", - "sha256:e4c015484ff0ff197564917b4b4246ca03f411b9bd7f16e02a2f586eb48b6d04", - "sha256:ebc4ed52dcc93eeebeae5cf5deb2ae4347b3a81c3fa12b0b8c976544829396a4" + "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6", + "sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf", + "sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5", + "sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e", + "sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811", + "sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e", + 
"sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d", + "sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20", + "sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689", + "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994", + "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615" ], "index": "delphix", - "version": "==5.2" + "version": "==5.3" }, "requests": { "hashes": [ @@ -279,10 +279,10 @@ }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } }, "develop": { @@ -326,48 +326,48 @@ }, "coverage": { "hashes": [ - "sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351", - "sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd", - "sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde", - "sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898", - "sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070", - "sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e", - "sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8", - "sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0", - "sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02", - "sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798", - "sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466", - "sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be", - "sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d", - "sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6", - "sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207", - "sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d", - "sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b", - "sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a", - "sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b", - "sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be", - "sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72", - "sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d", - "sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864", - "sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f", - "sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f", - "sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e", - "sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1", - "sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c", - "sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca", - "sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db", - "sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c" + "sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3", + "sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c", + 
"sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0", + "sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477", + "sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a", + "sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf", + "sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691", + "sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73", + "sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987", + "sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894", + "sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e", + "sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef", + "sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf", + "sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68", + "sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8", + "sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954", + "sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2", + "sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40", + "sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc", + "sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc", + "sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e", + "sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d", + "sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f", + "sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc", + "sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301", + "sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea", + "sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb", + "sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af", + "sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52", + "sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37", + "sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0" ], "index": "delphix", - "version": "==5.0" + "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz", - "version": "== 1.1.0-internal-006" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz", + "version": "== 1.1.0-internal-007" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-006.tar.gz", - "version": "== 1.1.0-internal-006" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-007.tar.gz", + "version": "== 1.1.0-internal-007" }, "entrypoints": { "hashes": [ @@ -408,7 +408,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { @@ -428,11 +428,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", - "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", + "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" ], "markers": "python_version < '3.8'", - "version": "==1.3.0" + "version": 
"==1.4.0" }, "isort": { "hashes": [ @@ -467,10 +467,10 @@ }, "packaging": { "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", + "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" ], - "version": "==19.2" + "version": "==20.0" }, "pathlib2": { "hashes": [ @@ -489,10 +489,10 @@ }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", + "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" ], - "version": "==1.8.0" + "version": "==1.8.1" }, "pycodestyle": { "hashes": [ @@ -510,18 +510,18 @@ }, "pyparsing": { "hashes": [ - "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", - "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", + "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" ], - "version": "==2.4.5" + "version": "==2.4.6" }, "pytest": { "hashes": [ - "sha256:6192875be8af57b694b7c4904e909680102befcb99e610ef3d9f786952f795aa", - "sha256:f8447ebf8fd3d362868a5d3f43a9df786dfdfe9608843bd9002a2d47a104808f" + "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", + "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], "index": "delphix", - "version": "==4.6.8" + "version": "==4.6.9" }, "pytest-cov": { "hashes": [ @@ -566,10 +566,10 @@ }, "wcwidth": { "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", + "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" ], - "version": "==0.1.7" + "version": "==0.1.8" }, "yapf": { "hashes": [ @@ -581,10 +581,10 @@ }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } } } diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 3f7c8477..016c70fb 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,36 +1,36 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-006.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-006.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-007.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -coverage==5.0 +coverage==5.0.3 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 funcsigs==1.0.2 ; python_version < '3.0' -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; 
python_version < '3.2' httpretty==0.9.7 -importlib-metadata==1.3.0 ; python_version < '3.8' +importlib-metadata==1.4.0 ; python_version < '3.8' isort==4.3.21 mccabe==0.6.1 mock==3.0.5 more-itertools==5.0.0 -packaging==19.2 +packaging==20.0 pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 -py==1.8.0 +py==1.8.1 pycodestyle==2.5.0 pyflakes==2.1.1 -pyparsing==2.4.5 +pyparsing==2.4.6 pytest-cov==2.8.1 -pytest==4.6.8 +pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' -wcwidth==0.1.7 +wcwidth==0.1.8 yapf==0.28 -zipp==0.6.0 +zipp==1.0.0 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 55df3adb..e262d83f 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../platform/build/python-dist/dvp-platform-1.1.0-internal-006.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-007.tar.gz attrs==19.3.0 certifi==2019.11.28 chardet==3.0.4 @@ -10,9 +10,9 @@ contextlib2==0.6.0.post1 ; python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' idna==2.8 -importlib-metadata==1.3.0 ; python_version < '3.8' +importlib-metadata==1.4.0 ; python_version < '3.8' jinja2==2.10.3 jsonschema==3.2.0 markupsafe==1.1.1 @@ -21,11 +21,11 @@ more-itertools==5.0.0 pathlib2==2.3.5 ; python_version < '3' pycodestyle==2.5.0 pyflakes==2.1.1 -pyrsistent==0.15.6 -pyyaml==5.2 +pyrsistent==0.15.7 +pyyaml==5.3 requests==2.22.0 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 typing==3.7.4.1 ; python_version < '3.5' urllib3==1.25.7 -zipp==0.6.0 +zipp==1.0.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py index 8d2880ea..7f2e474c 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py @@ -254,3 +254,44 @@ def __init__(self, command, exit_code, output): "{} failed with exit code {}.").format( output, command, exit_code) super(SubprocessFailedError, self).__init__(message) + + +class ValidationFailedError(UserError): + """ + ValidationFailedError gets raised when validation fails on plugin config + and its contents. + Defines helpers methods to format warning and exception messages. + """ + def __init__(self, warnings): + message = self.__report_warnings_and_exceptions(warnings) + super(ValidationFailedError, self).__init__(message) + + @classmethod + def __report_warnings_and_exceptions(cls, warnings): + """ + Prints the warnings and errors that were found in the plugin code, if + the warnings dictionary contains the 'exception' key. + """ + exception_msg = cls.exception_msg(warnings) + exception_msg += '\n{}'.format(cls.warning_msg(warnings)) + return '{}\n{} Warning(s). 
{} Error(s).'.format( + exception_msg, len(warnings['warning']), + len(warnings['exception'])) + + @classmethod + def exception_msg(cls, exceptions): + exception_msg = '\n'.join( + cls.__format_msg('Error', ex) for ex in exceptions['exception']) + return exception_msg + + @classmethod + def warning_msg(cls, warnings): + warning_msg = '\n'.join( + cls.__format_msg('Warning', warning) + for warning in warnings['warning']) + return warning_msg + + @staticmethod + def __format_msg(msg_type, msg): + msg_str = "{}: {}".format(msg_type, msg) + return msg_str diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 540b0830..b8ca580f 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -2,45 +2,21 @@ # Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import importlib -import inspect import logging import os import sys -from collections import defaultdict +from collections import defaultdict, namedtuple from multiprocessing import Process, Queue import yaml from dlpx.virtualization._internal import const, exceptions -from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE -from flake8.api import legacy as flake8 +from dlpx.virtualization.platform import import_util logger = logging.getLogger(__name__) PLUGIN_IMPORTER_YAML = os.path.join(const.PLUGIN_SCHEMAS_DIR, 'plugin_importer.yaml') - - -class MessageUtils: - """ - Defines helpers methods to format warning and exception messages. - """ - @classmethod - def exception_msg(cls, exceptions): - exception_msg = '\n'.join( - cls.__format_msg('Error', ex) for ex in exceptions['exception']) - return exception_msg - - @classmethod - def warning_msg(cls, warnings): - warning_msg = '\n'.join( - cls.__format_msg('Warning', warning) - for warning in warnings['warning']) - return warning_msg - - @staticmethod - def __format_msg(msg_type, msg): - msg_str = "{}: {}".format(msg_type, msg) - return msg_str +validation_result = namedtuple('validation_result', ['plugin_manifest']) def load_validation_maps(): @@ -55,7 +31,7 @@ def load_validation_maps(): class PluginImporter: """ Import helper class for the plugin. Imports the plugin module in a sub - process to ensure its isolated and does not pollute caller's runtime. + process to ensure it's isolated and does not pollute caller's runtime. On successful import, callers can get the manifest describing what methods are implemented in the plugin code. If import fails or has issues with validation of module content and entry points- will save @@ -80,12 +56,18 @@ def __init__(self, self.__plugin_entry_point = entry_point self.__plugin_type = plugin_type self.__validate = validate + self.__post_import_checks = [self.__check_for_required_methods] - def import_plugin(self): + @property + def result(self): + return validation_result(plugin_manifest=self.__plugin_manifest) + + def validate_plugin_module(self): """ - Imports the plugin module, does basic validation. + Imports the plugin module, does post import validation. Returns: plugin manifest - dict describing methods implemented in the plugin + is available to callers via the result property. NOTE: Importing module in the current context pollutes the runtime of the caller, in this case dvp. If the module being imported, for @@ -95,22 +77,10 @@ def import_plugin(self): in a sub-process and on completion return the output. 
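+
+        A usage sketch, mirroring plugin_util.get_plugin_manifest() in this
+        change:
+
+            importer = PluginImporter(src_dir, entry_point_module,
+                                      entry_point_object, plugin_type, True)
+            importer.validate_plugin_module()
+            manifest = importer.result.plugin_manifest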
""" logger.debug('Importing plugin module : %s', self.__plugin_module) + self.__plugin_manifest, warnings = self.__internal_import() + self.__run_checks(warnings) - self.__pre_import_checks() - plugin_manifest, warnings = self.__import_plugin() - self.__post_import_checks(plugin_manifest, warnings) - - return plugin_manifest - - def __pre_import_checks(self): - """ - Performs checks of the plugin code that should take place prior to - importing. - """ - warnings = self.__check_for_undefined_names(self.__src_dir) - self.__report_warnings_and_exceptions(warnings) - - def __import_plugin(self): + def __internal_import(self): """ Imports the module in a sub-process to check for errors or issues. Also does an eval on the entry point. @@ -133,40 +103,6 @@ def __import_plugin(self): return plugin_manifest, warnings - def __post_import_checks(self, plugin_manifest, warnings): - """ - Performs checks of the plugin code that should take place after - importing. - """ - check_warnings = self.__check_for_required_methods( - plugin_manifest, self.__plugin_type) - - if check_warnings and 'warning' in check_warnings: - warnings['warning'].extend(check_warnings['warning']) - - self.__report_warnings_and_exceptions(warnings) - - @staticmethod - def __check_for_required_methods(plugin_manifest, plugin_type): - """ - Checks for required methods in the manifest and adds warnings for any - missing methods. - """ - warnings = defaultdict(list) - if not plugin_manifest: - return warnings - for method_key, method_name in \ - PluginImporter.required_methods_by_plugin_type[ - plugin_type].items(): - if plugin_manifest[method_key] is False: - warnings['warning'].append( - 'Implementation missing ' - 'for required method {}. The Plugin Operation \'{}\' ' - 'will fail when executed.'.format( - method_name, PluginImporter. - required_methods_description[method_key])) - return warnings - @staticmethod def __import_in_subprocess(src_dir, module, entry_point, plugin_type, validate): @@ -203,50 +139,46 @@ def __parse_queue(queue): return manifest, warnings - @staticmethod - def __check_for_undefined_names(src_dir): + def __run_checks(self, warnings): """ - Checks the plugin module for undefined names. This catches - missing imports, references to nonexistent variables, etc. - - ..note:: - We are using the legacy flake8 api, because there is currently - no public, stable api for flake8 >= 3.0.0 - - For more info, see - https://flake8.pycqa.org/en/latest/user/python-api.html + Performs checks of the plugin code that should take place after + importing. 
""" - warnings = defaultdict(list) - exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) - style_guide = flake8.get_style_guide(select=["F821"], - exclude=[exclude_dir], - quiet=1) - style_guide.check_files(paths=[src_dir]) - file_checkers = style_guide._application.file_checker_manager.checkers - for checker in file_checkers: - for result in checker.results: - # From the api code, result is a tuple defined as: error = - # (error_code, line_number, column, text, physical_line) - if result[0] == 'F821': - msg = "{} on line {} in {}".format(result[3], result[1], - checker.filename) - warnings['exception'].append(exceptions.UserError(msg)) + for check in self.__post_import_checks: + check_warnings = check() + if check_warnings and 'warning' in check_warnings: + warnings['warning'].extend(check_warnings['warning']) - return warnings + if warnings and 'exception' in warnings: + raise exceptions.ValidationFailedError(warnings) - @staticmethod - def __report_warnings_and_exceptions(warnings): + if warnings and 'warning' in warnings: + # + # Use the ValidationFailedError type to get a formatted message + # with number of warnings included in the message. + # + warning_msg = exceptions.ValidationFailedError(warnings).message + logger.warn(warning_msg) + + def __check_for_required_methods(self): """ - Prints the warnings and errors that were found in the plugin code, if - the warnings dictionary contains the 'exception' key. + Checks for required methods in the manifest and adds warnings for any + missing methods. """ - if warnings and 'exception' in warnings: - exception_msg = MessageUtils.exception_msg(warnings) - exception_msg += '\n{}'.format(MessageUtils.warning_msg(warnings)) - raise exceptions.UserError( - '{}\n{} Warning(s). {} Error(s).'.format( - exception_msg, len(warnings['warning']), - len(warnings['exception']))) + warnings = defaultdict(list) + if not self.__plugin_manifest: + return warnings + for method_key, method_name in \ + PluginImporter.required_methods_by_plugin_type[ + self.__plugin_type].items(): + if self.__plugin_manifest[method_key] is False: + warnings['warning'].append( + 'Implementation missing ' + 'for required method {}. The Plugin Operation \'{}\' ' + 'will fail when executed.'.format( + method_name, PluginImporter. + required_methods_description[method_key])) + return warnings def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): @@ -254,70 +186,56 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): sys.path.append(src_dir) try: module_content = importlib.import_module(module) - manifest = _validate_and_get_manifest(module, module_content, - entry_point) - - if validate: - # - # Validated methods args against expected args and add any - # resulting warnings to the queue for caller to process. - # These warnings should be treated as an exception to make - # sure build fails. - # - warnings = _validate_named_args(module_content, entry_point, - plugin_type) - if warnings: - map(lambda warning: queue.put({'exception': warning}), - warnings) - except ImportError as err: + except (ImportError, TypeError) as err: queue.put({'exception': err}) - except exceptions.UserError as user_err: - queue.put({'exception': user_err}) - except RuntimeError as rt_err: - queue.put({'exception': rt_err}) finally: sys.path.remove(src_dir) + # + # Create an instance of plugin module with associated state to pass around + # to the validation code. 
+ # + plugin_module = import_util.PluginModule( + src_dir, module, entry_point, plugin_type, module_content, + PluginImporter.expected_direct_args_by_op, + PluginImporter.expected_staged_args_by_op, validate) + + # Validate if the module imported fine and is the expected one. + warnings = import_util.validate_import(plugin_module) + _process_warnings(queue, warnings) + + # If the import itself had issues, no point validating further. + if warnings and len(warnings) > 0: + return + + # Run post import validations and consolidate issues. + warnings = import_util.validate_post_import(plugin_module) + _process_warnings(queue, warnings) + + manifest = _prepare_manifest(entry_point, module_content) queue.put({'manifest': manifest}) -def _validate_and_get_manifest(module, module_content, entry_point): +def _process_warnings(queue, warnings): + for warning in warnings: + queue.put({'exception': warning}) + + +def _prepare_manifest(entry_point, module_content): """ Creates a plugin manifest indicating which plugin operations have been implemented by a plugin developer. Plugin_module_content is a module object which must have plugin_entry_point_name as one of its attributes. Args: - module: name of the module imported - module_content: plugin module content from import entry_point: name of entry point to the above plugin module + module_content: plugin module content from import Returns: dict: dictionary that represents plugin's manifest """ - # This should never happen and if it does, flag a run time error. - if module_content is None: - raise RuntimeError('Plugin module content is None.') - - # - # Schema validation on plugin config file would have ensured entry - # is a string and should never happen its none - so raise a run time - # error if it does. - # - if entry_point is None: - raise RuntimeError('Plugin entry point object is None.') - - if not hasattr(module_content, entry_point): - raise exceptions.UserError( - 'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol' - ' in module \'{}\'.'.format(module, entry_point, entry_point, - module)) plugin_object = getattr(module_content, entry_point) - if plugin_object is None: - raise exceptions.UserError('Plugin object retrieved from the entry' - ' point {} is None'.format(entry_point)) - # Check which methods on the plugin object have been implemented. manifest = { 'type': @@ -363,61 +281,3 @@ def _validate_and_get_manifest(module, module_content, entry_point): } return manifest - - -def _validate_named_args(module_content, entry_point, plugin_type): - """ - Does named argument validation based on the plugin type. - """ - warnings = [] - - plugin_object = getattr(module_content, entry_point) - - # Iterate over attributes objects of the Plugin object - for plugin_attrib in plugin_object.__dict__.values(): - # - # For each plugin attribute object, its __dict__.keys will give - # us the name of the plugin implemntation method name. That name - # is useful in looking up named arguments expected and what is - # actually in the plugin code. And plugin_op_type can be, for e.g. 
- # LinkedOperations, DiscoveryOperations, VirtualOperations - # - plugin_op_type = plugin_attrib.__class__.__name__ - for op_name_key, op_name in plugin_attrib.__dict__.items(): - if op_name is None: - continue - actual_args = inspect.getargspec(op_name) - warnings.extend( - _check_args(method_name=op_name.__name__, - expected_args=_lookup_expected_args( - plugin_type, plugin_op_type, op_name_key), - actual_args=actual_args.args)) - - return warnings - - -def _check_args(method_name, expected_args, actual_args): - warnings = [] - - if len(expected_args) != len(actual_args): - warnings.append('Number of arguments do not match in method {}.' - ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - if not all(arg in expected_args for arg in actual_args): - warnings.append('Named argument mismatch in method {}.' - ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - return warnings - - -def _lookup_expected_args(plugin_type, plugin_op_type, plugin_op_name): - if plugin_type == const.DIRECT_TYPE: - return PluginImporter.expected_direct_args_by_op[plugin_op_type][ - plugin_op_name] - else: - return PluginImporter.expected_staged_args_by_op[plugin_op_type][ - plugin_op_name] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 06497cbd..ae198908 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -7,7 +7,8 @@ import os from contextlib import contextmanager -from dlpx.virtualization._internal import const, exceptions +from dlpx.virtualization._internal import const, exceptions, file_util +from dlpx.virtualization._internal.plugin_importer import PluginImporter from dlpx.virtualization._internal.plugin_validator import PluginValidator from dlpx.virtualization._internal.schema_validator import SchemaValidator @@ -77,17 +78,19 @@ def get_plugin_manifest(plugin_config_file, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else - const.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator.from_config_content(plugin_config_file, - plugin_config_content, - plugin_config_schema_file) + src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = PluginValidator.split_entry_point( + plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + importer = PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) with validate_error_handler(plugin_config_file, validation_mode): - validator.validate_plugin_module() + importer.validate_plugin_module() - return validator.result + return importer.result def validate_schema_file(schema_file, stop_build): diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py index e4a1d572..46c46435 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py @@ -5,17 +5,17 @@ import json import logging import os -from collections import namedtuple +from collections import defaultdict, namedtuple import yaml -from 
dlpx.virtualization._internal import (exceptions, file_util, - plugin_importer) +from dlpx.virtualization._internal import exceptions +from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE +from flake8.api import legacy as flake8 from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple('validation_result', - ['plugin_config_content', 'plugin_manifest']) +validation_result = namedtuple('validation_result', ['plugin_config_content']) class PluginValidator: @@ -37,12 +37,16 @@ def __init__(self, self.__plugin_config_schema = plugin_config_schema self.__plugin_config_content = plugin_config_content self.__plugin_manifest = None + self.__pre_import_checks = [ + self.__validate_plugin_config_content, + self.__validate_plugin_entry_point, + self.__check_for_undefined_names + ] @property def result(self): return validation_result( - plugin_config_content=self.__plugin_config_content, - plugin_manifest=self.__plugin_manifest) + plugin_config_content=self.__plugin_config_content) @classmethod def from_config_content(cls, plugin_config_file, plugin_config_content, @@ -60,37 +64,21 @@ def from_config_content(cls, plugin_config_file, plugin_config_content, def validate_plugin_config(self): """ Reads a plugin config file and validates the contents using a - pre-defined schema. If validation is successful, tries to import - the plugin module and validates the entry point specified. + pre-defined schema. """ - logger.info('Reading plugin config file %s', self.__plugin_config) - if self.__plugin_config_content is None: self.__plugin_config_content = self.__read_plugin_config_file() logger.debug('Validating plugin config file content : %s', self.__plugin_config_content) - self.__validate_plugin_config_content() - - def validate_plugin_module(self): - """ - Tries to import the plugin module and validates the entry point - specified. - """ - self.validate_plugin_config() - - src_dir = file_util.get_src_dir_path( - self.__plugin_config, self.__plugin_config_content['srcDir']) - - logger.debug('Validating plugin entry point : %s', - self.__plugin_config_content['entryPoint']) - self.__validate_plugin_entry_point(src_dir) + self.__run_checks() def __read_plugin_config_file(self): """ Reads a plugin config file and raises UserError if there is an issue reading the file. """ + logger.info('Reading plugin config file %s', self.__plugin_config) try: with open(self.__plugin_config, 'rb') as f: try: @@ -111,6 +99,18 @@ def __read_plugin_config_file(self): '\nError code: {}. Error message: {}'.format( self.__plugin_config, err.errno, os.strerror(err.errno))) + def __run_checks(self): + """ + Runs validations on the plugin config content and raise exceptions + if any. + """ + # + # All the pre-import checks need to happen in sequence. So no point + # validating further if a check fails. + # + for check in self.__pre_import_checks: + check() + def __validate_plugin_config_content(self): """ Validates the given plugin configuration is valid. @@ -170,43 +170,55 @@ def __validate_plugin_config_content(self): raise exceptions.SchemaValidationError(self.__plugin_config, validation_errors) - def __validate_plugin_entry_point(self, src_dir): + def __validate_plugin_entry_point(self): """ Validates the plugin entry point by parsing the entry - point to get module and entry point. Imports the module - to check for errors or issues. Also does an eval on the - entry point. + point to get module and entry point. 
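For reference, a minimal standalone sketch of the entry-point handling this refactor centralizes: the `entryPoint` config value (for example `staged_plugin:staged`) is split into a module name and an object name, and the importer later resolves that pair with `importlib` and `getattr`. The helper names below are illustrative only and are not part of the patch.

```python
# Illustrative sketch: split an 'entryPoint' value of the form
# 'module:object' and resolve it the way the importer ultimately does.
import importlib


def split_entry_point(entry_point):
    # Schema validation has already guaranteed the 'module:object' shape,
    # so a single split on ':' is enough.
    module_name, object_name = entry_point.split(':')
    return module_name, object_name


def resolve_plugin_object(entry_point):
    module_name, object_name = split_entry_point(entry_point)
    module = importlib.import_module(module_name)
    if not hasattr(module, object_name):
        raise RuntimeError("'{}' is not a symbol in module '{}'".format(
            object_name, module_name))
    return getattr(module, object_name)


print(split_entry_point('staged_plugin:staged'))  # ('staged_plugin', 'staged')
```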
""" - entry_point_field = self.__plugin_config_content['entryPoint'] - entry_point_strings = entry_point_field.split(':') - # Get the module and entry point name to import - entry_point_module = entry_point_strings[0] - entry_point_object = entry_point_strings[1] - plugin_type = self.__plugin_config_content['pluginType'] + entry_point_module, entry_point_object = self.split_entry_point( + self.__plugin_config_content['entryPoint']) - try: - self.__plugin_manifest = (self.__import_plugin( - src_dir, entry_point_module, entry_point_object, plugin_type)) - except ImportError as err: - raise exceptions.UserError( - 'Unable to load module \'{}\' specified in ' - 'pluginEntryPoint \'{}\' from path \'{}\'. ' - 'Error message: {}'.format(entry_point_module, - entry_point_object, src_dir, err)) + if not entry_point_module: + raise exceptions.UserError('Plugin module is invalid') - logger.debug("Got manifest %s", self.__plugin_manifest) + if not entry_point_object: + raise exceptions.UserError('Plugin object is invalid') - @staticmethod - def __import_plugin(src_dir, entry_point_module, entry_point_object, - plugin_type): + def __check_for_undefined_names(self): """ - Imports the given python module, does some validations ans returns the - manifest describing implemented plugin operations. + Checks the plugin module for undefined names. This catches + missing imports, references to nonexistent variables, etc. + + ..note:: + We are using the legacy flake8 api, because there is currently + no public, stable api for flake8 >= 3.0.0 + + For more info, see + https://flake8.pycqa.org/en/latest/user/python-api.html """ - importer = plugin_importer.PluginImporter(src_dir, entry_point_module, - entry_point_object, - plugin_type, True) - manifest = importer.import_plugin() + warnings = defaultdict(list) + src_dir = self.__plugin_config_content['srcDir'] + exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) + style_guide = flake8.get_style_guide(select=["F821"], + exclude=[exclude_dir], + quiet=1) + style_guide.check_files(paths=[src_dir]) + file_checkers = style_guide._application.file_checker_manager.checkers + + for checker in file_checkers: + for result in checker.results: + # From the api code, result is a tuple defined as: error = + # (error_code, line_number, column, text, physical_line) + if result[0] == 'F821': + msg = "{} on line {} in {}".format(result[3], result[1], + checker.filename) + warnings['exception'].append(exceptions.UserError(msg)) + + if warnings and len(warnings) > 0: + raise exceptions.ValidationFailedError(warnings) - return manifest + @staticmethod + def split_entry_point(entry_point): + entry_point_strings = entry_point.split(':') + return entry_point_strings[0], entry_point_strings[1] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index e51ab54d..6ab550f8 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -21,7 +21,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-006 +package_version = 1.1.0-internal-007 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 8478827e..e97ab253 100644 --- 
a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -10,7 +10,7 @@ import yaml from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.commands import build -from dlpx.virtualization._internal.plugin_validator import PluginValidator +from dlpx.virtualization._internal.plugin_importer import PluginImporter @pytest.fixture @@ -57,9 +57,9 @@ def test_build_success(mock_relative_path, mock_install_deps, @staticmethod @pytest.mark.parametrize('artifact_filename', ['somefile.json']) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value={}) + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', + return_value=({}, None)) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') @mock.patch( 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') @@ -341,8 +341,8 @@ def test_zip_and_encode_source_files_encode_fail(mock_encode, src_dir): ''.format(src_dir, 'something')) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @mock.patch( 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') @@ -358,8 +358,8 @@ def test_id_validation_positive(mock_relative_path, mock_install_deps, skip_id_validation) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @pytest.mark.parametrize('plugin_id', ['mongo']) def test_id_validation_negative(mock_import_plugin, plugin_config_file, diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 633596e6..3c6ef269 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -154,24 +154,13 @@ def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, validator = plugin_validator.PluginValidator(plugin_config_file, schema_file) - # Assert config file and import validations are not done. + # Assert config file validation is not done. assert not validator.result.plugin_config_content - assert not validator.result.plugin_manifest validator.validate_plugin_config() - # Assert config file is validated and import validation is not done. + # Assert config file is validated. assert validator.result.plugin_config_content - assert not validator.result.plugin_manifest - - validator.validate_plugin_module() - - # - # Assert both config content and import validation are done and result - # tuple has both set to valid values. 
- # - assert validator.result.plugin_config_content - assert validator.result.plugin_manifest @staticmethod def test_invalid_with_config_file(plugin_config_file): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index c35d4510..d44a01ef 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-006' + assert package_util.get_version() == '1.1.0-internal-007' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 74f26ed8..8e9157e7 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -2,12 +2,33 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # import exceptions +import os +import uuid +from collections import OrderedDict import mock import pytest +import yaml +from dlpx.virtualization._internal import (file_util, plugin_util, + plugin_validator) from dlpx.virtualization._internal.plugin_importer import PluginImporter +def get_plugin_importer(plugin_config_file): + plugin_config_content = None + with open(plugin_config_file, 'rb') as f: + plugin_config_content = yaml.safe_load(f) + + src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = plugin_validator.PluginValidator\ + .split_entry_point(plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + return PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) + + class TestPluginImporter: @staticmethod @mock.patch('importlib.import_module') @@ -17,25 +38,26 @@ def test_get_plugin_manifest(mock_import, src_dir, plugin_type, mock_import.return_value = plugin_module_content importer = PluginImporter(src_dir, plugin_name, plugin_entry_point_name, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() - assert manifest == plugin_manifest + assert importer.result.plugin_manifest == plugin_manifest @staticmethod @mock.patch('importlib.import_module') def test_plugin_module_content_none(mock_import, src_dir, plugin_type, plugin_name, plugin_entry_point_name): mock_import.return_value = None - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, plugin_entry_point_name, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = str(err_info) - assert manifest == {} + assert result == () assert 'Plugin module content is None.' 
in message @staticmethod @@ -43,15 +65,16 @@ def test_plugin_module_content_none(mock_import, src_dir, plugin_type, def test_plugin_entry_object_none(mock_import, src_dir, plugin_type, plugin_name, plugin_module_content): mock_import.return_value = plugin_module_content - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, None, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = str(err_info) - assert manifest == {} + assert result == () assert 'Plugin entry point object is None.' in message @staticmethod @@ -61,15 +84,16 @@ def test_plugin_entry_point_nonexistent(mock_import, src_dir, plugin_type, plugin_module_content): entry_point_name = "nonexistent entry point" mock_import.return_value = plugin_module_content - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, entry_point_name, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert manifest == {} + assert result == () assert ('\'{}\' is not a symbol in module'.format(entry_point_name) in message) @@ -81,14 +105,66 @@ def test_plugin_object_none(mock_import, src_dir, plugin_type, plugin_name, setattr(plugin_module_content, none_entry_point, None) mock_import.return_value = plugin_module_content - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, none_entry_point, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert manifest == {} + assert result == () assert ('Plugin object retrieved from the entry point {} is' ' None'.format(none_entry_point)) in message + + @staticmethod + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_staged_plugin(mock_file_util, fake_staged_plugin_config): + src_dir = os.path.dirname(fake_staged_plugin_config) + mock_file_util.return_value = os.path.join(src_dir, 'src/') + importer = get_plugin_importer(fake_staged_plugin_config) + + with pytest.raises(exceptions.UserError) as err_info: + importer.validate_plugin_module() + + message = err_info.value.message + assert 'Named argument mismatch in method' in message + assert 'Number of arguments do not match' in message + assert 'Implementation missing for required method' in message + + @staticmethod + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_direct_plugin(mock_file_util, fake_direct_plugin_config): + src_dir = os.path.dirname(fake_direct_plugin_config) + mock_file_util.return_value = os.path.join(src_dir, 'src/') + importer = get_plugin_importer(fake_direct_plugin_config) + + with pytest.raises(exceptions.UserError) as err_info: + importer.validate_plugin_module() + + message = err_info.value.message + assert 'Named argument mismatch in method' in message + assert 'Number of arguments do not match' in message + assert 'Implementation missing for required method' in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @mock.patch('importlib.import_module') + def test_plugin_info_warn_mode(mock_import, mock_relative_path, + plugin_config_file, src_dir, + plugin_module_content): + plugin_config_content = OrderedDict([ + ('id', 
str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), + ('version', '0.1.0'), ('language', 'PYTHON27'), + ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), + ('manualDiscovery', True), + ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), + ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) + ]) + mock_import.return_value = plugin_module_content + try: + plugin_util.get_plugin_manifest(plugin_config_file, + plugin_config_content, False) + except Exception: + raise AssertionError() diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index afea9f1b..9cfd0645 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -9,7 +9,7 @@ import mock import pytest -from dlpx.virtualization._internal import const, exceptions, plugin_util +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.plugin_validator import PluginValidator @@ -63,11 +63,8 @@ def test_plugin_bad_config_file(plugin_config_file): @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - def test_plugin_valid_content(mock_import_plugin, mock_relative_path, - src_dir, plugin_config_file): + def test_plugin_valid_content(mock_relative_path, src_dir, + plugin_config_file): plugin_config_content = OrderedDict([ ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), ('version', '0.1.0'), ('language', 'PYTHON27'), @@ -80,9 +77,7 @@ def test_plugin_valid_content(mock_import_plugin, mock_relative_path, validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - mock_import_plugin.assert_called() + validator.validate_plugin_config() @staticmethod def test_plugin_missing_field(plugin_config_file): @@ -105,17 +100,13 @@ def test_plugin_missing_field(plugin_config_file): @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) @pytest.mark.parametrize('version, expected', [ pytest.param('xxx', "'xxx' does not match"), pytest.param('1.0.0', None), pytest.param('1.0.0_HF', None) ]) - def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, - src_dir, plugin_config_file, version, - expected): + def test_plugin_version_format(mock_path_is_relative, src_dir, + plugin_config_file, version, expected): plugin_config_content = OrderedDict([ ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), ('version', version), ('language', 'PYTHON27'), @@ -129,17 +120,13 @@ def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - mock_import_plugin.assert_called() + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) @pytest.mark.parametrize('entry_point, expected', [ 
pytest.param('staged_plugin', "'staged_plugin' does not match"), pytest.param(':staged_plugin', "':staged_plugin' does not match"), @@ -150,9 +137,8 @@ def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, "':staged_plugin:staged:' does not match"), pytest.param('staged_plugin:staged', None) ]) - def test_plugin_entry_point(mock_import_plugin, mock_relative_path, - src_dir, plugin_config_file, entry_point, - expected): + def test_plugin_entry_point(mock_relative_path, src_dir, + plugin_config_file, entry_point, expected): plugin_config_content = OrderedDict([ ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), ('version', '1.0.0'), ('language', 'PYTHON27'), @@ -166,8 +152,7 @@ def test_plugin_entry_point(mock_import_plugin, mock_relative_path, validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - mock_import_plugin.assert_called() + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @@ -214,43 +199,8 @@ def test_multiple_validation_errors(plugin_config_file): assert "'srcDir' is a required property" in message assert "'xxx' is not one of ['UNIX', 'WINDOWS']" in message - @staticmethod - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_staged_plugin(mock_file_util, fake_staged_plugin_config): - src_dir = os.path.dirname(fake_staged_plugin_config) - mock_file_util.return_value = os.path.join(src_dir, 'src/') - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_staged_plugin_config, - const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - message = err_info.value.message - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required method' in message - - @staticmethod - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_direct_plugin(mock_file_util, fake_direct_plugin_config): - src_dir = os.path.dirname(fake_direct_plugin_config) - mock_file_util.return_value = os.path.join(src_dir, 'src/') - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_direct_plugin_config, - const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - message = err_info.value.message - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required method' in message - @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) @pytest.mark.parametrize('plugin_id , expected', [ pytest.param('Staged_plugin', "'Staged_plugin' does not match"), pytest.param('staged_Plugin', "'staged_Plugin' does not match"), @@ -258,8 +208,8 @@ def test_direct_plugin(mock_file_util, fake_direct_plugin_config): pytest.param('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), pytest.param('e3b69c61-4c30-44f7-92c0-504c8388b91e', None) ]) - def test_plugin_id(mock_import_plugin, mock_relative_path, src_dir, - plugin_config_file, plugin_id, expected): + def test_plugin_id(mock_relative_path, src_dir, plugin_config_file, + plugin_id, expected): plugin_config_content = OrderedDict([ ('id', plugin_id.encode('utf-8')), ('name', 'python_vfiles'), ('version', '1.0.0'), 
('language', 'PYTHON27'), @@ -273,27 +223,7 @@ def test_plugin_id(mock_import_plugin, mock_relative_path, src_dir, validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - mock_import_plugin.assert_called() + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message - - @staticmethod - def test_plugin_info_warn_mode(plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - err_info = None - try: - plugin_util.get_plugin_manifest(plugin_config_file, - plugin_config_content, False) - except Exception as e: - err_info = e - - assert err_info is None From e912fbdeb91347c87a477ae36eba80b4198ade42 Mon Sep 17 00:00:00 2001 From: Filip Drozdowski Date: Tue, 21 Jan 2020 11:25:43 -0800 Subject: [PATCH 11/25] PYT-1058 Update the Virtualization SDK docs on Gitlab to reflect blackbox changes Reviewed at: http://reviews.delphix.com/r/55287/ --- README-dev.md | 52 ++++++------------- build.gradle | 2 +- libs/Pipfile.lock | 22 ++++---- libs/lock.dev-requirements.txt | 6 +-- libs/lock.requirements.txt | 4 +- platform/Pipfile.lock | 20 +++---- platform/lock.dev-requirements.txt | 4 +- platform/lock.requirements.txt | 4 +- tools/Pipfile.lock | 26 +++++----- tools/README-dev.md | 2 +- tools/lock.dev-requirements.txt | 6 +-- tools/lock.requirements.txt | 4 +- .../virtualization/_internal/settings.cfg | 2 +- .../_internal/test_package_util.py | 2 +- 14 files changed, 67 insertions(+), 89 deletions(-) diff --git a/README-dev.md b/README-dev.md index 549c40ba..ad2b44c5 100644 --- a/README-dev.md +++ b/README-dev.md @@ -13,14 +13,11 @@ This repository is going through a lot of changes. It is being migrated to GitHu At a very high level, our development process usually looks like this: 1. Make changes to SDK and appgate code. Test these changes manually. Iterate on this until you have everything working. -2. Publish a development build of the SDK to artifactory. -3. Update the version of the SDK specified in the app gate. -4. Publish a review for SDK code, and also publish a "provisional" review of appgate code. Address any feedback. -5. Push the SDK code and publish new SDK builds to our internal servers. -6. Finalize your appgate review. -7. Push the appgate changes - -Not every type of change requires every step. +2. Update the version of the SDK. +3. Create a remote branch in the virtualization-sdk Gitlab repo (e.g. projects/my-test). +4. Push your commit to that branch. +5. Publish a review for SDK code. Address any feedback. Run unit and blackbox tests. +6. Push the SDK code. These steps are described in more detail below. @@ -86,30 +83,23 @@ Running `./gradlew test` from the top level of the repository will run all SDK u #### Testing sdk-gate changes with app-gate code -At the moment blackbox refers to a property file in the app-gate to determine the version of the SDK to install for tests so this property always needs to updated for automated testing. - -NOTE: The app-gate does not pull in the wrappers or CLI from this repository. - -The easiest way to do both of these is: - -1. 
Update the version of the SDK to something unique and clearly a development build. The standard is `x.y.z-internal-abc-`. For example, `1.1.0-internal-001-grant`. -2. Run `./gradlew publishDebug` from the root of this repository. -3. In `appliance/gradle.properties` in the app-gate update `virtualizationSdkVer` to match the SDK version. - -Run an appliance-update for manual testing and/or kick off automated blackbox tests by running `git blackbox -s appdata_python_samples` from your app-gate development branch. - - -## SDK Review and Provisional app-gate review +Blackbox expects you to push your SDK changes to a branch on Gitlab. -Once you're finished with local development and testing, you can publish your final SDK review to reviewboard. +1. Push your SDK changes to a remote branch on Gitlab, let's call it `projects/my-test`. +2. Navigate to the app-gate directory and run `git blackbox -s appdata_python_samples --extra-params="-p virt-sdk-branch=projects/my-test"`. +If you also want to specify the repository (the Virtualization SDK Gitlab repo is the default), you can do that via `virt-sdk-repo` parameter: +`git blackbox -s appdata_python_samples --extra-params="-p virt-sdk-repo=https://gitlab.delphix.com/virtualization-platform/virtualization-sdk.git -p virt-sdk-branch=projects/my-test"`. +If for some reason you want to build all Python distributions and upload them to artifactory, you can still do that using `sdk-version` parameter: +`git blackbox -s appdata_python_samples --extra-params="-p sdk-version=1.1.0-internal-007-upgrade"`. -In addition, it's customary to publish a "provisional" appgate review, so that people can get insight into how the out-for-review SDK changes will actually be used by the appgate. Of course, this review will contain all your temporary local-build changes mentioned above. So, in your review, you'll want to mention that these temporary changes will be reverted before the review is finalized. +For manual testing, you can install the SDK locally, build a plugin using the SDK, and upload it to your Delphix engine. There are no changes required to the app-gate code. ## Pushing and Deploying SDK Code - ### Publishing +Since Blackbox can build SDK from source, there's no need to publish the SDK Python distributions to the artifactory. However, if for some reason you need to do that, the process is described below. + There are two Gradle tasks that do publishing: `publishDebug` and `publishProd`. They differ in two ways: 1. They publish the Python distributions to separate repositories on Artifactory. `publishDebug` uploads to `dvp-local-pypi`. This is a special repository that has been setup to test the SDK. It falls back our our production PyPI repository, but artifacts uploaded to `dvp-local-pypi` do not impact production artifacts. This should be used for testing. `publishProd` does upload the Python distributions to our production Artifactory PyPI repository, `delphix-local`. @@ -128,15 +118,3 @@ NOTE: The external release to `pypi.org` is done outside of the build system. 2. `twine` needs to be installed. This is a Python package that is used to upload Python distributions. If it's not installed, install it by running `pip install twine`. -#### Final Publishing - -Once you are absolutely certain all changes have been made run `./gradlew publishProd`. This will run checks, create the Python distributions, and upload all of them to Artifactory with the Python distributions going to `delphix-local`. 
- -## Using Newly-Deployed SDK Build - -Now, we have to go back to our `appgate` code and make it point to the newly-deployed build on artifactory, instead of the local build we used to test. To achieve that, -modify `appliance/gradle.properties` and change `virtualizationSdkVer` to refer to your new version number. - -## Finalizing Appgate Review - -Once you've got the above changes completed, tested, and checked into git, you can update your appgate review. Now, your review will be ready for final ship-its. diff --git a/build.gradle b/build.gradle index 0854ac65..9e534596 100644 --- a/build.gradle +++ b/build.gradle @@ -12,7 +12,7 @@ subprojects { * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. */ project.ext.dvpApiVersion = "1.1.0-master-003" - version = "1.1.0-internal-007" + version = "1.1.0-internal-008" } def binDir = "${rootProject.projectDir}/bin" diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 43ac8270..d7d970d9 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "5b8cc310478557abd479e28e9702a44e0c3eddf8ab89b1ebe4cc8b781a13fb03" + "sha256": "1b3a265683ddb416a20ef34792fb84d9a9a9d291a97ee83464537d5a8e8987be" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz", - "version": "== 1.1.0-internal-007" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-008.tar.gz", + "version": "== 1.1.0-internal-008" }, "protobuf": { "hashes": [ @@ -49,10 +49,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" } }, "develop": { @@ -91,7 +91,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { @@ -131,7 +131,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -182,10 +182,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "wcwidth": { "hashes": [ diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 2c26a313..201df839 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,17 +3,17 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 
; python_version <= '2.7' packaging==20.0 -pathlib2==2.3.5 ; python_version < '3.6' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 wcwidth==0.1.8 zipp==1.0.0 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 83eed067..117959fc 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-008.tar.gz dvp-api==1.1.0-master-003 protobuf==3.6.1 -six==1.13.0 +six==1.14.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 1bf66e19..1efdf94e 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9a160aba7064f75aa9de830acc856c9bce452ba43e4d0b21f6b21dcea5f8abf1" + "sha256": "5cab10b6c18da4084a794f7f780d293ab787e50f790c31f3d9268a926c914069" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz", - "version": "== 1.1.0-internal-007" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-008.tar.gz", + "version": "== 1.1.0-internal-008" }, "enum34": { "hashes": [ @@ -60,10 +60,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" } }, "develop": { @@ -102,7 +102,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { @@ -193,10 +193,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "wcwidth": { "hashes": [ diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 2c26a313..0c178c98 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -3,7 +3,7 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' @@ -14,6 +14,6 @@ py==1.8.1 pyparsing==2.4.6 pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 wcwidth==0.1.8 zipp==1.0.0 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index a8fb7978..bc4d2858 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,6 +1,6 @@ -i 
https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-008.tar.gz dvp-api==1.1.0-master-003 enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 -six==1.13.0 +six==1.14.0 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index 18a6fa96..e03221fd 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a57907c06ad1cf10cb03ff7ea5d19bb160291a181fcc08137d75cfccb71d2751" + "sha256": "4e9e4b0382c9867e2752e06ab8903663d819ecacdc50ff7b5caec2241308d0c4" }, "pipfile-spec": 6, "requires": {}, @@ -68,8 +68,8 @@ "version": "==0.6.0.post1" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-007.tar.gz", - "version": "== 1.1.0-internal-007" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-008.tar.gz", + "version": "== 1.1.0-internal-008" }, "entrypoints": { "hashes": [ @@ -256,10 +256,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "typing": { "hashes": [ @@ -362,12 +362,12 @@ "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz", - "version": "== 1.1.0-internal-007" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-008.tar.gz", + "version": "== 1.1.0-internal-008" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-007.tar.gz", - "version": "== 1.1.0-internal-007" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-008.tar.gz", + "version": "== 1.1.0-internal-008" }, "entrypoints": { "hashes": [ @@ -550,10 +550,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "typing": { "hashes": [ diff --git a/tools/README-dev.md b/tools/README-dev.md index 0b248133..e689b0f0 100644 --- a/tools/README-dev.md +++ b/tools/README-dev.md @@ -17,7 +17,7 @@ Development should be done in a personal virtualenv. To setup the virtual enviro 1. `virtualenv /path/to/env/root`. This should be a Python 2.7 virtualenv. 2. `source ~/path/ot/env/root/bin/activate` -3. `pip install -r lock.dev-requirements.txt`. This installs the required devlopment packages. +3. `pip install -r lock.dev-requirements.txt`. This installs the required development packages. 4. `../gradlew makeSetupPy` (this command will generate the setup.py file) 5. 
`pip install -e .` diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 016c70fb..e74b7c96 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,6 +1,6 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-007.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-007.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-008.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-008.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' @@ -29,7 +29,7 @@ pyparsing==2.4.6 pytest-cov==2.8.1 pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' wcwidth==0.1.8 yapf==0.28 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index e262d83f..b6663f24 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../platform/build/python-dist/dvp-platform-1.1.0-internal-007.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-008.tar.gz attrs==19.3.0 certifi==2019.11.28 chardet==3.0.4 @@ -25,7 +25,7 @@ pyrsistent==0.15.7 pyyaml==5.3 requests==2.22.0 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' urllib3==1.25.7 zipp==1.0.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 6ab550f8..6cb29273 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -21,7 +21,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-007 +package_version = 1.1.0-internal-008 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index d44a01ef..d421b1fb 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-007' + assert package_util.get_version() == '1.1.0-internal-008' @staticmethod def test_get_virtualization_api_version(): From 351e641a0a325f538f04c07b0f2f00dbef62fdde Mon Sep 17 00:00:00 2001 From: jeff ngo Date: Tue, 21 Jan 2020 12:49:18 -0800 Subject: [PATCH 12/25] PYT-1055 Fix test_libs unit tests to accept multiple orderings from unordered dictionary Reviewed at: http://reviews.delphix.com/r/55262/ --- .../python/dlpx/virtualization/test_libs.py | 46 +++++++++---------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/libs/src/test/python/dlpx/virtualization/test_libs.py b/libs/src/test/python/dlpx/virtualization/test_libs.py index 4e5ce096..10dd8dd6 100644 --- a/libs/src/test/python/dlpx/virtualization/test_libs.py +++ b/libs/src/test/python/dlpx/virtualization/test_libs.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
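The fix that follows relies on a simple pattern worth calling out: because a Python 2.7 `dict` has no guaranteed iteration order, the expected error text is built as a template and the assertion accepts either ordering of the two value types. Below is a minimal, self-contained sketch of that pattern, with shortened, illustrative message text rather than the exact strings used in the tests.

```python
# Illustrative sketch of the order-insensitive assertion used in this
# commit: format the expected text both ways and accept either one.
def check_message(actual_message):
    template = ("argument 'variables' was a dict of "
                "{{type 'str':type '{}', type 'str':type '{}'}}")
    assert (actual_message == template.format('int', 'str') or
            actual_message == template.format('str', 'int'))


# Either rendering produced by iterating an unordered dict passes.
check_message("argument 'variables' was a dict of "
              "{type 'str':type 'int', type 'str':type 'str'}")
check_message("argument 'variables' was a dict of "
              "{type 'str':type 'str', type 'str':type 'int'}")
```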
# import mock @@ -189,11 +189,12 @@ def test_run_bash_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_bash(remote_connection, command, variables, use_login_shell) - assert err_info.value.message == ( - "The function run_bash's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_bash's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) @staticmethod def test_run_bash_bad_use_login_shell(remote_connection): @@ -213,7 +214,6 @@ def test_run_bash_bad_use_login_shell(remote_connection): class TestLibsRunSync: @staticmethod def test_run_sync(remote_connection): - expected_run_sync_response = libs_pb2.RunSyncResponse() expected_source_directory = 'sourceDirectory' @@ -269,7 +269,6 @@ def test_run_sync_with_actionable_error(remote_connection): @staticmethod def test_run_sync_with_nonactionable_error(remote_connection): - response = libs_pb2.RunSyncResponse() na_error = libs_pb2.NonActionableLibraryError() response.error.non_actionable_error.CopyFrom(na_error) @@ -593,11 +592,12 @@ def test_run_powershell_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_powershell(remote_connection, command, variables) - assert err_info.value.message == ( - "The function run_powershell's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_powershell's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) class TestLibsRunExpect: @@ -647,12 +647,12 @@ def test_run_expect_check_true_exitcode_success(remote_connection): def mock_run_expect(actual_run_expect_request): assert actual_run_expect_request.command == expected_command assert ( - actual_run_expect_request.remote_connection.environment.name - == remote_connection.environment.name + actual_run_expect_request.remote_connection.environment.name + == remote_connection.environment.name ) assert ( - actual_run_expect_request.remote_connection.environment.reference - == remote_connection.environment.reference + actual_run_expect_request.remote_connection.environment.reference + == remote_connection.environment.reference ) return expected_run_expect_response @@ -704,7 +704,6 @@ def test_run_expect_with_actionable_error(remote_connection): @staticmethod def test_run_expect_with_nonactionable_error(remote_connection): - response = libs_pb2.RunExpectResponse() na_error = libs_pb2.NonActionableLibraryError() response.error.non_actionable_error.CopyFrom(na_error) @@ -768,8 +767,9 @@ def test_run_expect_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_expect(remote_connection, command, variables) - assert err_info.value.message == ( - "The function run_expect's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be 
of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_expect's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) From 2c15bf4439739817f7e50f2b431ae22037706901 Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Wed, 22 Jan 2020 11:10:56 -0800 Subject: [PATCH 13/25] PYT-1060 Cleanup plugin_importer module - merge from master to upgrade branch Reviewed at: http://reviews.delphix.com/r/55352/ --- build.gradle | 2 +- libs/Pipfile.lock | 28 +- libs/lock.dev-requirements.txt | 8 +- libs/lock.requirements.txt | 4 +- platform/Pipfile.lock | 28 +- platform/lock.dev-requirements.txt | 8 +- platform/lock.requirements.txt | 4 +- .../dlpx/virtualization/platform/__init__.py | 2 + .../virtualization/platform/exceptions.py | 17 + .../virtualization/platform/import_util.py | 138 +++++++ .../platform/import_validations.py | 191 +++++++++ tools/Pipfile.lock | 42 +- tools/lock.dev-requirements.txt | 14 +- tools/lock.requirements.txt | 8 +- .../virtualization/_internal/exceptions.py | 50 +++ .../_internal/plugin_importer.py | 365 +++++------------- .../virtualization/_internal/plugin_util.py | 21 +- .../_internal/plugin_validator.py | 124 +++--- .../virtualization/_internal/settings.cfg | 2 +- .../_internal/commands/test_build.py | 16 +- .../_internal/commands/test_initialize.py | 15 +- .../_internal/test_package_util.py | 2 +- .../_internal/test_plugin_importer.py | 187 ++++++++- .../_internal/test_plugin_validator.py | 182 +-------- 24 files changed, 835 insertions(+), 623 deletions(-) create mode 100644 platform/src/main/python/dlpx/virtualization/platform/import_util.py create mode 100644 platform/src/main/python/dlpx/virtualization/platform/import_validations.py diff --git a/build.gradle b/build.gradle index 8b586807..a8988cc3 100644 --- a/build.gradle +++ b/build.gradle @@ -8,7 +8,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-007" + version = "1.1.0-internal-upgrade-008" } def binDir = "${rootProject.projectDir}/bin" diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 764d494a..f0b768a3 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "34adfd1e70f92441f7d38a1dda2721eccceaea68cd5a93253880ba113c5fa659" + "sha256": "36907c9dc35ef41d53048ab38a5a5c05e03f64caa33795342572cc04eaf4eb28" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz", - "version": "== 1.1.0-internal-upgrade-007" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz", + "version": "== 1.1.0-internal-upgrade-008" }, "protobuf": { "hashes": [ @@ -42,10 +42,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" } }, "develop": { @@ -84,7 +84,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": 
"python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { @@ -124,7 +124,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.6'", "version": "==2.3.5" }, "pluggy": { @@ -175,10 +175,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "wcwidth": { "hashes": [ @@ -189,10 +189,10 @@ }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } } } diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 3ae6dd54..23715d75 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,17 +3,17 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' +funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.0 -pathlib2==2.3.5 ; python_version < '3' +pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 wcwidth==0.1.8 -zipp==0.6.0 +zipp==1.0.0 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 46376d01..0aa55070 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,4 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz protobuf==3.6.1 -six==1.13.0 +six==1.14.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index c59b09d2..581b13f9 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "0f136bc5b2db4e615922803c9c64db51d15393994f702cb8ae85ce002b759f6e" + "sha256": "9f3db23b7533c52560570d11d7ad0c200856a00c14d6a968233ed6d4238269f8" }, "pipfile-spec": 6, "requires": {}, @@ -15,8 +15,8 @@ }, "default": { "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz", - "version": "== 1.1.0-internal-upgrade-007" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz", + "version": "== 1.1.0-internal-upgrade-008" }, "enum34": { "hashes": [ @@ -53,10 +53,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], 
- "version": "==1.13.0" + "version": "==1.14.0" } }, "develop": { @@ -95,7 +95,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "importlib-metadata": { @@ -135,7 +135,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.6'", "version": "==2.3.5" }, "pluggy": { @@ -186,10 +186,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "wcwidth": { "hashes": [ @@ -200,10 +200,10 @@ }, "zipp": { "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" + "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", + "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" ], - "version": "==0.6.0" + "version": "==1.0.0" } } } diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 3ae6dd54..23715d75 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -3,17 +3,17 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.3' +funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.0 -pathlib2==2.3.5 ; python_version < '3' +pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 wcwidth==0.1.8 -zipp==0.6.0 +zipp==1.0.0 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 7483e8f5..33cf4c0a 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 -six==1.13.0 +six==1.14.0 diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index 412bcb38..866d0169 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -13,3 +13,5 @@ from dlpx.virtualization.platform._upgrade import * from dlpx.virtualization.platform._virtual import * from dlpx.virtualization.platform._plugin import * +from dlpx.virtualization.platform.import_util import * +from dlpx.virtualization.platform.import_validations import * diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py 
b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py index 4f508ea6..d800120f 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py +++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py @@ -214,3 +214,20 @@ def __init__(self, reference): " environment reference.".format(reference)) super(IncorrectReferenceFormatError, self).__init__(message) +class IncorrectPluginCodeError(PluginRuntimeError): + """ + This gets thrown if the import validations come across invalid plugin + code that causes import to fail, or if the expected plugin entry point is + not found in the plugin code. + Args: + message (str): A user-readable message describing the exception. + + Attributes: + message (str): A user-readable message describing the exception. + """ + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(IncorrectPluginCodeError, self).__init__(message) \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_util.py b/platform/src/main/python/dlpx/virtualization/platform/import_util.py new file mode 100644 index 00000000..a8e8f807 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_util.py @@ -0,0 +1,138 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# +import inspect + +from dlpx.virtualization.platform import exceptions + + +_IMPORT_CHECKS = {} +_POST_IMPORT_CHECKS = {} + + +class PluginModule: + """ + Import helper class for the plugin. An instance of this class helps to pass + state of imported module and relevant info to all the validation methods. + """ + def __init__(self, + src_dir, + module, + entry_point, + plugin_type, + module_content, + v_maps, + validate_args=False): + self.__src_dir = src_dir + self.__module = module + self.__entry_point = entry_point + self.__type = plugin_type + self.__module_content = module_content + self.__expected_direct_args_by_op =\ + v_maps['EXPECTED_DIRECT_ARGS_BY_OP'] + self.__expected_staged_args_by_op =\ + v_maps['EXPECTED_STAGED_ARGS_BY_OP'] + self.__expected_upgrade_args = v_maps['EXPECTED_UPGRADE_ARGS'] + self.__validate_args = validate_args + + @property + def src_dir(self): + return self.__src_dir + + @property + def module(self): + return self.__module + + @property + def entry_point(self): + return self.__entry_point + + @property + def plugin_type(self): + return self.__type + + @property + def module_content(self): + return self.__module_content + + @property + def expected_direct_args_by_op(self): + return self.__expected_direct_args_by_op + + @property + def expected_staged_args_by_op(self): + return self.__expected_staged_args_by_op + + @property + def expected_upgrade_args(self): + return self.__expected_upgrade_args + + @property + def validate_args(self): + return self.__validate_args + + +def import_check(ordinal): + """ + This is the import check decorator. Ordinal here signifies the order in + which the checks are executed. + """ + def import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _IMPORT_CHECKS + + _IMPORT_CHECKS[ordinal] = f + + return f + + return import_check_decorator + + +def post_import_check(ordinal): + """ + This is the post import check decorator. Ordinal here signifies the order + in which the checks are executed. 
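The ordinal-keyed registries behind `import_check` and `post_import_check` amount to a small decorator pattern: each decorated function is stored under its ordinal and later executed in ascending order. A minimal standalone sketch of that pattern follows; the names `_CHECKS`, `register_check` and the sample checks are invented for illustration and are not SDK code.

```python
import inspect

# Ordinal -> check function; mirrors the role _IMPORT_CHECKS and
# _POST_IMPORT_CHECKS play above (illustrative names only).
_CHECKS = {}


def register_check(ordinal):
    """Register a check function to run at position `ordinal`."""
    def decorator(f):
        assert inspect.isfunction(f)
        assert ordinal not in _CHECKS  # two checks may not share an ordinal
        _CHECKS[ordinal] = f
        return f
    return decorator


@register_check(ordinal=2)
def check_entry_point(module):
    return [] if getattr(module, 'plugin', None) else ['missing entry point']


@register_check(ordinal=1)
def check_module_not_none(module):
    return [] if module is not None else ['module is None']


def run_checks(module):
    # Checks run in ascending ordinal order, regardless of definition order.
    warnings = []
    for ordinal in sorted(_CHECKS):
        warnings.extend(_CHECKS[ordinal](module))
    return warnings


print(run_checks(None))  # ['module is None', 'missing entry point']
```

Keeping the ordinal explicit makes execution order independent of definition order, which is also why duplicate ordinals are rejected with an assert.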
+ """ + def post_import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _POST_IMPORT_CHECKS + + _POST_IMPORT_CHECKS[ordinal] = f + + return f + + return post_import_check_decorator + + +def validate_import(plugin_module): + """ + Runs validations on the module imported and checks if import was fine + and imported content is valid or not. + NOTE: Dependency checks are not handled well. A failure in one validation + should not impact the next one if each validation defines its dependencies + well. For now, any exception from one is considered failure of all + validations. This can be enhanced to define dependencies well. + """ + for key in sorted(_IMPORT_CHECKS.keys()): + try: + _IMPORT_CHECKS[key](plugin_module) + except exceptions.IncorrectPluginCodeError as plugin_err: + return [plugin_err.message] + except exceptions.UserError as user_err: + return [user_err.message] + return [] + + +def validate_post_import(plugin_module): + """ + Runs post import validations on the module content. + """ + warnings = [] + + # + # warnings.extend is used below since each import check returns a list of + # warnings. + # + for key in sorted(_POST_IMPORT_CHECKS.keys()): + warnings.extend(_POST_IMPORT_CHECKS[key](plugin_module)) + return warnings \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_validations.py b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py new file mode 100644 index 00000000..1a7ef3da --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py @@ -0,0 +1,191 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# +import inspect + +from dlpx.virtualization.platform.import_util import (import_check, + post_import_check, + PluginModule) +from dlpx.virtualization.platform import exceptions + + +@import_check(ordinal=1) +def validate_module_content(plugin_module): + # This should never happen and if it does, flag an error. + if plugin_module.module_content is None: + raise exceptions.IncorrectPluginCodeError( + 'Plugin module content is None.') + + +@import_check(ordinal=2) +def validate_entry_point(plugin_module): + # + # Schema validation on plugin config file would have ensured entry is a + # string and should never be none - so raise an error if it does. + # + if plugin_module.entry_point is None: + raise exceptions.IncorrectPluginCodeError( + 'Plugin entry point object is None.') + + if not hasattr(plugin_module.module_content, plugin_module.entry_point): + raise exceptions.UserError( + 'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol' + ' in module \'{}\'.'.format(plugin_module.module, + plugin_module.entry_point, + plugin_module.entry_point, + plugin_module.module)) + + +@import_check(ordinal=3) +def validate_plugin_object(plugin_module): + plugin_object = getattr(plugin_module.module_content, + plugin_module.entry_point, + None) + + if plugin_object is None: + raise exceptions.UserError('Plugin object retrieved from the entry' + ' point {} is None'.format + (plugin_module.entry_point)) + + +@post_import_check(ordinal=1) +def validate_named_args(plugin_module): + """ + Does named argument validation based on the plugin type. + """ + warnings = [] + + if plugin_module.validate_args: + + # + # Validated methods args against expected args and return any + # resulting warnings to the caller to process. + # These warnings should be treated as an exception to make + # sure build fails. 
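The entry-point checks above (`validate_entry_point` and `validate_plugin_object`) effectively assert that an `entryPoint` of the form `module:object` resolves to a non-None attribute after import. A hypothetical, self-contained illustration of that resolution, using `json:loads` purely as a stand-in for a real plugin entry point:

```python
import importlib

entry_point = 'json:loads'  # stand-in for e.g. 'plugin_runner:plugin'
module_name, object_name = entry_point.split(':')

module_content = importlib.import_module(module_name)
if not hasattr(module_content, object_name):
    raise RuntimeError("'{}' is not a symbol in module '{}'".format(
        object_name, module_name))

plugin_object = getattr(module_content, object_name)
assert plugin_object is not None
```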
+ # + + plugin_object = getattr(plugin_module.module_content, + plugin_module.entry_point) + + # Iterate over attributes objects of the Plugin object + for plugin_attrib in plugin_object.__dict__.values(): + # + # For each plugin attribute object, its __dict__.keys will give + # us the name of the plugin implemntation method name. That name + # is useful in looking up named arguments expected and what is + # actually in the plugin code. And plugin_op_type can be, for e.g. + # LinkedOperations, DiscoveryOperations, VirtualOperations + # + plugin_op_type = plugin_attrib.__class__.__name__ + + # UpgradeOperations are validated differently, so ignore. + if plugin_op_type == 'UpgradeOperations': + continue + + for op_name_key, op_name in plugin_attrib.__dict__.items(): + if op_name is None: + continue + actual_args = inspect.getargspec(op_name) + warnings.extend( + _check_args(method_name=op_name.__name__, + expected_args=_lookup_expected_args( + plugin_module, plugin_op_type, + op_name_key), + actual_args=actual_args.args)) + + return warnings + + +@post_import_check(ordinal=2) +def check_upgrade_operations(plugin_module): + """ + Does named argument validation on UpgradeOperations. + """ + warnings = [] + + if plugin_module.validate_args: + + # + # Validated methods args against expected args and return any + # resulting warnings to the caller to process. + # These warnings should be treated as an exception to make + # sure build fails. + # + + plugin_object = getattr(plugin_module.module_content, + plugin_module.entry_point) + + # Iterate over attributes objects of the Plugin object + for plugin_attrib in plugin_object.__dict__.values(): + # + # For each plugin attribute object, its __dict__.keys will give + # us the name of the plugin implemntation method name. That name + # is useful in looking up named arguments expected and what is + # actually in the plugin code. And plugin_op_type can be, for e.g. + # LinkedOperations, DiscoveryOperations, VirtualOperations + # + plugin_op_type = plugin_attrib.__class__.__name__ + + if plugin_op_type != 'UpgradeOperations': + continue + + warnings.extend(_check_upgrade_args( + plugin_attrib, plugin_module.expected_upgrade_args)) + + return warnings + + +def _check_upgrade_args(upgrade_operations, expected_upgrade_args): + """ + Does named argument validation of all functions in dictionaries by looping + first through all the attributes in the UpgradeOperations for this plugin. + Any attributes that are not dictionaries that map migration_id -> + upgrade_function are skipped. We then loop through every key/value pair + of each of the dictionaries and validate that the argument in the defined + function has the expected name. + """ + warnings = [] + + for attribute_name, attribute in vars(upgrade_operations).items(): + if attribute_name not in expected_upgrade_args.keys(): + # Skip if not in one of the operation dicts we store functions in. + continue + # + # If the attribute_name was in the expected upgrade dicts then we know + # it is a dict containing migration id -> upgrade function that we can + # iterate on. 
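Both the per-operation checks and the upgrade checks below ultimately compare the argument names gathered via `inspect.getargspec` against an expected list. A standalone sketch of that comparison, with `repository_discovery` as a made-up example operation; `getargspec` is available on the Python 2.7 runtime this SDK targets, while newer Python 3 releases would use `inspect.getfullargspec` or `inspect.signature` instead.

```python
import inspect


def repository_discovery(source_connection):
    # A made-up plugin operation used only to demonstrate the check.
    return []


# getargspec returns the argument names exactly as written in the source.
actual = inspect.getargspec(repository_discovery).args
expected = ['source_connection']

if len(actual) != len(expected) or not all(arg in expected for arg in actual):
    print('Argument mismatch: expected {}, found {}'.format(expected, actual))
else:
    print('repository_discovery arguments look correct')
```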
+ # + for migration_id, migration_func in attribute.items(): + actual = inspect.getargspec(migration_func).args + expected = expected_upgrade_args[attribute_name] + warnings.extend( + _check_args(method_name=migration_func.__name__, + expected_args=expected, + actual_args=actual)) + + return warnings + + +def _check_args(method_name, expected_args, actual_args): + warnings = [] + + if len(expected_args) != len(actual_args): + warnings.append('Number of arguments do not match in method {}.' + ' Expected: {}, Found: {}.'.format( + method_name, list(expected_args), actual_args)) + + if not all(arg in expected_args for arg in actual_args): + warnings.append('Named argument mismatch in method {}.' + ' Expected: {}, Found: {}.'.format( + method_name, list(expected_args), actual_args)) + + return warnings + + +def _lookup_expected_args(plugin_module, plugin_op_type, plugin_op_name): + if plugin_module.plugin_type == 'DIRECT': + return plugin_module.expected_direct_args_by_op[plugin_op_type][ + plugin_op_name] + else: + return plugin_module.expected_staged_args_by_op[plugin_op_type][ + plugin_op_name] \ No newline at end of file diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index 20f5c530..fb65f349 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "5f2c350939cd9d95c60e40e233d28425a054d87b2c5116b5b51aab4d532b7e06" + "sha256": "afaf41071782a6af61cacb8ff605fba417d37cd93cc79d17726ecb0388430f99" }, "pipfile-spec": 6, "requires": {}, @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -98,7 +98,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "idna": { @@ -274,10 +274,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "typing": { "hashes": [ @@ -290,10 +290,10 @@ }, "urllib3": { "hashes": [ - "sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293", - "sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745" + "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", + "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" ], - "version": "==1.25.7" + "version": "==1.25.8" }, "zipp": { "hashes": [ @@ -331,7 +331,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -380,16 +380,16 @@ "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz", - "version": "== 1.1.0-internal-upgrade-007" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz", + "version": "== 
1.1.0-internal-upgrade-008" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-007.tar.gz", - "version": "== 1.1.0-internal-upgrade-007" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-008.tar.gz", + "version": "== 1.1.0-internal-upgrade-008" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-007.tar.gz", - "version": "== 1.1.0-internal-upgrade-007" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-008.tar.gz", + "version": "== 1.1.0-internal-upgrade-008" }, "entrypoints": { "hashes": [ @@ -422,7 +422,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "functools32": { @@ -430,7 +430,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { @@ -572,10 +572,10 @@ }, "six": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", + "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" ], - "version": "==1.13.0" + "version": "==1.14.0" }, "typing": { "hashes": [ diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 7d73c06f..6f607495 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,18 +1,18 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-007.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-007.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-007.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-008.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-008.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version == '2.7' +configparser==4.0.2 ; python_version < '3.2' contextlib2==0.6.0.post1 ; python_version < '3' coverage==5.0.3 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3' +funcsigs==1.0.2 ; python_version < '3.0' +functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==1.4.0 ; python_version < '3.8' @@ -30,7 +30,7 @@ pyparsing==2.4.6 pytest-cov==2.8.1 pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' wcwidth==0.1.8 yapf==0.28 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 8ac140fe..17e5ab15 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -4,12 +4,12 @@ certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version == '2.7' +configparser==4.0.2 ; python_version < '3.2' contextlib2==0.6.0.post1 ; 
python_version < '3' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3' +functools32==3.2.3.post2 ; python_version < '3.2' idna==2.8 importlib-metadata==1.4.0 ; python_version < '3.8' jinja2==2.10.3 @@ -25,7 +25,7 @@ pyrsistent==0.15.7 pyyaml==5.3 requests==2.22.0 scandir==1.10.0 ; python_version < '3.5' -six==1.13.0 +six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' -urllib3==1.25.7 +urllib3==1.25.8 zipp==1.0.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py index e3bc9bf0..bb1e5f47 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py @@ -269,3 +269,53 @@ def __init__(self, command, exit_code, output): "{} failed with exit code {}.").format( output, command, exit_code) super(SubprocessFailedError, self).__init__(message) + + +class ValidationFailedError(UserError): + """ + ValidationFailedError gets raised when validation fails on plugin config + and its contents. + Defines helpers methods to format warning and exception messages. + """ + def __init__(self, warnings): + message = self.__report_warnings_and_exceptions(warnings) + super(ValidationFailedError, self).__init__(message) + + @classmethod + def __report_warnings_and_exceptions(cls, warnings): + """ + Prints the warnings and errors that were found in the plugin code, if + the warnings dictionary contains the 'exception' key. + """ + exception_msg = cls.sdk_exception_msg(warnings) + exception_msg += cls.exception_msg(warnings) + exception_msg += '\n{}'.format(cls.warning_msg(warnings)) + return '{}\n{} Warning(s). {} Error(s).'.format( + exception_msg, len(warnings['warning']), + len(warnings['exception']) + len(warnings['sdk exception'])) + + @classmethod + def sdk_exception_msg(cls, warnings): + sdk_exception_msg = '\n'.join([ + cls.__format_msg('SDK Error', ex) + for ex in warnings['sdk exception'] + ]) + return sdk_exception_msg + + @classmethod + def exception_msg(cls, exceptions): + exception_msg = '\n'.join( + cls.__format_msg('Error', ex) for ex in exceptions['exception']) + return exception_msg + + @classmethod + def warning_msg(cls, warnings): + warning_msg = '\n'.join( + cls.__format_msg('Warning', warning) + for warning in warnings['warning']) + return warning_msg + + @staticmethod + def __format_msg(msg_type, msg): + msg_str = "{}: {}".format(msg_type, msg) + return msg_str diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 8924de04..b38ebfb3 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -2,53 +2,22 @@ # Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
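`ValidationFailedError` above consumes a dict of message lists keyed `'warning'`, `'exception'` and `'sdk exception'` and closes its message with a severity summary. A hypothetical sketch of that structure and of how the summary line is computed; the messages themselves are invented examples.

```python
from collections import defaultdict

warnings = defaultdict(list)
warnings['exception'].append('Named argument mismatch in method status.')
warnings['warning'].append(
    'Implementation missing for required method virtual.reconfigure().')

# The error message ends with this summary of what was collected.
summary = '{} Warning(s). {} Error(s).'.format(
    len(warnings['warning']),
    len(warnings['exception']) + len(warnings['sdk exception']))
print(summary)  # 1 Warning(s). 1 Error(s).
```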
# import importlib -import inspect import logging import os import sys -from collections import defaultdict +from collections import defaultdict, namedtuple from multiprocessing import Process, Queue import yaml from dlpx.virtualization._internal import const, exceptions -from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE -from flake8.api import legacy as flake8 +from dlpx.virtualization.platform import import_util logger = logging.getLogger(__name__) PLUGIN_IMPORTER_YAML = os.path.join(const.PLUGIN_SCHEMAS_DIR, 'plugin_importer.yaml') - -class MessageUtils: - """ - Defines helpers methods to format warning and exception messages. - """ - @classmethod - def sdk_exception_msg(cls, warnings): - sdk_exception_msg = '\n'.join([ - cls.__format_msg('SDK Error', ex) - for ex in warnings['sdk exception'] - ]) - return sdk_exception_msg - - @classmethod - def exception_msg(cls, exceptions): - exception_msg = '\n'.join( - cls.__format_msg('Error', ex) for ex in exceptions['exception']) - return exception_msg - - @classmethod - def warning_msg(cls, warnings): - warning_msg = '\n'.join( - cls.__format_msg('Warning', warning) - for warning in warnings['warning']) - return warning_msg - - @staticmethod - def __format_msg(msg_type, msg): - msg_str = "{}: {}".format(msg_type, msg) - return msg_str +validation_result = namedtuple('validation_result', ['plugin_manifest']) def load_validation_maps(): @@ -63,16 +32,13 @@ def load_validation_maps(): class PluginImporter: """ Import helper class for the plugin. Imports the plugin module in a sub - process to ensure its isolated and does not pollute caller's runtime. + process to ensure it's isolated and does not pollute caller's runtime. On successful import, callers can get the manifest describing what methods are implemented in the plugin code. If import fails or has issues with validation of module content and entry points- will save errors/warnings in a dict that callers can access. """ v_maps = load_validation_maps() - expected_staged_args_by_op = v_maps['EXPECTED_STAGED_ARGS_BY_OP'] - expected_direct_args_by_op = v_maps['EXPECTED_DIRECT_ARGS_BY_OP'] - expected_upgrade_args = v_maps['EXPECTED_UPGRADE_ARGS'] required_methods_by_plugin_type = v_maps['REQUIRED_METHODS_BY_PLUGIN_TYPE'] required_methods_description = v_maps['REQUIRED_METHODS_DESCRIPTION'] @@ -87,12 +53,18 @@ def __init__(self, self.__plugin_entry_point = entry_point self.__plugin_type = plugin_type self.__validate = validate + self.__post_import_checks = [self.__check_for_required_methods] + + @property + def result(self): + return validation_result(plugin_manifest=self.__plugin_manifest) - def import_plugin(self): + def validate_plugin_module(self): """ - Imports the plugin module, does basic validation. + Imports the plugin module, does post import validation. Returns: plugin manifest - dict describing methods implemented in the plugin + is available to callers via the result property. NOTE: Importing module in the current context pollutes the runtime of the caller, in this case dvp. If the module being imported, for @@ -102,22 +74,10 @@ def import_plugin(self): in a sub-process and on completion return the output. 
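The note above is why the importer never imports the plugin module directly: the import is delegated to a child process and the result comes back over a queue, so the caller's runtime (dvp) is not polluted. A minimal, self-contained sketch of that pattern; the module names and payloads are illustrative, not the SDK's actual manifest.

```python
import importlib
from multiprocessing import Process, Queue


def _import_worker(queue, module_name):
    # Runs in the child process, so the parent's sys.modules is never touched.
    try:
        module = importlib.import_module(module_name)
        queue.put({'manifest': sorted(dir(module))})
    except ImportError as err:
        queue.put({'exception': str(err)})


def import_in_subprocess(module_name):
    queue = Queue()
    proc = Process(target=_import_worker, args=(queue, module_name))
    proc.start()
    proc.join()
    return queue.get()


if __name__ == '__main__':
    result = import_in_subprocess('json')          # {'manifest': [...]}
    failure = import_in_subprocess('no_such_mod')  # {'exception': '...'}
    print(sorted(result.keys()), sorted(failure.keys()))
```

The single dict placed on the queue by the worker mirrors how `__parse_queue` separates `'manifest'` entries from warning and exception entries.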
""" logger.debug('Importing plugin module : %s', self.__plugin_module) + self.__plugin_manifest, warnings = self.__internal_import() + self.__run_checks(warnings) - self.__pre_import_checks() - plugin_manifest, warnings = self.__import_plugin() - self.__post_import_checks(plugin_manifest, warnings) - - return plugin_manifest - - def __pre_import_checks(self): - """ - Performs checks of the plugin code that should take place prior to - importing. - """ - warnings = self.__check_for_undefined_names(self.__src_dir) - self.__report_warnings_and_exceptions(warnings) - - def __import_plugin(self): + def __internal_import(self): """ Imports the module in a sub-process to check for errors or issues. Also does an eval on the entry point. @@ -140,40 +100,6 @@ def __import_plugin(self): return plugin_manifest, warnings - def __post_import_checks(self, plugin_manifest, warnings): - """ - Performs checks of the plugin code that should take place after - importing. - """ - check_warnings = self.__check_for_required_methods( - plugin_manifest, self.__plugin_type) - - if check_warnings and 'warning' in check_warnings: - warnings['warning'].extend(check_warnings['warning']) - - self.__report_warnings_and_exceptions(warnings) - - @staticmethod - def __check_for_required_methods(plugin_manifest, plugin_type): - """ - Checks for required methods in the manifest and adds warnings for any - missing methods. - """ - warnings = defaultdict(list) - if not plugin_manifest: - return warnings - for method_key, method_name in \ - PluginImporter.required_methods_by_plugin_type[ - plugin_type].items(): - if plugin_manifest[method_key] is False: - warnings['warning'].append( - 'Implementation missing ' - 'for required method {}. The Plugin Operation \'{}\' ' - 'will fail when executed.'.format( - method_name, PluginImporter. - required_methods_description[method_key])) - return warnings - @staticmethod def __import_in_subprocess(src_dir, module, entry_point, plugin_type, validate): @@ -210,59 +136,52 @@ def __parse_queue(queue): return manifest, warnings - @staticmethod - def __check_for_undefined_names(src_dir): + def __run_checks(self, warnings): """ - Checks the plugin module for undefined names. This catches - missing imports, references to nonexistent variables, etc. - - ..note:: - We are using the legacy flake8 api, because there is currently - no public, stable api for flake8 >= 3.0.0 - - For more info, see - https://flake8.pycqa.org/en/latest/user/python-api.html + Performs checks of the plugin code that should take place after + importing. 
""" - warnings = defaultdict(list) - exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) - style_guide = flake8.get_style_guide(select=["F821"], - exclude=[exclude_dir], - quiet=1) - style_guide.check_files(paths=[src_dir]) - file_checkers = style_guide._application.file_checker_manager.checkers - for checker in file_checkers: - for result in checker.results: - # From the api code, result is a tuple defined as: error = - # (error_code, line_number, column, text, physical_line) - if result[0] == 'F821': - msg = "{} on line {} in {}".format(result[3], result[1], - checker.filename) - warnings['exception'].append(exceptions.UserError(msg)) + for check in self.__post_import_checks: + check_warnings = check() + if check_warnings and 'warning' in check_warnings: + warnings['warning'].extend(check_warnings['warning']) - return warnings - - @staticmethod - def __report_warnings_and_exceptions(warnings): + if warnings: + if 'exception' in warnings: + raise exceptions.ValidationFailedError(warnings) + if 'sdk exception' in warnings: + sdk_exception_msg =\ + exceptions.ValidationFailedError(warnings).message + raise exceptions.SDKToolingError(sdk_exception_msg) + + if 'warning' in warnings: + # + # Use the ValidationFailedError type to get a formatted message + # with number of warnings included in the message. + # + warning_msg = exceptions.ValidationFailedError( + warnings).message + logger.warn(warning_msg) + + def __check_for_required_methods(self): """ - Prints the warnings and errors that were found in the plugin code, if - the warnings dictionary contains the 'sdk exception' key this means - there was an sdk error and we should throw the error as such. + Checks for required methods in the manifest and adds warnings for any + missing methods. """ - if warnings: - final_message = '\n'.join( - filter(None, [ - MessageUtils.sdk_exception_msg(warnings), - MessageUtils.exception_msg(warnings), - MessageUtils.warning_msg(warnings), - '{} Warning(s). {} Error(s).'.format( - len(warnings['warning']), - len(warnings['exception']) + - len(warnings['sdk exception'])) - ])) - if warnings['sdk exception']: - raise exceptions.SDKToolingError(final_message) - elif warnings['exception']: - raise exceptions.UserError(final_message) + warnings = defaultdict(list) + if not self.__plugin_manifest: + return warnings + for method_key, method_name in \ + PluginImporter.required_methods_by_plugin_type[ + self.__plugin_type].items(): + if self.__plugin_manifest[method_key] is False: + warnings['warning'].append( + 'Implementation missing ' + 'for required method {}. The Plugin Operation \'{}\' ' + 'will fail when executed.'.format( + method_name, PluginImporter. + required_methods_description[method_key])) + return warnings def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): @@ -270,27 +189,8 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): sys.path.append(src_dir) try: module_content = importlib.import_module(module) - manifest = _validate_and_get_manifest(module, module_content, - entry_point) - - if validate: - # - # Validated methods args against expected args and add any - # resulting warnings to the queue for caller to process. - # These warnings should be treated as an exception to make - # sure build fails. 
- # - warnings = _validate_named_args(module_content, entry_point, - plugin_type) - if warnings: - map(lambda warning: queue.put({'exception': warning}), - warnings) - except ImportError as err: + except (ImportError, TypeError) as err: queue.put({'exception': err}) - except exceptions.UserError as user_err: - queue.put({'exception': user_err}) - except RuntimeError as rt_err: - queue.put({'exception': rt_err}) except Exception as err: # # We need to figure out if this is an error that was raised inside the @@ -310,49 +210,54 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): # error = exceptions.SDKToolingError(str(err)) queue.put({'sdk exception': error}) + finally: sys.path.remove(src_dir) + # + # Create an instance of plugin module with associated state to pass around + # to the validation code. + # + plugin_module = import_util.PluginModule(src_dir, module, entry_point, + plugin_type, module_content, + PluginImporter.v_maps, validate) + + # Validate if the module imported fine and is the expected one. + warnings = import_util.validate_import(plugin_module) + _process_warnings(queue, warnings) + + # If the import itself had issues, no point validating further. + if warnings and len(warnings) > 0: + return + + # Run post import validations and consolidate issues. + warnings = import_util.validate_post_import(plugin_module) + _process_warnings(queue, warnings) + + manifest = _prepare_manifest(entry_point, module_content) queue.put({'manifest': manifest}) -def _validate_and_get_manifest(module, module_content, entry_point): +def _process_warnings(queue, warnings): + for warning in warnings: + queue.put({'exception': warning}) + + +def _prepare_manifest(entry_point, module_content): """ Creates a plugin manifest indicating which plugin operations have been implemented by a plugin developer. Plugin_module_content is a module object which must have plugin_entry_point_name as one of its attributes. Args: - module: name of the module imported - module_content: plugin module content from import entry_point: name of entry point to the above plugin module + module_content: plugin module content from import Returns: dict: dictionary that represents plugin's manifest """ - # This should never happen and if it does, flag a run time error. - if module_content is None: - raise RuntimeError('Plugin module content is None.') - - # - # Schema validation on plugin config file would have ensured entry - # is a string and should never happen its none - so raise a run time - # error if it does. - # - if entry_point is None: - raise RuntimeError('Plugin entry point object is None.') - - if not hasattr(module_content, entry_point): - raise exceptions.UserError( - 'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol' - ' in module \'{}\'.'.format(module, entry_point, entry_point, - module)) plugin_object = getattr(module_content, entry_point) - if plugin_object is None: - raise exceptions.UserError('Plugin object retrieved from the entry' - ' point {} is None'.format(entry_point)) - # Check which methods on the plugin object have been implemented. manifest = { 'type': @@ -400,101 +305,3 @@ def _validate_and_get_manifest(module, module_content, entry_point): } return manifest - - -def _validate_named_args(module_content, entry_point, plugin_type): - """ - Does named argument validation based on the plugin type. 
- """ - warnings = [] - - plugin_object = getattr(module_content, entry_point) - - # Iterate over attributes objects of the Plugin object - for plugin_attrib in plugin_object.__dict__.values(): - # - # For each plugin attribute object, its __dict__.keys will give - # us the name of the plugin implemntation method name. That name - # is useful in looking up named arguments expected and what is - # actually in the plugin code. And plugin_op_type can be, for e.g. - # LinkedOperations, DiscoveryOperations, VirtualOperations. - # UpgradeOperations will need to be handled separately because it's - # attributes are different. - # - plugin_op_type = plugin_attrib.__class__.__name__ - if plugin_op_type == 'UpgradeOperations': - # - # Handle the upgrade operations separately because they aren't - # just functions. - # - warnings.extend(_check_upgrade_operations(plugin_attrib)) - continue - for op_name_key, op_name in vars(plugin_attrib).items(): - if op_name is None: - continue - actual = inspect.getargspec(op_name) - warnings.extend( - _check_args(method_name=op_name.__name__, - expected_args=_lookup_expected_args( - plugin_type, plugin_op_type, op_name_key), - actual_args=actual.args)) - - return warnings - - -def _check_upgrade_operations(upgrade_operations): - """ - Does named argument validation of all functions in dictionaries by looping - first through all the attributes in the UpgradeOperations for this plugin. - Any attributes that are not dictionaries that map migration_id -> - upgrade_function are skipped. We then loop through every key/value pair - of each of the dictionaries and validate that the argument in the defined - function has the expected name. - """ - warnings = [] - - for attribute_name, attribute in vars(upgrade_operations).items(): - if attribute_name not in PluginImporter.expected_upgrade_args.keys(): - # Skip if not in one of the operation dicts we store functions in. - continue - # - # If the attribute_name was in the expected upgrade dicts then we know - # it is a dict containing migration id -> upgrade function that we can - # iterate on. - # - for migration_id, migration_func in attribute.items(): - actual = inspect.getargspec(migration_func).args - expected = PluginImporter.expected_upgrade_args[attribute_name] - warnings.extend( - _check_args(method_name=migration_func.__name__, - expected_args=expected, - actual_args=actual)) - - return warnings - - -def _check_args(method_name, expected_args, actual_args): - warnings = [] - - if len(expected_args) != len(actual_args): - warnings.append('Number of arguments do not match in method {}.' - ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - if not all(arg in expected_args for arg in actual_args): - warnings.append('Named argument mismatch in method {}.' 
- ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - return warnings - - -def _lookup_expected_args(plugin_type, plugin_op_type, plugin_op_name): - if plugin_type == const.DIRECT_TYPE: - return PluginImporter.expected_direct_args_by_op[plugin_op_type][ - plugin_op_name] - else: - return PluginImporter.expected_staged_args_by_op[plugin_op_type][ - plugin_op_name] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 5123a366..1a210423 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -7,7 +7,8 @@ import os from contextlib import contextmanager -from dlpx.virtualization._internal import const, exceptions +from dlpx.virtualization._internal import const, exceptions, file_util +from dlpx.virtualization._internal.plugin_importer import PluginImporter from dlpx.virtualization._internal.plugin_validator import PluginValidator from dlpx.virtualization._internal.schema_validator import SchemaValidator @@ -77,17 +78,19 @@ def get_plugin_manifest(plugin_config_file, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else - const.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator.from_config_content(plugin_config_file, - plugin_config_content, - plugin_config_schema_file) + src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = PluginValidator.split_entry_point( + plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + importer = PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) with validate_error_handler(plugin_config_file, validation_mode): - validator.validate_plugin_module() + importer.validate_plugin_module() - return validator.result + return importer.result def validate_schema_file(schema_file, stop_build): diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py index e4a1d572..46c46435 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py @@ -5,17 +5,17 @@ import json import logging import os -from collections import namedtuple +from collections import defaultdict, namedtuple import yaml -from dlpx.virtualization._internal import (exceptions, file_util, - plugin_importer) +from dlpx.virtualization._internal import exceptions +from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE +from flake8.api import legacy as flake8 from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple('validation_result', - ['plugin_config_content', 'plugin_manifest']) +validation_result = namedtuple('validation_result', ['plugin_config_content']) class PluginValidator: @@ -37,12 +37,16 @@ def __init__(self, self.__plugin_config_schema = plugin_config_schema self.__plugin_config_content = plugin_config_content self.__plugin_manifest = None + self.__pre_import_checks = [ + self.__validate_plugin_config_content, + self.__validate_plugin_entry_point, + self.__check_for_undefined_names + ] @property def result(self): 
return validation_result( - plugin_config_content=self.__plugin_config_content, - plugin_manifest=self.__plugin_manifest) + plugin_config_content=self.__plugin_config_content) @classmethod def from_config_content(cls, plugin_config_file, plugin_config_content, @@ -60,37 +64,21 @@ def from_config_content(cls, plugin_config_file, plugin_config_content, def validate_plugin_config(self): """ Reads a plugin config file and validates the contents using a - pre-defined schema. If validation is successful, tries to import - the plugin module and validates the entry point specified. + pre-defined schema. """ - logger.info('Reading plugin config file %s', self.__plugin_config) - if self.__plugin_config_content is None: self.__plugin_config_content = self.__read_plugin_config_file() logger.debug('Validating plugin config file content : %s', self.__plugin_config_content) - self.__validate_plugin_config_content() - - def validate_plugin_module(self): - """ - Tries to import the plugin module and validates the entry point - specified. - """ - self.validate_plugin_config() - - src_dir = file_util.get_src_dir_path( - self.__plugin_config, self.__plugin_config_content['srcDir']) - - logger.debug('Validating plugin entry point : %s', - self.__plugin_config_content['entryPoint']) - self.__validate_plugin_entry_point(src_dir) + self.__run_checks() def __read_plugin_config_file(self): """ Reads a plugin config file and raises UserError if there is an issue reading the file. """ + logger.info('Reading plugin config file %s', self.__plugin_config) try: with open(self.__plugin_config, 'rb') as f: try: @@ -111,6 +99,18 @@ def __read_plugin_config_file(self): '\nError code: {}. Error message: {}'.format( self.__plugin_config, err.errno, os.strerror(err.errno))) + def __run_checks(self): + """ + Runs validations on the plugin config content and raise exceptions + if any. + """ + # + # All the pre-import checks need to happen in sequence. So no point + # validating further if a check fails. + # + for check in self.__pre_import_checks: + check() + def __validate_plugin_config_content(self): """ Validates the given plugin configuration is valid. @@ -170,43 +170,55 @@ def __validate_plugin_config_content(self): raise exceptions.SchemaValidationError(self.__plugin_config, validation_errors) - def __validate_plugin_entry_point(self, src_dir): + def __validate_plugin_entry_point(self): """ Validates the plugin entry point by parsing the entry - point to get module and entry point. Imports the module - to check for errors or issues. Also does an eval on the - entry point. + point to get module and entry point. """ - entry_point_field = self.__plugin_config_content['entryPoint'] - entry_point_strings = entry_point_field.split(':') - # Get the module and entry point name to import - entry_point_module = entry_point_strings[0] - entry_point_object = entry_point_strings[1] - plugin_type = self.__plugin_config_content['pluginType'] + entry_point_module, entry_point_object = self.split_entry_point( + self.__plugin_config_content['entryPoint']) - try: - self.__plugin_manifest = (self.__import_plugin( - src_dir, entry_point_module, entry_point_object, plugin_type)) - except ImportError as err: - raise exceptions.UserError( - 'Unable to load module \'{}\' specified in ' - 'pluginEntryPoint \'{}\' from path \'{}\'. 
' - 'Error message: {}'.format(entry_point_module, - entry_point_object, src_dir, err)) + if not entry_point_module: + raise exceptions.UserError('Plugin module is invalid') - logger.debug("Got manifest %s", self.__plugin_manifest) + if not entry_point_object: + raise exceptions.UserError('Plugin object is invalid') - @staticmethod - def __import_plugin(src_dir, entry_point_module, entry_point_object, - plugin_type): + def __check_for_undefined_names(self): """ - Imports the given python module, does some validations ans returns the - manifest describing implemented plugin operations. + Checks the plugin module for undefined names. This catches + missing imports, references to nonexistent variables, etc. + + ..note:: + We are using the legacy flake8 api, because there is currently + no public, stable api for flake8 >= 3.0.0 + + For more info, see + https://flake8.pycqa.org/en/latest/user/python-api.html """ - importer = plugin_importer.PluginImporter(src_dir, entry_point_module, - entry_point_object, - plugin_type, True) - manifest = importer.import_plugin() + warnings = defaultdict(list) + src_dir = self.__plugin_config_content['srcDir'] + exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) + style_guide = flake8.get_style_guide(select=["F821"], + exclude=[exclude_dir], + quiet=1) + style_guide.check_files(paths=[src_dir]) + file_checkers = style_guide._application.file_checker_manager.checkers + + for checker in file_checkers: + for result in checker.results: + # From the api code, result is a tuple defined as: error = + # (error_code, line_number, column, text, physical_line) + if result[0] == 'F821': + msg = "{} on line {} in {}".format(result[3], result[1], + checker.filename) + warnings['exception'].append(exceptions.UserError(msg)) + + if warnings and len(warnings) > 0: + raise exceptions.ValidationFailedError(warnings) - return manifest + @staticmethod + def split_entry_point(entry_point): + entry_point_strings = entry_point.split(':') + return entry_point_strings[0], entry_point_strings[1] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 490b4b78..a1a62b6c 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -26,7 +26,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-007 +package_version = 1.1.0-internal-upgrade-008 virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 31d2cb35..f79fc911 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -10,7 +10,7 @@ import yaml from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.commands import build -from dlpx.virtualization._internal.plugin_validator import PluginValidator +from dlpx.virtualization._internal.plugin_importer import PluginImporter @pytest.fixture @@ -57,9 +57,9 @@ def test_build_success(mock_relative_path, mock_install_deps, @staticmethod @pytest.mark.parametrize('artifact_filename', ['somefile.json']) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value={}) + 
@mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', + return_value=({}, None)) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') @mock.patch( 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') @@ -341,8 +341,8 @@ def test_zip_and_encode_source_files_encode_fail(mock_encode, src_dir): ''.format(src_dir, 'something')) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @mock.patch( 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') @@ -358,8 +358,8 @@ def test_id_validation_positive(mock_relative_path, mock_install_deps, skip_id_validation) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @pytest.mark.parametrize('plugin_id', ['mongo']) def test_id_validation_negative(mock_import_plugin, plugin_config_file, diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 9988c398..985765d4 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -156,24 +156,13 @@ def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, validator = plugin_validator.PluginValidator(plugin_config_file, schema_file) - # Assert config file and import validations are not done. + # Assert config file validation is not done. assert not validator.result.plugin_config_content - assert not validator.result.plugin_manifest validator.validate_plugin_config() - # Assert config file is validated and import validation is not done. + # Assert config file is validated. assert validator.result.plugin_config_content - assert not validator.result.plugin_manifest - - validator.validate_plugin_module() - - # - # Assert both config content and import validation are done and result - # tuple has both set to valid values. - # - assert validator.result.plugin_config_content - assert validator.result.plugin_manifest @staticmethod def test_invalid_with_config_file(plugin_config_file): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 004a258f..09ec8b4a 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -8,7 +8,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-007' + assert package_util.get_version() == '1.1.0-internal-upgrade-008' @staticmethod def test_get_virtualization_api_version(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 4136e990..6949f2f5 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -2,12 +2,42 @@ # Copyright (c) 2019 by Delphix. All rights reserved. 
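The test changes above patch PluginImporter's private `__internal_import` method, which is why the mangled attribute name `_PluginImporter__internal_import` appears in the `mock.patch.object` calls. A standalone reminder of how name mangling interacts with patching; `Importer` is a made-up class, and `mock` is the standalone package already pinned in the lockfiles.

```python
import mock


class Importer(object):
    def __internal_import(self):
        raise RuntimeError('would import the real plugin module')

    def validate(self):
        # Inside the class this call is mangled to
        # self._Importer__internal_import, which is what the patch targets.
        return self.__internal_import()


with mock.patch.object(Importer, '_Importer__internal_import',
                       return_value=({}, None)):
    assert Importer().validate() == ({}, None)
```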
# import exceptions +import os +import uuid +from collections import OrderedDict import mock import pytest +import yaml +from dlpx.virtualization._internal import (file_util, plugin_util, + plugin_validator) from dlpx.virtualization._internal.plugin_importer import PluginImporter +@pytest.fixture +def fake_src_dir(plugin_type): + """ + This fixture gets the path of the fake plugin src files used for testing + """ + return os.path.join(os.path.dirname(__file__), 'fake_plugin', + plugin_type.lower()) + + +def get_plugin_importer(plugin_config_file): + plugin_config_content = None + with open(plugin_config_file, 'rb') as f: + plugin_config_content = yaml.safe_load(f) + + src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = plugin_validator.PluginValidator\ + .split_entry_point(plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + return PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) + + class TestPluginImporter: @staticmethod @mock.patch('importlib.import_module') @@ -15,11 +45,12 @@ def test_get_plugin_manifest(mock_import, src_dir, plugin_type, entry_point_module, entry_point_object, plugin_module_content, plugin_manifest): mock_import.return_value = plugin_module_content + importer = PluginImporter(src_dir, entry_point_module, entry_point_object, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() - assert manifest == plugin_manifest + assert importer.result.plugin_manifest == plugin_manifest @staticmethod @mock.patch('importlib.import_module') @@ -27,15 +58,16 @@ def test_plugin_module_content_none(mock_import, src_dir, plugin_type, entry_point_module, entry_point_object): mock_import.return_value = None - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, entry_point_module, entry_point_object, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = str(err_info) - assert manifest == {} + assert result == () assert 'Plugin module content is None.' in message @staticmethod @@ -43,15 +75,16 @@ def test_plugin_module_content_none(mock_import, src_dir, plugin_type, def test_plugin_entry_object_none(mock_import, src_dir, plugin_type, plugin_name, plugin_module_content): mock_import.return_value = plugin_module_content - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, None, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = str(err_info) - assert manifest == {} + assert result == () assert 'Plugin entry point object is None.' 
in message @staticmethod @@ -61,15 +94,16 @@ def test_plugin_entry_point_nonexistent(mock_import, src_dir, plugin_type, plugin_module_content): entry_point_name = "nonexistent entry point" mock_import.return_value = plugin_module_content - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, entry_point_name, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert manifest == {} + assert result == () assert ('\'{}\' is not a symbol in module'.format(entry_point_name) in message) @@ -81,14 +115,139 @@ def test_plugin_object_none(mock_import, src_dir, plugin_type, plugin_name, setattr(plugin_module_content, none_entry_point, None) mock_import.return_value = plugin_module_content - manifest = {} + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, none_entry_point, plugin_type, False) - manifest = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert manifest == {} + assert result == () assert ('Plugin object retrieved from the entry point {} is' ' None'.format(none_entry_point)) in message + + @staticmethod + @pytest.mark.parametrize('entry_point,plugin_type', + [('successful:staged', 'STAGED'), + ('successful:direct', 'DIRECT')]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_successful_validation(mock_file_util, plugin_config_file, + fake_src_dir): + mock_file_util.return_value = fake_src_dir + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,plugin_type,expected_errors', + [('multiple_warnings:staged', 'STAGED', [ + 'Error: Named argument mismatch in method repository_discovery', + 'Error: Number of arguments do not match in method stop', + 'Error: Named argument mismatch in method stop', + 'Warning: Implementation missing for required method' + ' virtual.mount_specification().', '1 Warning(s). 3 Error(s).' + ]), + ('multiple_warnings:vfiles', 'DIRECT', [ + 'Error: Number of arguments do not match in method status', + 'Error: Named argument mismatch in method status', + 'Warning: Implementation missing for required method' + ' virtual.reconfigure().', '1 Warning(s). 2 Error(s).' + ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_multiple_warnings(mock_file_util, plugin_config_file, + fake_src_dir, expected_errors): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_errors', [('upgrade_warnings:direct', [ + 'Error: Named argument mismatch in method snap_upgrade.', + 'Error: Number of arguments do not match in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + '0 Warning(s). 4 Error(s).' 
+ ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, + expected_errors): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_error', + [('op_already_defined:plugin', 'has already been defined'), + ('dec_not_function:plugin', "decorated by 'linked.pre_snapshot()'" + " is not a function"), + ('id_not_string:plugin', "The migration id '['testing', 'out'," + " 'validation']' used in the function" + " 'repo_upgrade' should be a string."), + ('id_bad_format:plugin', "used in the function 'repo_upgrade' does" + " not follow the correct format"), + ('id_used:plugin', "'5.04.000.01' used in the function 'snap_upgrade'" + " has the same canonical form '5.4.0.1' as another migration")]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, + expected_error): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + assert expected_error in message + assert '0 Warning(s). 1 Error(s).' in message + + @staticmethod + @pytest.mark.parametrize('entry_point', ['arbitrary_error:plugin']) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.SDKToolingError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + assert ('SDK Error: Got an arbitrary non-platforms error for testing.' + in message) + assert '0 Warning(s). 1 Error(s).' 
in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @mock.patch('importlib.import_module') + def test_plugin_info_warn_mode(mock_import, mock_relative_path, + plugin_config_file, src_dir, + plugin_module_content): + plugin_config_content = OrderedDict([ + ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), + ('version', '0.1.0'), ('language', 'PYTHON27'), + ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), + ('manualDiscovery', True), + ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), + ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) + ]) + mock_import.return_value = plugin_module_content + try: + plugin_util.get_plugin_manifest(plugin_config_file, + plugin_config_content, False) + except Exception: + raise AssertionError() diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index ec9b60ac..35711d5e 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -3,23 +3,13 @@ # import json -import os import mock import pytest -from dlpx.virtualization._internal import const, exceptions, plugin_util +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.plugin_validator import PluginValidator -@pytest.fixture -def fake_src_dir(plugin_type): - """ - This fixture gets the path of the fake plugin src files used for testing - """ - return os.path.join(os.path.dirname(__file__), 'fake_plugin', - plugin_type.lower()) - - class TestPluginValidator: @staticmethod @pytest.mark.parametrize( @@ -52,17 +42,12 @@ def test_plugin_bad_config_file(plugin_config_file): @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - def test_plugin_valid_content(mock_import_plugin, src_dir, - plugin_config_file, plugin_config_content): + def test_plugin_valid_content(src_dir, plugin_config_file, + plugin_config_content): validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - mock_import_plugin.assert_called() + validator.validate_plugin_config() @staticmethod @pytest.mark.parametrize('src_dir', [None]) @@ -77,31 +62,22 @@ def test_plugin_missing_field(plugin_config_file, plugin_config_content): @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) @pytest.mark.parametrize('version,expected', [('xxx', "'xxx' does not match"), ('1.0.0', None), ('1.0.0_HF', None)]) - def test_plugin_version_format(mock_import_plugin, src_dir, - plugin_config_file, plugin_config_content, - expected): - + def test_plugin_version_format(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - mock_import_plugin.assert_called() + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - 
'_PluginValidator__import_plugin', - return_value=({}, None)) @pytest.mark.parametrize( 'entry_point,expected', [('staged_plugin', "'staged_plugin' does not match"), @@ -110,15 +86,13 @@ def test_plugin_version_format(mock_import_plugin, src_dir, ('staged_plugin::staged', "'staged_plugin::staged' does not match"), (':staged_plugin:staged:', "':staged_plugin:staged:' does not match"), ('staged_plugin:staged', None)]) - def test_plugin_entry_point(mock_import_plugin, src_dir, - plugin_config_file, plugin_config_content, - expected): + def test_plugin_entry_point(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - mock_import_plugin.assert_called() + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @@ -155,9 +129,6 @@ def test_multiple_validation_errors(plugin_config_file, @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) @pytest.mark.parametrize( 'plugin_id , expected', [('Staged_plugin', "'Staged_plugin' does not match"), @@ -171,137 +142,13 @@ def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - mock_import_plugin.assert_called() + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message - @staticmethod - def test_plugin_info_warn_mode(plugin_config_file, plugin_config_content): - err_info = None - try: - plugin_util.get_plugin_manifest(plugin_config_file, - plugin_config_content, False) - except Exception as e: - err_info = e - - assert err_info is None - - @staticmethod - @pytest.mark.parametrize('entry_point,plugin_type', - [('successful:staged', 'STAGED'), - ('successful:direct', 'DIRECT')]) - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_successful_validation(mock_file_util, plugin_config_file, - fake_src_dir): - mock_file_util.return_value = fake_src_dir - - validator = PluginValidator(plugin_config_file, - const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - @staticmethod - @pytest.mark.parametrize( - 'entry_point,plugin_type,expected_errors', - [('multiple_warnings:staged', 'STAGED', [ - 'Error: Named argument mismatch in method repository_discovery', - 'Error: Number of arguments do not match in method stop', - 'Error: Named argument mismatch in method stop', - 'Warning: Implementation missing for required method' - ' virtual.mount_specification().', '1 Warning(s). 3 Error(s).' - ]), - ('multiple_warnings:vfiles', 'DIRECT', [ - 'Error: Number of arguments do not match in method status', - 'Error: Named argument mismatch in method status', - 'Warning: Implementation missing for required method' - ' virtual.reconfigure().', '1 Warning(s). 2 Error(s).' 
- ])]) - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_multiple_warnings(mock_file_util, plugin_config_file, - fake_src_dir, expected_errors): - mock_file_util.return_value = fake_src_dir - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(plugin_config_file, - const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - message = err_info.value.message - for error in expected_errors: - assert error in message - - @staticmethod - @pytest.mark.parametrize( - 'entry_point,expected_errors', [('upgrade_warnings:direct', [ - 'Error: Named argument mismatch in method snap_upgrade.', - 'Error: Number of arguments do not match in method ls_upgrade.', - 'Error: Named argument mismatch in method ls_upgrade.', - 'Error: Named argument mismatch in method ls_upgrade.', - '0 Warning(s). 4 Error(s).' - ])]) - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, - expected_errors): - mock_file_util.return_value = fake_src_dir - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(plugin_config_file, - const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - message = err_info.value.message - for error in expected_errors: - assert error in message - - @staticmethod - @pytest.mark.parametrize( - 'entry_point,expected_error', - [('op_already_defined:plugin', 'has already been defined'), - ('dec_not_function:plugin', "decorated by 'linked.pre_snapshot()'" - " is not a function"), - ('id_not_string:plugin', "The migration id '['testing', 'out'," - " 'validation']' used in the function" - " 'repo_upgrade' should be a string."), - ('id_bad_format:plugin', "used in the function 'repo_upgrade' does" - " not follow the correct format"), - ('id_used:plugin', "'5.04.000.01' used in the function 'snap_upgrade'" - " has the same canonical form '5.4.0.1' as another migration")]) - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, - expected_error): - mock_file_util.return_value = fake_src_dir - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(plugin_config_file, - const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - message = err_info.value.message - assert expected_error in message - assert '0 Warning(s). 1 Error(s).' in message - - @staticmethod - @pytest.mark.parametrize('entry_point', ['arbitrary_error:plugin']) - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): - mock_file_util.return_value = fake_src_dir - - with pytest.raises(exceptions.SDKToolingError) as err_info: - validator = PluginValidator(plugin_config_file, - const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - - message = err_info.value.message - assert ('SDK Error: Got an arbitrary non-platforms error for testing.' - in message) - assert '0 Warning(s). 1 Error(s).' 
in message - @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) @pytest.mark.parametrize('build_number, expected', [('xxx', "'xxx' does not match"), ('1', None), ('1.x', "'1.x' does not match"), ('1.100', None), @@ -312,16 +159,13 @@ def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): ('0', "'0' does not match"), ('0.0.00', "'0.0.00' does not match"), ('0.1', None)]) - def test_plugin_build_number_format(mock_import_plugin, src_dir, - plugin_config_file, + def test_plugin_build_number_format(src_dir, plugin_config_file, plugin_config_content, expected): - try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, const.PLUGIN_CONFIG_SCHEMA) - validator.validate_plugin_module() - mock_import_plugin.assert_called() + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message From 2ecdda020504daa9cc8147587033c6fa6a46ffbc Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Tue, 28 Jan 2020 16:59:05 -0800 Subject: [PATCH 14/25] PYT-1081 Merge SDK master to plugin-upgrade branch Reviewed at: http://reviews.delphix.com/r/55423/ --- .gitignore | 3 - .hooksconfig | 3 + CONTRIBUTING.md | 94 ++++ LICENSE | 202 ++++++++ README-dev.md | 142 ++++++ README.md | 149 +----- bin/build.sh | 43 -- bin/upload.sh | 14 +- build.gradle | 19 +- common/Pipfile.lock | 103 +++- common/build.gradle | 50 +- common/lock.dev-requirements.txt | 19 +- common/lock.requirements.txt | 5 +- .../proto/dlpx/virtualization/common.proto | 103 ---- .../virtualization/common/_common_classes.py | 2 +- common/src/test/java/NotUsed.java | 11 - .../common/test_common_classes.py | 10 +- .../virtualization/test_common_generated.py | 10 - dvp/build.gradle | 8 - libs/Pipfile.lock | 23 +- libs/build.gradle | 52 +- libs/lock.dev-requirements.txt | 4 +- libs/lock.requirements.txt | 5 +- .../main/proto/dlpx/virtualization/libs.proto | 122 ----- .../python/dlpx/virtualization/libs/libs.py | 2 +- libs/src/test/java/NotUsed.java | 11 - .../test_delphix_libs_generated.py | 4 +- .../python/dlpx/virtualization/test_libs.py | 2 +- .../dlpx/virtualization/test_logging.py | 6 +- platform/Pipfile.lock | 23 +- platform/build.gradle | 62 +-- platform/lock.dev-requirements.txt | 4 +- platform/lock.requirements.txt | 5 +- .../proto/dlpx/virtualization/platform.proto | 471 ------------------ .../dlpx/virtualization/platform/__init__.py | 1 + .../virtualization/platform/_discovery.py | 4 +- .../dlpx/virtualization/platform/_linked.py | 4 +- .../dlpx/virtualization/platform/_upgrade.py | 2 +- .../dlpx/virtualization/platform/_virtual.py | 4 +- .../dlpx/virtualization/platform/util.py | 11 + platform/src/test/java/NotUsed.java | 11 - .../test_delphix_platform_generated.py | 2 +- .../python/dlpx/virtualization/test_plugin.py | 5 +- .../dlpx/virtualization/test_upgrade.py | 2 +- tools/Pipfile.lock | 50 +- tools/build.gradle | 16 +- tools/lock.dev-requirements.txt | 11 +- tools/lock.requirements.txt | 4 +- .../virtualization/_internal/package_util.py | 17 +- .../_internal/plugin_importer.py | 1 - .../virtualization/_internal/settings.cfg | 6 - .../dlpx/virtualization/_internal/conftest.py | 2 +- .../_internal/test_package_util.py | 10 + 53 files changed, 726 insertions(+), 1223 deletions(-) create mode 100644 CONTRIBUTING.md create mode 100644 LICENSE create mode 100644 README-dev.md delete mode 100755 bin/build.sh 
delete mode 100644 common/src/main/proto/dlpx/virtualization/common.proto delete mode 100644 common/src/test/java/NotUsed.java delete mode 100644 common/src/test/python/dlpx/virtualization/test_common_generated.py delete mode 100644 libs/src/main/proto/dlpx/virtualization/libs.proto delete mode 100644 libs/src/test/java/NotUsed.java delete mode 100644 platform/src/main/proto/dlpx/virtualization/platform.proto create mode 100644 platform/src/main/python/dlpx/virtualization/platform/util.py delete mode 100644 platform/src/test/java/NotUsed.java diff --git a/.gitignore b/.gitignore index 403bc11b..62718d20 100644 --- a/.gitignore +++ b/.gitignore @@ -38,9 +38,6 @@ venv/ # Python cache __pycache__ -# Generated protobuf files -*_pb2.py - # Generated python build files Pipfile setup.py diff --git a/.hooksconfig b/.hooksconfig index 364afa5e..e5b34a31 100644 --- a/.hooksconfig +++ b/.hooksconfig @@ -9,3 +9,6 @@ [branch "master"] gate-allowed-issuetypes = 1,3,4,5,10001,10302 + + [branch "projects/plugin-upgrade"] + gate-allowed-issuetypes = 1,3,4,5,10001,10302 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..b721d1a9 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,94 @@ +# Virtualization SDK Contribution Guide + +*First of all, thanks for taking the time to contribute to the virtualization-sdk project!* + +By following these guidelines you can help us make this project even better. + +# Table of Contents +[Getting Started](#getting-started) + +[How to Build the SDK from Source](#how-to-build-the-sdk-from-source) + +[Asking for Help](#asking-for-help) + +[How to Contribute](#how-to-contribute) + + * [How to Raise Pull Requests](#how-to-raise-pull-requests) + * [Code Owners](#code-owners) + +[Testing and CI/CD](#testing-and-ci/cd) + +[Coding Guidelines](#coding-guidelines) + + * [Commit Message Format](#commit-message-format) + + +## Getting Started +The virtualization-sdk is distributed as a Python package called [dvp](https://pypi.org/project/dvp/). Install it in your local development environment so that you can build and upload a plugin. + + +## How to Build the SDK from Source +The virtualization-sdk repository can be built from source on GitHub as described below. + +### Fork the virtualization-sdk Repository + +First step is to fork the virtualization-sdk repository. Please refer to [Forking a GitHub Repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) for instructions. + +### Clone the virtualization-sdk Repository + +Once the virtualization-sdk repository is forked, clone the forked repository into a local copy on your computer. Please refer to [Cloning a Forked Repository](https://help.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository) for instructions. + +`git clone git@github.com:your-username/virtualization-sdk.git` + +### Build the virtualization-sdk Repository + +The virtualization-sdk repository has a Gradle task that creates a virtualenv and builds the code locally. It can be triggered from the root directory of the source tree using the command: + +`./gradlew build` + +### Run Unit Tests + +Unit tests for the virtualization-sdk repository can be triggered from the root directory of the source tree using Gradle with the command: + +`./gradlew test` + +## Asking for Help +Please raise a GitHub issue to ask for help with appropriate GitHub tag . + +## How to Contribute + +### How to Raise Pull Requests +This repository uses GitHub standard pull request model. 
Once the changes are made locally and committed to the forked repository and tested, a pull request can be raised using the pull request template for the changes to be reviewed. + +Some guidelines for Pull Requests: + +* All pull requests must be based on the current master branch and apply without conflicts. +* All GitHub Actions checks should succeed. Please refer to [Testing and CI/CD](#testing-and-cicd) for details. +* Please attempt to limit pull requests to a single commit which resolves one specific issue. +* Make sure your commit messages are in the correct format as specified at [Commit Message Format](#commit-message-format) +* When updating a pull request squash multiple commits into one and perform a rebase. You want all your changes to be included in one commit replayed on top of master branch of the virtualization-sdk. +* For large pull requests consider structuring your changes as a stack of logically independent patches which build on each other. This makes large changes easier to review and approve which speeds up the merging process. +* Try to keep pull requests simple. Simple code with comments is much easier to review and approve. +* Test cases should be provided when appropriate. + +Once the pull request has required approvals from code owners of the repository, the code owner will merge the pull request into the actual virtualization-sdk repository. + +### Code Owners +Code owners defined by the codeowners file in the repository are the gatekeepers of the repository. For a pull request to be merged, it requires approval from at least 2 codeowners. + +## Testing and CI/CD +CI/CD for this repository is managed through GitHub Actions. All the checks need to succeed for the pull request to be merged. + +## Coding Guidelines +### Commit Message Format +Commit messages for new changes must meet the following guidelines: +* Every commit message should have a GitHub issue id that it addresses and its title. +* Every commit message can have an optional Description of the issue. Though it is optional it is highly recommended to provide one to summarize important information about the fix. +* Each line of the description must be 72 characters or less. +* Sample commit message to address issue 123 with title "Format of error is incorrect": + + `Fixes #123 Format of error is incorrect` + + `Optional Description of the issue` + + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README-dev.md b/README-dev.md new file mode 100644 index 00000000..549c40ba --- /dev/null +++ b/README-dev.md @@ -0,0 +1,142 @@ +# Copyright (c) 2019 by Delphix. All rights reserved. + +# Delphix Virtualization SDK + +This README is for SDK developers. If you are a plugin developer please refer to [README.md](README.md). 
+ +The artifact produced by this repository is a set of Python distributions that make up the SDK. + +# Development process + +This repository is going through a lot of changes. It is being migrated to GitHub and open sourced. The development process will change throughout this process so please refer back to this README regularly to understand what the current development process is. + +At a very high level, our development process usually looks like this: + +1. Make changes to SDK and appgate code. Test these changes manually. Iterate on this until you have everything working. +2. Publish a development build of the SDK to artifactory. +3. Update the version of the SDK specified in the app gate. +4. Publish a review for SDK code, and also publish a "provisional" review of appgate code. Address any feedback. +5. Push the SDK code and publish new SDK builds to our internal servers. +6. Finalize your appgate review. +7. Push the appgate changes + +Not every type of change requires every step. + +These steps are described in more detail below. + +## Background + +There are two parts of the SDK that are important to think about separately since they have slightly different workflows. + +1. The `tools` package is the SDK's CLI. This aids in plugin development, testing, and distribution. +2. `common`, `libs`, and `platform` contain what are collectively called the "wrappers". These are vanilla Python classes that abstract the Virtualization API protobuf messages (published by app-gate) away from plugin developers. These expose the API plugin developers write against. + +All dependencies of a plugin must be packaged with the plugin including the protobuf messages (`dvp-api`) and the wrappers. This is done automatically by `dvp build`. + +This is what causes the slightly different workflows in development. Changes to `tools` are completely isolated from the Delphix Engine and wrappers changes only impact the plugin build. + +Unfortunately, at the moment _all_ SDK changes require an app-gate change. Currently BlackBox looks at a property file in the app-gate to determine which version of the SDK to install during tests. This will eventually change, but at the moment any SDK change needs to be accompanied by an app-gate change that, at a minimum, bumps this version. + +## Local SDK Development + +To setup local development, refer to README-dev.md in the `tools` directory. This walks through the setup of a local virtualenv for development. This should be done for _all_ SDK changes. + +### Configure pip index + +`dvp build` executes `pip` to install the wrappers. By default `pip` looks at pypi.org for packages to install. Internal builds of the SDK are published to artifactory, not pypi. In order to configure pip to look at artifactory, create a file at `/pip.conf` that contains: + +``` +[install] +trusted-host=artifactory.delphix.com +index-url=http://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ +``` + +### CLI changes + +To better understand how to develop and test `tools` changes, see README-dev.md in the `tools` directory. + +### Wrappers changes + +Run `dvp build --dev` to build your plugin and then upload it to a Delphix Engine to test. + +The wrappers are built with the plugin. `dvp build` has a hidden `--dev` flag. This builds `common`, `libs`, and `platform` locally and bundles them with the plugin. 
A special configuration entry is needed in your dvp config file which is located at `~/.dvp/config`: + +``` +[dev] +vsdk_root = /path/to/vsdk_repo_root +``` + +### Versioning +The first thing to do is to change the version number. There are _two_ places where the version needs to be changed unfortunately. For almost all cases, this will simply involve incrementing the "build number", which is the three-digit number at the very end of the version. + +1. In the root of the SDK codebase, open the `build.gradle` file, and change the version. +2. In `tools/src/main/python/dlpx/virtualization/_internal/settings.cfg` change the `package_version` property. + +The two versions should be identical. + +This repository is going through a transition in which Gradle will eventually be removed. The version string will eventually only live in the individual packages themselves. + +### Testing + +Currently, there are three types of SDK testing: unit, manual, and blackbox. + +#### Unit Testing + +Running `./gradlew test` from the top level of the repository will run all SDK unit tests. Smaller sets of tests can be run from inside each directory (`common`, `platform`, etc.) by going into that directory and running `../gradlew test`. These can also be run through your IDE. + +#### Testing sdk-gate changes with app-gate code + +At the moment blackbox refers to a property file in the app-gate to determine the version of the SDK to install for tests so this property always needs to be updated for automated testing. + +NOTE: The app-gate does not pull in the wrappers or CLI from this repository. + +The easiest way to do both of these is: + +1. Update the version of the SDK to something unique and clearly a development build. The standard is `x.y.z-internal-abc-`. For example, `1.1.0-internal-001-grant`. +2. Run `./gradlew publishDebug` from the root of this repository. +3. In `appliance/gradle.properties` in the app-gate update `virtualizationSdkVer` to match the SDK version. + +Run an appliance-update for manual testing and/or kick off automated blackbox tests by running `git blackbox -s appdata_python_samples` from your app-gate development branch. + + +## SDK Review and Provisional app-gate review + +Once you're finished with local development and testing, you can publish your final SDK review to reviewboard. + +In addition, it's customary to publish a "provisional" appgate review, so that people can get insight into how the out-for-review SDK changes will actually be used by the appgate. Of course, this review will contain all your temporary local-build changes mentioned above. So, in your review, you'll want to mention that these temporary changes will be reverted before the review is finalized. + +## Pushing and Deploying SDK Code + + +### Publishing + +There are two Gradle tasks that do publishing: `publishDebug` and `publishProd`. They differ in two ways: + +1. They publish the Python distributions to separate repositories on Artifactory. `publishDebug` uploads to `dvp-local-pypi`. This is a special repository that has been set up to test the SDK. It falls back to our production PyPI repository, but artifacts uploaded to `dvp-local-pypi` do not impact production artifacts. This should be used for testing. `publishProd` does upload the Python distributions to our production Artifactory PyPI repository, `delphix-local`. + +2. `publishProd` runs tests, formatting, and linting while `publishDebug` does not. + +NOTE: The external release to `pypi.org` is done outside of the build system. + +#### Setup + +1. 
There are two environment variables that need to be set in order to publish: `ARTIFACTORY_PYPI_USER` and `ARTIFACTORY_PYPI_PASS`. + + `ARTIFACTORY_PYPI_USER` and `ARTIFACTORY_PYPI_PASS` are one set of credentials used to upload the Python distributions to our internal PyPI repositories. The credentials are the same for both internal PyPI repositories mentioned above. + + - `ARTIFACTORY_PYPI_USER` and `ARTIFACTORY_PYPI_PASS` is the username/password combo given to you by whoever setup your Artifactory pypi account. This is an account separate from your Artifactory account. If you do not have one, please reach out to the `#artifactory` channel and request a `dvp-uploaders-python` account. See for directions on how to add the account. These are used to upload the Python distributions to our internal PyPI repositories. The credentials are the same for both internal PyPI repositories mentioned above. + +2. `twine` needs to be installed. This is a Python package that is used to upload Python distributions. If it's not installed, install it by running `pip install twine`. + +#### Final Publishing + +Once you are absolutely certain all changes have been made run `./gradlew publishProd`. This will run checks, create the Python distributions, and upload all of them to Artifactory with the Python distributions going to `delphix-local`. + +## Using Newly-Deployed SDK Build + +Now, we have to go back to our `appgate` code and make it point to the newly-deployed build on artifactory, instead of the local build we used to test. To achieve that, +modify `appliance/gradle.properties` and change `virtualizationSdkVer` to refer to your new version number. + +## Finalizing Appgate Review + +Once you've got the above changes completed, tested, and checked into git, you can update your appgate review. Now, your review will be ready for final ship-its. diff --git a/README.md b/README.md index b5372cf3..9299bf8b 100644 --- a/README.md +++ b/README.md @@ -1,152 +1,53 @@ -# Copyright (c) 2019 by Delphix. All rights reserved. - # Delphix Virtualization SDK This repository contains the Virtualization SDK for building custom data source integrations for the Delphix Dynamic Data Platform. -There are two sets of artifacts produced by this repository: a JAR containing autogenerated Java protobuf classes and a set of Python distributions that, together, make up the SDK. - -# Development process - -This repository is going through a lot of changes. It is being migrated to GitHub and open sourced. The development process will change throughout this process so please refer back to this README regularly to understand what the current development process is. - -At a very high level, our development process usually looks like this: - -1. Make changes to SDK and appgate code. Test these changes manually. Iterate on this until you have everything working. -2. Publish a development build of the SDK and the Java protobuf classes to artifactory. -3. Update the version of the SDK and Java protobuf classes specified in the app gate. -4. Publish a review for SDK code, and also publish a "provisional" review of appgate code. Address any feedback. -5. Push the SDK code and publish new SDK builds and Java protobuf classes to our internal servers. -6. Finalize your appgate review. -7. Push the appgate changes. - -Not every type of change requires every step. - -These steps are described in more detail below. 
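To make the `publishDebug`/`publishProd` setup described in README-dev.md above concrete, here is a minimal sketch of a local publish run. The credential values are placeholders, and the flow only reuses the environment variables, `twine` dependency, and Gradle tasks named above; it is not an official script from this repository.

```
# Hedged sketch of a local publish, using only the pieces named in README-dev.md.
# Credential values below are placeholders, not real accounts.
export ARTIFACTORY_PYPI_USER="<dvp-uploaders-python user>"
export ARTIFACTORY_PYPI_PASS="<dvp-uploaders-python password>"

pip install twine              # twine performs the actual distribution uploads

./gradlew publishDebug         # test publish to dvp-local-pypi; no checks are run
# ./gradlew publishProd        # production publish to delphix-local; runs tests/lint first
```

Running the debug publish first keeps production artifacts untouched while the new version is validated against `dvp-local-pypi`.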
- -## Background - -There are three parts of the SDK that are important to think about separately since they have slightly different workflows. - -1. The `tools` package is the SDK's CLI. This aids in plugin development, testing, and distribution. -2. There are three sets of protobuf messages: `common`, `libs`, and `platform`. These define the Virtualization API that the SDK are built against. These will eventually be moved out of this repository. -3. `common`, `libs`, and `platform` also contain what are collectively called the "wrappers". These are vanilla Python classes that abstract the protobuf messages away from plugin developers. These expose the API plugin developers write against. - -All dependencies of a plugin must be packaged with the plugin including the protobuf messages and the wrappers. This is done automatically by `dvp build`. The Delphix Engine only needs the autogenerated Java protobuf classes to be able to communicate with a plugin. - -This is what causes the slightly different workflows in development. Changes to `tools` are completely isolated from the Delphix Engine, wrappers changes only impact the plugin build, and protobuf changes need to be pulled into the Delphix Engine. -Unfortunately, at the moment _all_ SDK changes require an app-gate change. Currently BlackBox looks at a property file in the app-gate to determine which version of the SDK to install during tests. This will eventually change, but at the moment any SDK change needs to be accompanied by an app-gate change that, at a minimum, bumps this version. +The latest user documentation can be found [here](https://developer.delphix.com). -## Local SDK Development +## Getting Started -To setup local development, refer to README-dev.md in the `tools` directory. This walks through the setup of a local virtualenv for development. This should be done for _all_ SDK changes. +### Prerequisites -### CLI changes +- macOS 10.14+, Ubuntu 16.04+, or Windows 10 +- Python 2.7 (Python 3 is not supported) +- Java 7+ +- Delphix Engine 5.3.5.0 or above -To better understand how to develop and test `tools` changes, see README-dev.md in the `tools` directory. +### Installing -### Wrappers-only changes - -Run `dvp build --dev` to build your plugin and then upload it to a Delphix Engine to test. - -The wrappers are built with the plugin. `dvp build` has a hidden `--dev` flag. This builds `common`, `libs`, and `platform` locally and bundles them with the plugin. A special configuration entry is needed in your dvp config file which is located at `~/.dvp/config`: +To install the latest version of the SDK run: ``` -[dev] -vsdk_root = /path/to/vsdk_repo_root +$ pip install dvp ``` -### Protobuf changes - -Protobuf changes are needed by both the plugin and the app-gate. Once the protobuf message changes have been made: - -1. Build the plugin with `dvp build --dev` -2. Build the JAR and update the app-gate to pull it in (instructions below) - -### Versioning -The first thing to do is to change the version number. There are _two_ places where the version needs to be changed unfortunately. For almost all cases, this will simply involve incrementing the "build number", which is the three-digit number at the very end of the version. - -1. In the root of the SDK codebase, open the `build.gradle` file, and change the version. -2. In `tools/src/main/python/dlpx/virtualization/_internal/settings.cfg` change the `package_version` property. - -The two versions should be identical. 
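Since the version string must be kept identical in `build.gradle` and in `tools/src/main/python/dlpx/virtualization/_internal/settings.cfg`, a quick consistency check can catch a missed bump. This is only a sketch: the `build.gradle` extraction mirrors the `grep`/`sed` used in `bin/upload.sh`, and the `settings.cfg` format is an assumption (a plain `package_version = <value>` line).

```
# Hedged sketch: verify the two version strings match before publishing.
# build.gradle extraction mirrors bin/upload.sh; settings.cfg format is assumed.
GRADLE_VER=$(grep '^\s*version\s*=' build.gradle | sed -E 's/.*"(.*)".*/\1/g')
CFG_VER=$(grep 'package_version' \
    tools/src/main/python/dlpx/virtualization/_internal/settings.cfg | \
    cut -d'=' -f2 | tr -d ' ')

if [ "$GRADLE_VER" = "$CFG_VER" ]; then
    echo "Versions match: $GRADLE_VER"
else
    echo "Version mismatch: build.gradle=$GRADLE_VER settings.cfg=$CFG_VER" >&2
    exit 1
fi
```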
- -This repository is going through a transition in which Gradle will eventually be removed. The version string will eventually only live in the individual packages themselves. - -#### Using Local SDK Build (For Eclipse use) - -Eclipse does not use gradle to build, so you have to follow special steps if you're using Eclipse. Update this line to the appropriate version: +To install a specific version of the SDK run: ``` - +$ pip install dvp== ``` -### Testing - -Currently, there are three types of SDK testing: unit, manual, and blackbox. +To upgrade an existing installation of the SDK run: -#### Unit Testing - -Running `./gradlew test` from the top level of the repository will run all SDK unit tests. Smaller sets of tests can be run from inside each directory (`common`, `platform`, etc.) by going into that directory and running `../gradlew test`. These can also be run through your IDE. - -#### Testing sdk-gate changes with app-gate code - -There are two pieces here. First, if there are any protobuf changes they need to be pulled in by the app-gate. Second, at the moment blackbox refers to a property file in the app-gate to determine the version of the SDK to install for tests so this property always needs to updated for automated testing. - -NOTE: The app-gate does not pull in the wrappers or CLI from this repository. - -The easiest way to do both of these is: - -1. Update the version of the SDK to something unique and clearly a development build. The standard is `x.y.z-internal-abc-`. For example, `1.1.0-internal-001-grant`. -2. Run `./gradlew publishDebug` from the root of this repository. -3. In `appliance/gradle.properties` in the app-gate update `virtualizationSdkVer` and `virtualizationApiVer` to both match the SDK version. - -Run an appliance-update for manual testing and/or kick off automated blackbox tests. - - -## SDK Review and Provisional app-gate review - -Once you're finished with local development and testing, you can publish your final SDK review to reviewboard. - -In addition, it's customary to publish a "provisional" appgate review, so that people can get insight into how the out-for-review SDK changes will actually be used by the appgate. Of course, this review will contain all your temporary local-build changes mentioned above. So, in your review, you'll want to mention that these temporary changes will be reverted before the review is finalized. - -## Pushing and Deploying SDK Code - - -### Publishing - -There are two Gradle tasks that do publishing: `publishDebug` and `publishProd`. They differ in two ways: - -1. They publish the Python distributions to separate repositories on Artifactory (the jar is always published to the same one.). `publishDebug` uploads to `dvp-local-pypi`. This is a special repository that has been setup to test the SDK. It falls back our our production PyPI repository, but artifacts uploaded to `dvp-local-pypi` do not impact production artifacts. This should be used for testing. `publishProd` does upload the Python distributions to our production Artifactory PyPI repository, `delphix-local`. - -2. `publishProd` runs tests, formatting, and linting while `publishDebug` does not. - -NOTE: The external release to `pypi.org` is done outside of the build system. - -#### Setup - -1. There are three environment variables that need to be set in order to publish: `ARTIFACTORY_PYPI_USER`, `ARTIFACTORY_PYPI_PASS`, and `ARTIFACTORY_JAR_KEY`. 
- - `ARTIFACTORY_PYPI_USER` and `ARTIFACTORY_PYPI_PASS` are one set of credentials used to upload the Python distributions to our internal PyPI repositories. The credentials are the same for both internal PyPI repositories mentioned above. - `ARTIFACTORY_JAR_KEY` +``` +$ pip install dvp --upgrade +``` - - `ARTIFACTORY_PYPI_USER` and `ARTIFACTORY_PYPI_PASS` is the username/password combo given to you by whoever setup your Artifactory pypi account. This is an account separate from your Artifactory account. If you do not have one, please reach out to the `#artifactory` channel and request a `dvp-uploaders-python` account. See for directions on how to add the account. These are used to upload the Python distributions to our internal PyPI repositories. The credentials are the same for both internal PyPI repositories mentioned above. - - `ARTIFACTORY_JAR_KEY` is your Artifactory API key and is used to upload the jar. It can be retreived from http://artifactory.delphix.com/artifactory/webapp/#/profile. You may have to login. This is different from the PyPI credentials because the artifacts are being uploaded to different repositories on Artifactory. +## Reporting a bug -2. `twine` needs to be installed. This is a Python package that is used to upload Python distributions. If it's not installed, install it by running `pip install twine`. +If you run into a problem, please search the [existing issues](https://github.com/delphix/virtualization-sdk/issues) first to ensure the issue hasn't been reported before. Open a new issue only if you haven't found anything similar to your issue. -#### Final Publishing +If the issue is not being tracked, please file an issue using the `Bug Report` issue template [here](https://github.com/delphix/virtualization-sdk/issues/new/choose). -Once you are absolutely certain all changes have been made run `./gradlew publishProd`. This will run checks, build the jar, create the Python distributions, and upload all of them to Artifactory with the Python distributions going to `delphix-local`. +## Requesting a feature -## Using Newly-Deployed SDK Build +To request a feature, file a GitHub issue on this repository using the `Feature Request` issue template [here](https://github.com/delphix/virtualization-sdk/issues/new/choose). -Now, we have to go back to our `appgate` code and make it point to the newly-deployed build on artifactory, instead of the local build we used to test. +## Contributing -1. Modify `appliance/gradle.properties` and change `virtualizationSdkVer` and `virtualizationApiVer` to refer to your new version number. -2. Modify `ivy-eclipse-deps.xml` and change the `com.delphix.virtualization` line to refer to your new version number. +Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on the process for submitting pull requests to us. -## Finalizing Appgate Review +## License -Once you've got the above changes completed, tested, and checked into git, you can update your appgate review. Now, your review will be ready for final ship-its. +This project is licensed under the Apache 2.0 License - see the [LICENSE](LICENSE) file for details. \ No newline at end of file diff --git a/bin/build.sh b/bin/build.sh deleted file mode 100755 index 57af5ecd..00000000 --- a/bin/build.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# -# Copyright (c) 2018, 2019 by Delphix. All rights reserved. -# - -SCRIPT_DIR=`dirname $0` -source ${SCRIPT_DIR}/common.sh - -# This script must be executed from the root directory of the virtualization-sdk repo. 
-ROOT=`git rev-parse --show-toplevel` -cd $ROOT - -mkdir -p build/libs -cd build/libs - -echo "Preparing Virtualization SDK jar directory..." -JAR_DIRECTORY=virtualization-sdk-jar -mkdir -p ${JAR_DIRECTORY} - -echo "Copying Virtualization SDK binaries from NAS..." -scp -r delphix@support-tools:/nas/engineering/fdrozdowski/virtualization-sdk/bin/protoc-3.6.1-osx-x86_64 . -scp delphix@support-tools:/nas/engineering/fdrozdowski/virtualization-sdk/bin/protobuf-java-3.6.1.jar . - -mkdir -p dlpx/virtualization -cp ${ROOT}/common/src/main/proto/dlpx/virtualization/common.proto dlpx/virtualization/common.proto -cp ${ROOT}/platform/src/main/proto/dlpx/virtualization/platform.proto dlpx/virtualization/platform.proto -cp ${ROOT}/libs/src/main/proto/dlpx/virtualization/libs.proto dlpx/virtualization/libs.proto - -echo "Compiling protobuf definitions to Java and Python classes..." -protoc-3.6.1-osx-x86_64/bin/protoc -I=. --java_out=${JAR_DIRECTORY} dlpx/virtualization/common.proto dlpx/virtualization/platform.proto dlpx/virtualization/libs.proto - -echo "Compiling Java source files to Java classes..." -find ${JAR_DIRECTORY} -name "*.java" > sources.txt -javac -classpath protobuf-java-3.6.1.jar @sources.txt - -VERSION=`cat "${ROOT}/build.gradle" | grep '^\s*version\s*=\s*"*"'| sed -E 's/.*"(.*)".*/\1/g'` -[ -z "$VERSION" ] && die "Failed to retrieve SDK version from build.gradle." - -echo "Creating a Virtualization SDK jar..." -JAR_FILE_NAME="api-java-$VERSION.jar" -jar cvf ${JAR_FILE_NAME} -C ${JAR_DIRECTORY} . > /dev/null - -exit 0 diff --git a/bin/upload.sh b/bin/upload.sh index 46b90f95..a6bc7dfb 100755 --- a/bin/upload.sh +++ b/bin/upload.sh @@ -3,7 +3,7 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # -# This script uploads the SDK jar as well as all Python distributions created by the SDK. It assumes that all artifacts +# This script uploads all Python distributions created by the SDK. It assumes that all artifacts # already exist. This is not intended to be a long term solution. Instead, this is a dirty way to abstract away # some of this logic. It should instead be rolled directly into the Gradle build and our future CI pipeline. # @@ -19,8 +19,8 @@ if [[ $# -gt 1 ]]; then die $USAGE elif [[ $# -eq 1 && $1 != "--prod" ]]; then die $USAGE -elif [[ -z ${ARTIFACTORY_PYPI_USER} || -z ${ARTIFACTORY_PYPI_PASS} || -z ${ARTIFACTORY_JAR_KEY} ]]; then - die "ARTIFACTORY_PYPI_USER, ARTIFACTORY_PYPI_PASS, and/or ARTIFACTORY_JAR_KEY environment variables are not set. Set them or pass them in as arguments to upload.sh." +elif [[ -z ${ARTIFACTORY_PYPI_USER} || -z ${ARTIFACTORY_PYPI_PASS} ]]; then + die "ARTIFACTORY_PYPI_USER and/or ARTIFACTORY_PYPI_PASS environment variables are not set. Set them or pass them in as arguments to upload.sh." fi # dvp-local-pypi is used for testing and is the default. delphix-local is our internal production PyPI repository and @@ -42,14 +42,6 @@ ROOT=`git rev-parse --show-toplevel` VERSION=`cat "${ROOT}/build.gradle" | grep '^\s*version\s*=\s*"*"'| sed -E 's/.*"(.*)".*/\1/g'` [ -z "$VERSION" ] && die "Failed to retrieve SDK version from build.gradle." -echo "Uploading custom build jar..." 
-RESPONSE=`curl --silent --write-out "%{http_code}" -H "X-JFrog-Art-Api: ${ARTIFACTORY_JAR_KEY}" -T "${ROOT}/build/libs/api-java-${VERSION}.jar" "http://artifactory.delphix.com/artifactory/virtualization-sdk/com/delphix/virtualization/platform/api-java/${VERSION}/api-java-${VERSION}.jar"` - -# The above 'curl' command writes out "${http_code}" so the last three characters of the output will be the HTTP -# response code. If that response code is not "201", it is a failure so die and then print the response. ${REPONSE%????} -# prints $REPONSE without the last 4 characters which are the HTTP exit code and an 'n'. -[ ${RESPONSE: -3} -ne "201" ] && die "Failed to upload ${ROOT}/build/libs/api-java-${VERSION}.jar to artifactory:\n" ${RESPONSE%????} - echo "Uploading 'common' Python distribution..." twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/common/build/python-dist/*${VERSION}.tar.gz" > /dev/null twine upload --repository-url ${REPO} -u ${ARTIFACTORY_PYPI_USER} -p ${ARTIFACTORY_PYPI_PASS} "${ROOT}/common/build/python-dist/*${VERSION//-/_}*.whl" > /dev/null diff --git a/build.gradle b/build.gradle index a8988cc3..12e1c6b0 100644 --- a/build.gradle +++ b/build.gradle @@ -3,12 +3,16 @@ */ plugins { - id "com.google.protobuf" version "0.8.7" apply false id "delphix.python" version "0.0.7" apply false } subprojects { - version = "1.1.0-internal-upgrade-008" + version = "1.1.0-internal-upgrade-009" + /* + * dvp-api is a Python package that contains Python protobuf classes generated based on the Virtualization API. + * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. + */ + project.ext.dvpApiVersion = "1.1.0-master-003" } def binDir = "${rootProject.projectDir}/bin" @@ -18,14 +22,6 @@ def binDir = "${rootProject.projectDir}/bin" * are only intended to help abstract away this logic until we roll these into the actual build system. */ -/* - * The jar that is shipped with the Delphix Engine to support the Virtualization Platform is created by a shell script. - * This task wraps that shell script. - */ -task jar(type: Exec) { - commandLine "${binDir}/build.sh" -} - /* * This task wraps a shell script that checks if the version of the SDK has been bumped. Bumping the SDK version is * currently a manual step that is easy to forget. @@ -57,7 +53,7 @@ task build { } /* - * This task publishes the final version of the jar and all Python distributions to our internal production PyPI + * This task publishes the final version of all the Python distributions to our internal production PyPI * repository. This should be executed immediately before a change is pushed. * * NOTE: This DOES NOT publish to pypi.org. The external release process is done outside of the build system. 
@@ -78,7 +74,6 @@ task publishProd(type: Exec) { */ task publishDebug(type: Exec) { dependsOn 'checkVersionBump' - dependsOn 'jar' dependsOn 'buildPython' executable "${binDir}/upload.sh" diff --git a/common/Pipfile.lock b/common/Pipfile.lock index 2c3630c6..94bd022f 100644 --- a/common/Pipfile.lock +++ b/common/Pipfile.lock @@ -1,19 +1,27 @@ { "_meta": { "hash": { - "sha256": "6ad18e02013aa5f2d09b0aa3e376942da5f67ff5f0fb8b79b867bf032d066de5" + "sha256": "be793303ac161292c49bcac8c09e54f117048b6e1389404d82885ff7ce62eae3" }, "pipfile-spec": 6, "requires": {}, "sources": [ { "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", + "url": "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/", "verifySsl": true } ] }, "default": { + "dvp-api": { + "hashes": [ + "sha256:df435f1bb843703d1a93346781ebb1ae0b6f61e4722d90399f6b0a641ad73c5f", + "sha256:eff849f3681e0c429a339a96f2861c8923606058b2941e1103d20e842f39d377" + ], + "index": "delphix", + "version": "==1.1.0-master-003" + }, "protobuf": { "hashes": [ "sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4", @@ -37,10 +45,10 @@ }, "six": { "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", + "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" ], - "version": "==1.12.0" + "version": "==1.13.0" } }, "develop": { @@ -53,10 +61,26 @@ }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" ], - "version": "==19.1.0" + "version": "==19.3.0" + }, + "configparser": { + "hashes": [ + "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", + "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" + ], + "markers": "python_version < '3'", + "version": "==4.0.2" + }, + "contextlib2": { + "hashes": [ + "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", + "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" + ], + "markers": "python_version < '3'", + "version": "==0.6.0.post1" }, "funcsigs": { "hashes": [ @@ -66,6 +90,14 @@ "markers": "python_version < '3.0'", "version": "==1.0.2" }, + "importlib-metadata": { + "hashes": [ + "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45", + "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f" + ], + "markers": "python_version < '3.8'", + "version": "==1.3.0" + }, "more-itertools": { "hashes": [ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", @@ -75,20 +107,27 @@ "markers": "python_version <= '2.7'", "version": "==5.0.0" }, + "packaging": { + "hashes": [ + "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", + "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" + ], + "version": "==19.2" + }, "pathlib2": { "hashes": [ - "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", - "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7" + "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", + 
"sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", - "version": "==2.3.3" + "markers": "python_version < '3'", + "version": "==2.3.5" }, "pluggy": { "hashes": [ - "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", - "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "version": "==0.9.0" + "version": "==0.13.1" }, "py": { "hashes": [ @@ -97,12 +136,20 @@ ], "version": "==1.8.0" }, + "pyparsing": { + "hashes": [ + "sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f", + "sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a" + ], + "version": "==2.4.5" + }, "pytest": { "hashes": [ - "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523", - "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4" + "sha256:65e92898fb5b61d0a1d7319c3e6dcf97e599e331cfdc2b27f20c0d87ece19239", + "sha256:9ea149066f566c943d3122f4b1cf1b577cab73189d11f490b54703fa5fa9df50" ], - "version": "==4.3.1" + "index": "delphix", + "version": "==4.6.7" }, "scandir": { "hashes": [ @@ -123,10 +170,24 @@ }, "six": { "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", + "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + ], + "version": "==1.13.0" + }, + "wcwidth": { + "hashes": [ + "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", + "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" + ], + "version": "==0.1.7" + }, + "zipp": { + "hashes": [ + "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", + "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" ], - "version": "==1.12.0" + "version": "==0.6.0" } } } diff --git a/common/build.gradle b/common/build.gradle index faace22b..963eceab 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -4,38 +4,9 @@ plugins { id "java" - id "com.google.protobuf" id "delphix.python" } -repositories { - mavenCentral() -} - -dependencies { - // Necessary to compile generated java protocol buffer libraries. - compile 'com.google.protobuf:protobuf-java:3.6.1' -} - -protobuf { - - protoc { - artifact = 'com.google.protobuf:protoc:3.6.1' - } - - // This activates other protoc language targets. - // https://github.com/google/protobuf-gradle-plugin#default-outputs - generateProtoTasks { - all().each { task -> - task.builtins { - python { - } - } - } - } - generatedFilesBaseDir = "$projectDir/src" -} - artifacts { python sdist.distFile } @@ -43,7 +14,7 @@ artifacts { dlpxPython { sources { delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" + url = "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/" } } @@ -52,8 +23,9 @@ dlpxPython { } packages { - protobuf { - version = "==3.6.1" + + "dvp-api" { + version = "== $project.ext.dvpApiVersion" } } @@ -65,15 +37,6 @@ dlpxPython { } } -/* - * This is a temporary task. 'src/main/java' only contains compiled protobuf classes. Sometimes these get out of date - * and they need to be cleaned up manually. 
In the long term, everything should probably be built under 'build' so - * the 'clean' task automatically deletes them. In the short term though, this task cleans them up. - */ -task removeProtobufJava(type: Delete) { - delete "${projectDir}/src/main/java" -} - /* * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in @@ -91,10 +54,9 @@ task wheel(type: SetupPyTask) { dependsOn makeSetupPy } -clean.dependsOn('removeProtobufJava') clean.dependsOn('removeEggInfo') project.afterEvaluate { - tasks["sdist"].dependsOn tasks["generateProto"] - tasks["test_python2.7"].dependsOn tasks["generateProto"] + tasks["sdist"] + tasks["test_python2.7"] } diff --git a/common/lock.dev-requirements.txt b/common/lock.dev-requirements.txt index bc763e37..b23a23a0 100644 --- a/common/lock.dev-requirements.txt +++ b/common/lock.dev-requirements.txt @@ -1,11 +1,18 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ atomicwrites==1.3.0 -attrs==19.1.0 +attrs==19.3.0 +configparser==4.0.2 ; python_version < '3' +contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.0' +importlib-metadata==1.3.0 ; python_version < '3.8' more-itertools==5.0.0 ; python_version <= '2.7' -pathlib2==2.3.3 ; python_version < '3.6' -pluggy==0.9.0 +packaging==19.2 +pathlib2==2.3.5 ; python_version < '3' +pluggy==0.13.1 py==1.8.0 -pytest==4.3.1 +pyparsing==2.4.5 +pytest==4.6.7 scandir==1.10.0 ; python_version < '3.5' -six==1.12.0 +six==1.13.0 +wcwidth==0.1.7 +zipp==0.6.0 diff --git a/common/lock.requirements.txt b/common/lock.requirements.txt index f6490f34..6dc57a3f 100644 --- a/common/lock.requirements.txt +++ b/common/lock.requirements.txt @@ -1,3 +1,4 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ +dvp-api==1.1.0-master-003 protobuf==3.6.1 -six==1.12.0 +six==1.13.0 diff --git a/common/src/main/proto/dlpx/virtualization/common.proto b/common/src/main/proto/dlpx/virtualization/common.proto deleted file mode 100644 index e1575044..00000000 --- a/common/src/main/proto/dlpx/virtualization/common.proto +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2018, 2019 by Delphix. All rights reserved. 
- */ - -syntax = "proto3"; - -option java_multiple_files = true; - -package com.delphix.virtualization.common; - -message RemoteConnection { - RemoteEnvironment environment = 1; - RemoteUser user = 2; -} - -message RemoteEnvironment { - string name = 1; - string reference = 2; - RemoteHost host = 3; -} - -message RemoteHost { - string name = 1; - string reference = 2; - string binary_path = 3; - string scratch_path = 4; -} - -message RemoteUser { - string name = 1; - string reference = 2; -} - -message LinkedSource { - string guid = 1; - PluginDefinedObject parameters = 2; -} - -message DirectSource { - RemoteConnection connection = 1; - LinkedSource linked_source = 2; -} - -// Stuff that always mounts the entire ZFS filesystem -message SingleEntireMount { - RemoteEnvironment remote_environment = 1; - string mount_path = 2; - string shared_path = 3; -} - -// Stuff that can optionally mount only a subset of the ZFS filesystem -message SingleSubsetMount { - RemoteEnvironment remote_environment = 1; - string mount_path = 2; - string shared_path = 3; // not supported on Windows -} - -message StagedSource { - LinkedSource linked_source = 1; - RemoteConnection source_connection = 2; - SingleEntireMount staged_mount = 3; - RemoteConnection staged_connection = 4; -} - -message VirtualSource { - string guid = 1; - RemoteConnection connection = 2; - repeated SingleSubsetMount mounts = 3; - PluginDefinedObject parameters = 4; -} - -message SourceConfig { - string name = 1; - PluginDefinedObject parameters = 2; -} - -message Repository { - string name = 1; - PluginDefinedObject parameters = 2; -} - -message Snapshot { - PluginDefinedObject parameters = 1; -} - -message SnapshotParameters { - PluginDefinedObject parameters = 1; -} - -/* - * A PluginDefinedObject is a serialized JSON representation of an object that conforms to a schema provided by the - * plugin. Typically, this will be deserialized to a language-specific object for use by the plugin. For example, - * deserialization of snapshot metadata for a Python plugin would use the Python - * generated.definitions.SnapshotDefinition class. - */ -message PluginDefinedObject { - string json = 1; -} - -message OwnershipSpec { - int32 uid = 1; - int32 gid = 2; -} diff --git a/common/src/main/python/dlpx/virtualization/common/_common_classes.py b/common/src/main/python/dlpx/virtualization/common/_common_classes.py index 2cc9622f..c5d71fe3 100644 --- a/common/src/main/python/dlpx/virtualization/common/_common_classes.py +++ b/common/src/main/python/dlpx/virtualization/common/_common_classes.py @@ -2,7 +2,7 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # -from dlpx.virtualization import common_pb2 +from dlpx.virtualization.api import common_pb2 from dlpx.virtualization.common.exceptions import IncorrectTypeError """Classes used for Plugin Operations diff --git a/common/src/test/java/NotUsed.java b/common/src/test/java/NotUsed.java deleted file mode 100644 index 6608cff8..00000000 --- a/common/src/test/java/NotUsed.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -/** - * Gradle will fail when running the test task if there are not classes in the - * Java test jar. This class is simply here to prevent that from happening. - * If a test is introduced in the future this file will be deleted. 
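As the PluginDefinedObject comment in the deleted common.proto above explains, plugin-specific data travels as a JSON string and is rehydrated into a generated, schema-backed class (for snapshot metadata, generated.definitions.SnapshotDefinition). A minimal sketch of that step follows; the definition_cls argument and its from_dict helper are assumptions used only for illustration.

    import json

    def to_plugin_object(plugin_defined_object, definition_cls):
        # 'json' is the single string field carried by PluginDefinedObject.
        data = json.loads(plugin_defined_object.json)
        # definition_cls stands in for a generated class such as
        # generated.definitions.SnapshotDefinition; from_dict is assumed here.
        return definition_cls.from_dict(data)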
- */ -public class NotUsed { -} diff --git a/common/src/test/python/dlpx/virtualization/common/test_common_classes.py b/common/src/test/python/dlpx/virtualization/common/test_common_classes.py index 4e2a2862..d0ede0f7 100644 --- a/common/src/test/python/dlpx/virtualization/common/test_common_classes.py +++ b/common/src/test/python/dlpx/virtualization/common/test_common_classes.py @@ -3,7 +3,7 @@ # import pytest -from dlpx.virtualization import common_pb2 +from dlpx.virtualization.api import common_pb2 from dlpx.virtualization.common._common_classes import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) from dlpx.virtualization.common.exceptions import IncorrectTypeError @@ -63,7 +63,7 @@ def test_remote_connection_from_proto_fail(): RemoteConnection.from_proto('') assert err_info.value.message == ( "RemoteConnection's parameter 'connection' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteConnection'.") @@ -115,7 +115,7 @@ def test_remote_environment_from_proto_fail(): RemoteEnvironment.from_proto('') assert err_info.value.message == ( "RemoteEnvironment's parameter 'environment' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteEnvironment'.") @@ -177,7 +177,7 @@ def test_remote_host_from_proto_fail(): RemoteHost.from_proto('') assert err_info.value.message == ( "RemoteHost's parameter 'host' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteHost'.") @@ -220,5 +220,5 @@ def test_remote_user_from_proto_fail(): RemoteUser.from_proto('') assert err_info.value.message == ( "RemoteUser's parameter 'user' was" - " type 'str' but should be of class 'dlpx.virtualization" + " type 'str' but should be of class 'dlpx.virtualization.api" ".common_pb2.RemoteUser'.") diff --git a/common/src/test/python/dlpx/virtualization/test_common_generated.py b/common/src/test/python/dlpx/virtualization/test_common_generated.py deleted file mode 100644 index b7264066..00000000 --- a/common/src/test/python/dlpx/virtualization/test_common_generated.py +++ /dev/null @@ -1,10 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -from google.protobuf import message - - -def test_import_common(): - from dlpx.virtualization import common_pb2 - assert issubclass(common_pb2.Repository, message.Message) diff --git a/dvp/build.gradle b/dvp/build.gradle index 6810040e..71588982 100644 --- a/dvp/build.gradle +++ b/dvp/build.gradle @@ -10,14 +10,6 @@ artifacts { python sdist.distFile } -dependencies { - // Necessary to compile generated python protocol buffer libraries. 
- python project(path: ":common", configuration: "python") - python project(path: ":platform", configuration: "python") - python project(path: ":libs", configuration: "python") - python project(path: ":tools", configuration: "python") -} - dlpxPython { sources { delphix { diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index f0b768a3..070708a7 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,22 +1,30 @@ { "_meta": { "hash": { - "sha256": "36907c9dc35ef41d53048ab38a5a5c05e03f64caa33795342572cc04eaf4eb28" + "sha256": "88a004aeaaed350e8fda61fa79aa62d7fb1dea765db1ec02d5d04603e65f0ebb" }, "pipfile-spec": 6, "requires": {}, "sources": [ { "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", + "url": "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/", "verifySsl": true } ] }, "default": { + "dvp-api": { + "hashes": [ + "sha256:df435f1bb843703d1a93346781ebb1ae0b6f61e4722d90399f6b0a641ad73c5f", + "sha256:eff849f3681e0c429a339a96f2861c8923606058b2941e1103d20e842f39d377" + ], + "index": "delphix", + "version": "==1.1.0-master-003" + }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz", - "version": "== 1.1.0-internal-upgrade-008" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz", + "version": "== 1.1.0-internal-upgrade-009" }, "protobuf": { "hashes": [ @@ -37,7 +45,6 @@ "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" ], - "index": "delphix", "version": "==3.6.1" }, "six": { @@ -114,10 +121,10 @@ }, "packaging": { "hashes": [ - "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", - "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" + "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", + "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" ], - "version": "==20.0" + "version": "==20.1" }, "pathlib2": { "hashes": [ diff --git a/libs/build.gradle b/libs/build.gradle index b4a205a1..30eed9ac 100644 --- a/libs/build.gradle +++ b/libs/build.gradle @@ -4,40 +4,9 @@ plugins { id "java" - id "com.google.protobuf" id "delphix.python" } -repositories { - mavenCentral() -} - -dependencies { - // Necessary to compile generated java protocol buffer libraries. - compile project(":common") - - // Necessary to compile generated python protocol buffer libraries. - python project(path: ":common", configuration: "python") -} - -protobuf { - protoc { - artifact = "com.google.protobuf:protoc:3.6.1" - } - - // This activates other protoc language targets. - // https://github.com/google/protobuf-gradle-plugin#default-outputs - generateProtoTasks { - all().each { task -> - task.builtins { - python { - } - } - } - } - generatedFilesBaseDir = "$projectDir/src" -} - artifacts { python sdist.distFile } @@ -45,7 +14,7 @@ artifacts { dlpxPython { sources { delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" + url = "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/" } } @@ -54,8 +23,9 @@ dlpxPython { } packages { - protobuf { - version = "==3.6.1" + + "dvp-api" { + version = "== $project.ext.dvpApiVersion" } "dvp-common" { @@ -76,15 +46,6 @@ dlpxPython { } } -/* - * This is a temporary task. 'src/main/java' only contains compiled protobuf classes. 
Sometimes these get out of date - * and they need to be cleaned up manually. In the long term, everything should probably be built under 'build' so - * the 'clean' task automatically deletes them. In the short term though, this task cleans them up. - */ -task removeProtobufJava(type: Delete) { - delete "${projectDir}/src/main/java" -} - /* * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in @@ -102,13 +63,12 @@ task wheel(type: SetupPyTask) { dependsOn makeSetupPy } -clean.dependsOn('removeProtobufJava') clean.dependsOn('removeEggInfo') // sdist and the python tests depend on the generated python libraries // from the protoc plugin. Must manually specify as plugins are not aware // of each other. project.afterEvaluate { - tasks["sdist"].dependsOn tasks["generateProto"] - tasks["test_python2.7"].dependsOn tasks["generateProto"] + tasks["sdist"] + tasks["test_python2.7"] } diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 23715d75..48fbcad1 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -1,4 +1,4 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' @@ -7,7 +7,7 @@ funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==20.0 +packaging==20.1 pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 py==1.8.1 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 0aa55070..bef5995f 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,4 +1,5 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz +dvp-api==1.1.0-master-003 protobuf==3.6.1 six==1.14.0 diff --git a/libs/src/main/proto/dlpx/virtualization/libs.proto b/libs/src/main/proto/dlpx/virtualization/libs.proto deleted file mode 100644 index 5dfabcf4..00000000 --- a/libs/src/main/proto/dlpx/virtualization/libs.proto +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2018, 2019 by Delphix. All rights reserved. 
- */ - -syntax = "proto3"; - -import "dlpx/virtualization/common.proto"; - -option java_multiple_files = true; - -package com.delphix.virtualization.libs; - - -message ActionableLibraryError { - int32 id = 1; - string message = 2; -} - -message NonActionableLibraryError { -} - -message LibraryErrorResult { - oneof error { - ActionableLibraryError actionable_error = 1; - NonActionableLibraryError non_actionable_error = 2; - } -} - -message RunBashRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string command = 2; - map variables = 3; - bool use_login_shell = 4; -} - -message RunBashResult { - int32 exit_code = 1; - string stdout = 2; - string stderr = 3; -} - -message RunBashResponse { - oneof result { - RunBashResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message RunPowerShellRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string command = 2; - map variables = 3; -} - -message RunPowerShellResult { - int32 exit_code = 1; - string stdout = 2; - string stderr = 3; -} - -message RunPowerShellResponse { - oneof result { - RunPowerShellResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message RunSyncRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string source_directory = 2; - string rsync_user = 3; - repeated string exclude_paths = 4; - repeated string sym_links_to_follow = 5; - -} - -message RunSyncResult { } - -message RunSyncResponse { - oneof result { - RunSyncResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message RunExpectRequest { - com.delphix.virtualization.common.RemoteConnection remote_connection = 1; - string command = 2; - map variables = 3; -} - -message RunExpectResult { - int32 exit_code = 1; - string stdout = 2; - string stderr = 3; -} - -message RunExpectResponse { - oneof result { - RunExpectResult return_value = 1; - LibraryErrorResult error = 2; - } -} - -message LogRequest { - string message = 1; - enum LogLevel { - DEBUG = 0; - INFO = 1; - ERROR = 2; - } - LogLevel level = 2; -} - -message LogResult { } - -message LogResponse { - oneof result { - LogResult return_value = 1; - LibraryErrorResult error = 2; - } -} diff --git a/libs/src/main/python/dlpx/virtualization/libs/libs.py b/libs/src/main/python/dlpx/virtualization/libs/libs.py index c8f042a2..2e0fb4bd 100644 --- a/libs/src/main/python/dlpx/virtualization/libs/libs.py +++ b/libs/src/main/python/dlpx/virtualization/libs/libs.py @@ -26,7 +26,7 @@ import sys -from dlpx.virtualization import libs_pb2 +from dlpx.virtualization.api import libs_pb2 from dlpx.virtualization.libs.exceptions import (IncorrectArgumentTypeError, LibraryError, PluginScriptError) diff --git a/libs/src/test/java/NotUsed.java b/libs/src/test/java/NotUsed.java deleted file mode 100644 index 6608cff8..00000000 --- a/libs/src/test/java/NotUsed.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -/** - * Gradle will fail when running the test task if there are not classes in the - * Java test jar. This class is simply here to prevent that from happening. - * If a test is introduced in the future this file will be deleted. 
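The RunBash messages in the deleted libs.proto above pair a remote connection, a command, and optional variables with an exit_code/stdout/stderr result. A hedged sketch of how a plugin typically drives that path through dlpx.virtualization.libs; the run_bash keyword arguments are inferred from the proto fields rather than confirmed by this patch.

    from dlpx.virtualization import libs

    def read_hostname(connection):
        # connection is the RemoteConnection the engine hands to plugin operations.
        result = libs.run_bash(connection, "cat /etc/hostname", use_login_shell=False)
        if result.exit_code != 0:
            raise RuntimeError(result.stderr)
        return result.stdout.strip()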
- */ -public class NotUsed { -} diff --git a/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py b/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py index 00f726fa..b1575da1 100644 --- a/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py +++ b/libs/src/test/python/dlpx/virtualization/test_delphix_libs_generated.py @@ -6,10 +6,10 @@ def test_import_common(): - from dlpx.virtualization import common_pb2 + from dlpx.virtualization.api import common_pb2 assert issubclass(common_pb2.Repository, message.Message) def test_import_libs(): - from dlpx.virtualization import libs_pb2 + from dlpx.virtualization.api import libs_pb2 assert issubclass(libs_pb2.RunSyncRequest, message.Message) diff --git a/libs/src/test/python/dlpx/virtualization/test_libs.py b/libs/src/test/python/dlpx/virtualization/test_libs.py index 10dd8dd6..83364932 100644 --- a/libs/src/test/python/dlpx/virtualization/test_libs.py +++ b/libs/src/test/python/dlpx/virtualization/test_libs.py @@ -5,7 +5,7 @@ import mock import pytest -from dlpx.virtualization import libs_pb2 +from dlpx.virtualization.api import libs_pb2 from dlpx.virtualization import libs from dlpx.virtualization.libs.exceptions import ( IncorrectArgumentTypeError, LibraryError, PluginScriptError) diff --git a/libs/src/test/python/dlpx/virtualization/test_logging.py b/libs/src/test/python/dlpx/virtualization/test_logging.py index 88e51608..022cffa8 100644 --- a/libs/src/test/python/dlpx/virtualization/test_logging.py +++ b/libs/src/test/python/dlpx/virtualization/test_logging.py @@ -7,9 +7,9 @@ import pytest from dlpx.virtualization.libs import PlatformHandler -from dlpx.virtualization.libs_pb2 import LogRequest -from dlpx.virtualization.libs_pb2 import LogResult -from dlpx.virtualization.libs_pb2 import LogResponse +from dlpx.virtualization.api.libs_pb2 import LogRequest +from dlpx.virtualization.api.libs_pb2 import LogResult +from dlpx.virtualization.api.libs_pb2 import LogResponse class TestPythonHandler: diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 581b13f9..7e5267cd 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,22 +1,30 @@ { "_meta": { "hash": { - "sha256": "9f3db23b7533c52560570d11d7ad0c200856a00c14d6a968233ed6d4238269f8" + "sha256": "a67caeec42ad44ac52845a5102dc30573211e22d3cc0b209f809da101ddac803" }, "pipfile-spec": 6, "requires": {}, "sources": [ { "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", + "url": "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/", "verifySsl": true } ] }, "default": { + "dvp-api": { + "hashes": [ + "sha256:df435f1bb843703d1a93346781ebb1ae0b6f61e4722d90399f6b0a641ad73c5f", + "sha256:eff849f3681e0c429a339a96f2861c8923606058b2941e1103d20e842f39d377" + ], + "index": "delphix", + "version": "==1.1.0-master-003" + }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz", - "version": "== 1.1.0-internal-upgrade-008" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz", + "version": "== 1.1.0-internal-upgrade-009" }, "enum34": { "hashes": [ @@ -48,7 +56,6 @@ "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" ], - "index": "delphix", "version": "==3.6.1" }, "six": { @@ -125,10 +132,10 @@ }, "packaging": { "hashes": [ - 
"sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", - "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" + "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", + "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" ], - "version": "==20.0" + "version": "==20.1" }, "pathlib2": { "hashes": [ diff --git a/platform/build.gradle b/platform/build.gradle index 988066d1..06d20368 100644 --- a/platform/build.gradle +++ b/platform/build.gradle @@ -4,41 +4,9 @@ plugins { id "java" - id "com.google.protobuf" id "delphix.python" } -repositories { - mavenCentral() -} - -dependencies { - // Necessary to compile generated java protocol buffer libraries. - compile project(":common") - - // Necessary to compile generated python protocol buffer libraries. - python project(path: ":common", configuration: "python") -} - -protobuf { - - protoc { - artifact = "com.google.protobuf:protoc:3.6.1" - } - - // This activates other protoc language targets. - // https://github.com/google/protobuf-gradle-plugin#default-outputs - generateProtoTasks { - all().each { task -> - task.builtins { - python { - } - } - } - } - generatedFilesBaseDir = "$projectDir/src" -} - artifacts { python sdist.distFile } @@ -46,7 +14,7 @@ artifacts { dlpxPython { sources { delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" + url = "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/" } } @@ -55,17 +23,18 @@ dlpxPython { } packages { - protobuf { - version = "==3.6.1" - } - enum34 { - markers = "python_version < '3.4'" + "dvp-api" { + version = "== $project.ext.dvpApiVersion" } "dvp-common" { version = "== $project.version" path = file(tasks.getByPath(":common:sdist").getDistFile().toString()) } + + enum34 { + markers = "python_version < '3.4'" + } } devPackages { @@ -79,15 +48,6 @@ dlpxPython { } } -/* - * This is a temporary task. 'src/main/java' only contains compiled protobuf classes. Sometimes these get out of date - * and they need to be cleaned up manually. In the long term, everything should probably be built under 'build' so - * the 'clean' task automatically deletes them. In the short term though, this task cleans them up. - */ -task removeProtobufJava(type: Delete) { - delete "${projectDir}/src/main/java" -} - /* * As part of running a packages 'setup.py' file, setuptools creates 'egg-info' directories that contain information * about the build distribution. These can sometimes cause issues. We should probably build Python distributions in @@ -105,13 +65,9 @@ task wheel(type: SetupPyTask) { dependsOn makeSetupPy } -clean.dependsOn('removeProtobufJava') clean.dependsOn('removeEggInfo') -// sdist and the python tests depend on the generated python libraries -// from the protoc plugin. Must manually specify as plugins are not aware -// of each other. 
project.afterEvaluate { - tasks["sdist"].dependsOn tasks["generateProto"] - tasks["test_python2.7"].dependsOn tasks["generateProto"] + tasks["sdist"] + tasks["test_python2.7"] } diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 23715d75..48fbcad1 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -1,4 +1,4 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' @@ -7,7 +7,7 @@ funcsigs==1.0.2 ; python_version < '3.0' importlib-metadata==1.4.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' -packaging==20.0 +packaging==20.1 pathlib2==2.3.5 ; python_version < '3.6' pluggy==0.13.1 py==1.8.1 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 33cf4c0a..0ea4b9a9 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,6 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz +dvp-api==1.1.0-master-003 enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 six==1.14.0 diff --git a/platform/src/main/proto/dlpx/virtualization/platform.proto b/platform/src/main/proto/dlpx/virtualization/platform.proto deleted file mode 100644 index 8c51fdc2..00000000 --- a/platform/src/main/proto/dlpx/virtualization/platform.proto +++ /dev/null @@ -1,471 +0,0 @@ -/* - * Copyright (c) 2018, 2019 by Delphix. All rights reserved. - */ - -syntax = "proto3"; - -import "dlpx/virtualization/common.proto"; - -option java_multiple_files = true; - -package com.delphix.virtualization.platform; - -/* - * NOTE: - * Currently, there are some operations that do not expect a return value - - * e.g - pre-snapshot (direct and staged), start staging, stop staging etc. - * So this file contains empty result types for all such operations as a - * placeholder. Defining a valid return type as a placeholder and still return - * empty message helps if something needs to be added as part of the result in - * future. - */ - -/* ERROR REPORTING */ - -/* - * This indicates that a generic problem has been detected. - * These include syntax errors and any other kind of errors. - */ -message GenericPluginError { - string message = 1; - string type = 2; - string call_stack = 3; -} - -/* - * This indicates that a plugin elected not to handle an error raised by a - * library call. - * - error_id can be used to look up the original exception from the callback - worker. - * - call_stack gives the location in Python code whence the erroring library - call was executed. - */ -message UnhandledLibraryError { - uint32 error_id = 1; - string message = 2; - string call_stack = 3; -} - -/* - * This indicates that there was a Plugin runtime error for example if the - * implemented operation returns incorrect typed object back. - */ -message PluginRuntimeError { - string message = 1; - string call_stack = 2; -} - -/* - * This indicates that one of our generated classes raised an error. For - * example, such an error will happen if the schema defines a string property, - * but the plugin code tries to set it to an integer. 
- */ -message GeneratedClassesError { - string message = 1; - string call_stack = 2; -} - -/* - * This indicates that there was a UserError raised by plugin author. - */ -message UserError { - string message = 1; - string action = 2; - string output = 3; -} - -/* - * More error types will get inserted here as more specific errors are created. - * When a new error is added, they should be put to the end of the list, - * incrementing the field tag by one. The number should never be modified to - * guarantee backwards compatibility. - */ -message PluginErrorResult { - oneof error { - GenericPluginError generic_plugin_error = 1; - UnhandledLibraryError unhandled_library_error = 2; - PluginRuntimeError plugin_runtime_error = 3; - GeneratedClassesError generated_classes_error = 4; - UserError user_error = 5; - } -} - -/* DISCOVERY */ - -message RepositoryDiscoveryRequest { - com.delphix.virtualization.common.RemoteConnection source_connection = 1; -} - -message RepositoryDiscoveryResult { - repeated com.delphix.virtualization.common.Repository repositories = 1; -} - -message RepositoryDiscoveryResponse { - oneof result { - RepositoryDiscoveryResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message SourceConfigDiscoveryRequest { - com.delphix.virtualization.common.RemoteConnection source_connection = 1; - com.delphix.virtualization.common.Repository repository = 2; -} - -message SourceConfigDiscoveryResult { - repeated com.delphix.virtualization.common.SourceConfig source_configs = 1; -} - -message SourceConfigDiscoveryResponse { - oneof result { - SourceConfigDiscoveryResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* DIRECT LINKING */ - -message DirectPreSnapshotRequest { - com.delphix.virtualization.common.DirectSource direct_source = 1; - com.delphix.virtualization.common.Repository repository = 2; - com.delphix.virtualization.common.SourceConfig source_config = 3; -} - -message DirectPreSnapshotResult { } - -message DirectPreSnapshotResponse { - oneof result { - DirectPreSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message DirectPostSnapshotRequest { - com.delphix.virtualization.common.DirectSource direct_source = 1; - com.delphix.virtualization.common.Repository repository = 2; - com.delphix.virtualization.common.SourceConfig source_config = 3; -} - -message DirectPostSnapshotResult { - com.delphix.virtualization.common.Snapshot snapshot = 1; -} - -message DirectPostSnapshotResponse { - oneof result { - DirectPostSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* STAGED LINKING */ - -message StagedPreSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; - com.delphix.virtualization.common.SnapshotParameters snapshot_parameters = 4; -} - -message StagedPreSnapshotResult { } - -message StagedPreSnapshotResponse { - oneof result { - StagedPreSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedPostSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; - com.delphix.virtualization.common.SnapshotParameters snapshot_parameters = 4; -} - -message StagedPostSnapshotResult { - com.delphix.virtualization.common.Snapshot snapshot = 1; -} - -message 
StagedPostSnapshotResponse { - oneof result { - StagedPostSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StartStagingRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StartStagingResult { } - -message StartStagingResponse { - oneof result { - StartStagingResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StopStagingRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StopStagingResult { } - -message StopStagingResponse { - oneof result { - StopStagingResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedStatusRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StagedStatusResult { - enum Status { - ACTIVE = 0; - INACTIVE = 1; - } - Status status = 1; -} - -message StagedStatusResponse { - oneof result { - StagedStatusResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedWorkerRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.StagedSource staged_source = 3; -} - -message StagedWorkerResult { } - -message StagedWorkerResponse { - oneof result { - StagedWorkerResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StagedMountSpecRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.StagedSource staged_source = 2; -} - -message StagedMountSpecResult { - com.delphix.virtualization.common.SingleEntireMount staged_mount = 1; - com.delphix.virtualization.common.OwnershipSpec ownership_spec = 2; -} - -message StagedMountSpecResponse { - oneof result { - StagedMountSpecResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* PROVISIONING */ - -message ConfigureRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.Snapshot snapshot = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message ConfigureResult { - com.delphix.virtualization.common.SourceConfig source_config = 1; -} - -message ConfigureResponse { - oneof result { - ConfigureResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message UnconfigureRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; - bool deleteFlag = 4; -} - -message UnconfigureResult { } - -message UnconfigureResponse { - oneof result { - UnconfigureResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message ReconfigureRequest { - com.delphix.virtualization.common.Snapshot snapshot = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; - com.delphix.virtualization.common.Repository repository = 4; -} - -message ReconfigureResult { - com.delphix.virtualization.common.SourceConfig source_config = 1; -} - 
-message ReconfigureResponse { - oneof result { - ReconfigureResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StartRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message StartResult { } - -message StartResponse { - oneof result { - StartResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message StopRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message StopResult { } - -message StopResponse { - oneof result { - StopResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualPreSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message VirtualPreSnapshotResult { } - -message VirtualPreSnapshotResponse { - oneof result { - VirtualPreSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualPostSnapshotRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message VirtualPostSnapshotResult { - com.delphix.virtualization.common.Snapshot snapshot = 1; -} - -message VirtualPostSnapshotResponse { - oneof result { - VirtualPostSnapshotResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualStatusRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message VirtualStatusResult { - enum Status { - ACTIVE = 0; - INACTIVE = 1; - } - Status status = 1; -} - -message VirtualStatusResponse { - oneof result { - VirtualStatusResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message InitializeRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.SourceConfig source_config = 2; - com.delphix.virtualization.common.VirtualSource virtual_source = 3; -} - -message InitializeResult { } - -message InitializeResponse { - oneof result { - InitializeResult return_value = 1; - PluginErrorResult error = 2; - } -} - -message VirtualMountSpecRequest { - com.delphix.virtualization.common.Repository repository = 1; - com.delphix.virtualization.common.VirtualSource virtual_source = 2; -} - -message VirtualMountSpecResult { - com.delphix.virtualization.common.OwnershipSpec ownership_spec = 1; - repeated com.delphix.virtualization.common.SingleSubsetMount mounts = 2; -} - -message VirtualMountSpecResponse { - oneof result { - VirtualMountSpecResult return_value = 1; - PluginErrorResult error = 2; - } -} - -/* UPGRADE */ - -message UpgradeRequest { - enum Type { - SOURCECONFIG = 0; - REPOSITORY = 1; - LINKEDSOURCE = 2; - VIRTUALSOURCE = 3; - SNAPSHOT = 4; - } - map pre_upgrade_parameters = 1; - Type type = 2; - repeated string migration_ids = 3; -} - -message UpgradeResult { - map post_upgrade_parameters = 1; -} - -message UpgradeResponse { - oneof result { - UpgradeResult return_value = 1; - PluginErrorResult error = 
2; - } -} diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index 866d0169..fe3b144f 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -13,5 +13,6 @@ from dlpx.virtualization.platform._upgrade import * from dlpx.virtualization.platform._virtual import * from dlpx.virtualization.platform._plugin import * +from dlpx.virtualization.platform.util import * from dlpx.virtualization.platform.import_util import * from dlpx.virtualization.platform.import_validations import * diff --git a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py index 06f72348..0bcbd0f6 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py @@ -9,8 +9,8 @@ """ import json from dlpx.virtualization.common import RemoteConnection -from dlpx.virtualization import common_pb2 -from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.platform import validation_util as v from dlpx.virtualization.platform.operation import Operation as Op from dlpx.virtualization.platform.exceptions import ( diff --git a/platform/src/main/python/dlpx/virtualization/platform/_linked.py b/platform/src/main/python/dlpx/virtualization/platform/_linked.py index 01e16296..e06094cc 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_linked.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_linked.py @@ -9,8 +9,8 @@ """ import json from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment -from dlpx.virtualization import common_pb2 -from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.common.exceptions import PluginRuntimeError from dlpx.virtualization.platform import Status from dlpx.virtualization.platform import DirectSource diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py index 390bf121..a538fc24 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -14,7 +14,7 @@ will be the function that was implemented. 
""" import logging -from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.platform import MigrationIdSet from dlpx.virtualization.platform import validation_util as v from dlpx.virtualization.platform.operation import Operation as Op diff --git a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py index dab3bec9..76976c2a 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py @@ -9,8 +9,8 @@ """ import json from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment -from dlpx.virtualization import common_pb2 -from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.platform import VirtualSource from dlpx.virtualization.platform import Status from dlpx.virtualization.platform import Mount diff --git a/platform/src/main/python/dlpx/virtualization/platform/util.py b/platform/src/main/python/dlpx/virtualization/platform/util.py new file mode 100644 index 00000000..5e0a15c3 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/util.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +import dlpx.virtualization.api + +def get_virtualization_api_version(): + """Returns the Virutalization API version string. + + :return: version string + """ + return dlpx.virtualization.api.__version__ \ No newline at end of file diff --git a/platform/src/test/java/NotUsed.java b/platform/src/test/java/NotUsed.java deleted file mode 100644 index 6608cff8..00000000 --- a/platform/src/test/java/NotUsed.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2019 by Delphix. All rights reserved. - */ - -/** - * Gradle will fail when running the test task if there are not classes in the - * Java test jar. This class is simply here to prevent that from happening. - * If a test is introduced in the future this file will be deleted. 
- */ -public class NotUsed { -} diff --git a/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py b/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py index b7264066..dae4325a 100644 --- a/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py +++ b/platform/src/test/python/dlpx/virtualization/test_delphix_platform_generated.py @@ -6,5 +6,5 @@ def test_import_common(): - from dlpx.virtualization import common_pb2 + from dlpx.virtualization.api import common_pb2 assert issubclass(common_pb2.Repository, message.Message) diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin.py b/platform/src/test/python/dlpx/virtualization/test_plugin.py index 91fe0469..4f5203ce 100755 --- a/platform/src/test/python/dlpx/virtualization/test_plugin.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin.py @@ -3,9 +3,10 @@ # import pytest -from dlpx.virtualization import platform_pb2 +import sys +from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) -from dlpx.virtualization import common_pb2 +from dlpx.virtualization.api import common_pb2 from dlpx.virtualization.platform.exceptions import ( IncorrectReturnTypeError, OperationAlreadyDefinedError, PluginRuntimeError) diff --git a/platform/src/test/python/dlpx/virtualization/test_upgrade.py b/platform/src/test/python/dlpx/virtualization/test_upgrade.py index edaba017..1ede1e5e 100755 --- a/platform/src/test/python/dlpx/virtualization/test_upgrade.py +++ b/platform/src/test/python/dlpx/virtualization/test_upgrade.py @@ -4,7 +4,7 @@ import pytest import logging -from dlpx.virtualization import platform_pb2 +from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.platform.exceptions import ( DecoratorNotFunctionError, MigrationIdAlreadyUsedError) from dlpx.virtualization.platform.operation import Operation as Op diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index fb65f349..a4c27915 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,14 +1,14 @@ { "_meta": { "hash": { - "sha256": "afaf41071782a6af61cacb8ff605fba417d37cd93cc79d17726ecb0388430f99" + "sha256": "7a9d640c544b1aa44fbb7e746b9d5cf81e56206b6458c69b69ca83d9bf525c45" }, "pipfile-spec": 6, "requires": {}, "sources": [ { "name": "delphix", - "url": "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/", + "url": "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/", "verifySsl": true } ] @@ -67,6 +67,10 @@ "markers": "python_version < '3'", "version": "==0.6.0.post1" }, + "dvp-platform": { + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-009.tar.gz", + "version": "== 1.1.0-internal-upgrade-009" + }, "entrypoints": { "hashes": [ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", @@ -188,28 +192,6 @@ "markers": "python_version < '3'", "version": "==2.3.5" }, - "protobuf": { - "hashes": [ - "sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4", - "sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811", - "sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444", - "sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96", - "sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2", - "sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef", - 
"sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e", - "sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995", - "sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed", - "sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9", - "sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90", - "sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19", - "sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625", - "sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9", - "sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e", - "sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10" - ], - "index": "delphix", - "version": "==3.6.1" - }, "pycodestyle": { "hashes": [ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", @@ -380,16 +362,12 @@ "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz", - "version": "== 1.1.0-internal-upgrade-008" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz", + "version": "== 1.1.0-internal-upgrade-009" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-008.tar.gz", - "version": "== 1.1.0-internal-upgrade-008" - }, - "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-008.tar.gz", - "version": "== 1.1.0-internal-upgrade-008" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-009.tar.gz", + "version": "== 1.1.0-internal-upgrade-009" }, "entrypoints": { "hashes": [ @@ -422,7 +400,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "functools32": { @@ -489,10 +467,10 @@ }, "packaging": { "hashes": [ - "sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb", - "sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8" + "sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73", + "sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334" ], - "version": "==20.0" + "version": "==20.1" }, "pathlib2": { "hashes": [ diff --git a/tools/build.gradle b/tools/build.gradle index 6b0172b5..da460a24 100644 --- a/tools/build.gradle +++ b/tools/build.gradle @@ -140,7 +140,7 @@ artifacts { dlpxPython { sources { delphix { - url = "https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/" + url = "https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/" } } @@ -157,12 +157,13 @@ dlpxPython { version = "== 0.2.3" } - flake8 { - version = ">= 3.6" + "dvp-platform" { + version = "== $project.version" + path = file(tasks.getByPath(":platform:sdist").getDistFile().toString()) } - protobuf { - version = "== 3.6.1" + flake8 { + version = ">= 3.6" } pyyaml { @@ -228,11 +229,6 @@ dlpxPython { path = file(tasks.getByPath(":common:sdist").getDistFile().toString()) } - "dvp-platform" { - version = "== $project.version" - path = file(tasks.getByPath(":platform:sdist").getDistFile().toString()) - } - "dvp-libs" { version = "== $project.version" path = file(tasks.getByPath(":libs:sdist").getDistFile().toString()) diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt 
index 6f607495..4f5eeefe 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,7 +1,6 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-008.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-008.tar.gz -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-008.tar.gz +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-009.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' @@ -11,7 +10,7 @@ coverage==5.0.3 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 @@ -20,7 +19,7 @@ isort==4.3.21 mccabe==0.6.1 mock==3.0.5 more-itertools==5.0.0 -packaging==20.0 +packaging==20.1 pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.1 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 17e5ab15..0916ee1c 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,4 +1,5 @@ --i https://artifactory.delphix.com/artifactory/api/pypi/delphix-virtual-pypi/simple/ +-i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-009.tar.gz attrs==19.3.0 certifi==2019.11.28 chardet==3.0.4 @@ -18,7 +19,6 @@ markupsafe==1.1.1 mccabe==0.6.1 more-itertools==5.0.0 pathlib2==2.3.5 ; python_version < '3' -protobuf==3.6.1 pycodestyle==2.5.0 pyflakes==2.1.1 pyrsistent==0.15.7 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/package_util.py b/tools/src/main/python/dlpx/virtualization/_internal/package_util.py index 318e19f5..a45e23ef 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/package_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/package_util.py @@ -5,8 +5,10 @@ import functools import logging import os +import re from dlpx.virtualization import _internal as virtualization_internal +from dlpx.virtualization.platform import util from six.moves import configparser logger = logging.getLogger(__name__) @@ -47,9 +49,22 @@ def get_version(): return _get_settings().get('General', 'package_version') +def get_external_version_string(version_string): + """Returns the external version string given an external or internal + (development) version. An external version string contains only digits and + dots, and follows the following format: "1.1.0". The internal version + string might include the development build suffix of the following format: + "1.0.0-internal-001". 
+ + :param version_string: version string in either internal or external format + :return: version string in external format + """ + return re.search(r'([0-9]\.[0-9]\.[0-9])', version_string).group(0) + + @_run_once def get_virtualization_api_version(): - return _get_settings().get('General', 'virtualization_api_version') + return get_external_version_string(util.get_virtualization_api_version()) def get_build_api_version(): diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index b38ebfb3..0d24dd9b 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -210,7 +210,6 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): # error = exceptions.SDKToolingError(str(err)) queue.put({'sdk exception': error}) - finally: sys.path.remove(src_dir) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index a1a62b6c..fcde2397 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -11,11 +11,6 @@ # is used to install the wrappers that are bundled with the plugin. # # package_version is the version of the tools package. -# virtualization_api_version is the version of the Virtualization API -# that this version of the SDK is built against. This currently lives -# in tools for ease, but should move to platform, libs, and common -# at some point since those packages are the ones that are actually -# dependent on the Virtualization API. # # This package follows semantic versioning. # More can be read here: https://semver.org/ @@ -27,7 +22,6 @@ [General] engine_api_version = 1.10.5 package_version = 1.1.0-internal-upgrade-008 -virtualization_api_version = 1.1.0 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index fd7aed87..65215c15 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -16,7 +16,7 @@ # automatically get discovered in the test class if the figure name is used # as the input variable. The idea of fixtures is to define certain object # configs and allow them to get used in different tests but also being allowed -# to set certain parts definated in other fixtures. Read more at: +# to set certain parts defined in other fixtures. Read more at: # https://docs.pytest.org/en/latest/fixture.html # diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 09ec8b4a..67989350 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -2,6 +2,7 @@ # Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
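For reference, the extraction performed by get_external_version_string above can be reproduced in isolation. This is a minimal sketch: only the regular expression comes from the patch, and the helper name strip_build_suffix is made up for illustration.

import re

def strip_build_suffix(version_string):
    # Keep the first "X.Y.Z" triple (single-digit components, e.g. "1.1.0")
    # and drop any "-internal-NNN" development build suffix.
    return re.search(r'([0-9]\.[0-9]\.[0-9])', version_string).group(0)

assert strip_build_suffix('1.1.0-internal-009') == '1.1.0'
assert strip_build_suffix('1.1.0') == '1.1.0'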
# +import pytest from dlpx.virtualization._internal import package_util @@ -42,3 +43,12 @@ def test_get_engine_api_version_json(): def test_get_internal_package_root(): assert package_util.get_internal_package_root().endswith( 'main/python/dlpx/virtualization/_internal') + + @staticmethod + @pytest.mark.parametrize('version_string', [ + '1.1.0', ' 1.1.0', '1.1.0-internal-001', ' 1.1.0-internal-001', + ' 1.1.0-internal-002 ', '1.1.0whatever' + ]) + def test_get_external_version_string(version_string): + assert package_util.get_external_version_string( + version_string) == '1.1.0' From bdf052e4646c7069fd99a69998f572cc0ae7a104 Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Wed, 29 Jan 2020 21:58:15 +0000 Subject: [PATCH 15/25] Update .hooksconfig --- .hooksconfig | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.hooksconfig b/.hooksconfig index 364afa5e..e5b34a31 100644 --- a/.hooksconfig +++ b/.hooksconfig @@ -9,3 +9,6 @@ [branch "master"] gate-allowed-issuetypes = 1,3,4,5,10001,10302 + + [branch "projects/plugin-upgrade"] + gate-allowed-issuetypes = 1,3,4,5,10001,10302 From 49fb696f5f9e4fe35bd23b401574c1f957f6ff1d Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Wed, 29 Jan 2020 14:51:37 -0800 Subject: [PATCH 16/25] PYT-1081 Merge SDK master to plugin-upgrade branch (follow-on, update settings.cfg) --- .../src/main/python/dlpx/virtualization/_internal/settings.cfg | 2 +- .../python/dlpx/virtualization/_internal/test_package_util.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index fcde2397..c6afd32c 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -21,7 +21,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-008 +package_version = 1.1.0-internal-upgrade-009 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 67989350..c0072127 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-008' + assert package_util.get_version() == '1.1.0-internal-upgrade-009' @staticmethod def test_get_virtualization_api_version(): From 6461c5f4f22f9d2c4374ebbd7be4c7d85743269f Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Thu, 30 Jan 2020 02:19:05 +0000 Subject: [PATCH 17/25] Update .hooksconfig --- .hooksconfig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.hooksconfig b/.hooksconfig index e5b34a31..96d8241e 100644 --- a/.hooksconfig +++ b/.hooksconfig @@ -1,7 +1,7 @@ [gate] name = virtualization-sdk shortname = sdk - slack-url = https://hooks.slack.com/services/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 + slack-url = https://hooks.slack.com/services/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 slack-color = 1AD6F5 slack-notify = virt-sdk-pushes approvers = gatekeepers-virtualization-sdk @@ -12,3 +12,4 @@ [branch "projects/plugin-upgrade"] gate-allowed-issuetypes = 1,3,4,5,10001,10302 + gate-comment-check = on From 
b103fb07981cb800d574ebafe216310b6643fef4 Mon Sep 17 00:00:00 2001 From: jeff ngo Date: Thu, 30 Jan 2020 16:30:15 -0800 Subject: [PATCH 18/25] PYT-1086 Fix DeprecationWarning in unit tests PYT-831 Multi-step Upgrade - Invoke upgrade migration functions from upgrade wrappers Reviewed at: http://reviews.delphix.com/r/55381/ --- build.gradle | 2 +- libs/Pipfile.lock | 20 +- libs/lock.dev-requirements.txt | 6 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 22 +- platform/lock.dev-requirements.txt | 8 +- platform/lock.requirements.txt | 2 +- .../dlpx/virtualization/platform/_upgrade.py | 35 ++- .../python/dlpx/virtualization/test_plugin.py | 243 +++++++++++++++++- tools/Pipfile.lock | 66 +++-- tools/lock.dev-requirements.txt | 14 +- tools/lock.requirements.txt | 11 +- .../_internal/test_file_util.py | 2 +- .../_internal/test_plugin_dependency_util.py | 5 +- 14 files changed, 341 insertions(+), 97 deletions(-) diff --git a/build.gradle b/build.gradle index 12e1c6b0..a2c25760 100644 --- a/build.gradle +++ b/build.gradle @@ -7,7 +7,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-009" + version = "1.1.0-internal-upgrade-010" /* * dvp-api is a Python package that contains Python protobuf classes generated based on the Virtualization API. * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 070708a7..da965a0e 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "88a004aeaaed350e8fda61fa79aa62d7fb1dea765db1ec02d5d04603e65f0ebb" + "sha256": "aae77ef38e48ba8e4a9d6dea9cc14a97aff5c0bd019293687163f2cc13c3f8a7" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz", - "version": "== 1.1.0-internal-upgrade-009" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz", + "version": "== 1.1.0-internal-upgrade-010" }, "protobuf": { "hashes": [ @@ -96,11 +96,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", - "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" + "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", + "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" ], "markers": "python_version < '3.8'", - "version": "==1.4.0" + "version": "==1.5.0" }, "mock": { "hashes": [ @@ -131,7 +131,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -196,10 +196,10 @@ }, "zipp": { "hashes": [ - "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", - "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" + "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", + "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" ], - "version": "==1.0.0" + "version": "==1.1.0" } } } diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index 48fbcad1..d6bb1c79 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -4,11 +4,11 @@ attrs==19.3.0 configparser==4.0.2 ; python_version < '3' 
contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==1.4.0 ; python_version < '3.8' +importlib-metadata==1.5.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.1 -pathlib2==2.3.5 ; python_version < '3.6' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 @@ -16,4 +16,4 @@ pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.14.0 wcwidth==0.1.8 -zipp==1.0.0 +zipp==1.1.0 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index bef5995f..50f1fcb9 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz dvp-api==1.1.0-master-003 protobuf==3.6.1 six==1.14.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index 7e5267cd..c46d0f04 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a67caeec42ad44ac52845a5102dc30573211e22d3cc0b209f809da101ddac803" + "sha256": "2c73e08acb2f4c61ae3bb1c90a8a887d8fe961c22ad0aa1f1b99a2fa5ac6b78b" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz", - "version": "== 1.1.0-internal-upgrade-009" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz", + "version": "== 1.1.0-internal-upgrade-010" }, "enum34": { "hashes": [ @@ -102,16 +102,16 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { "hashes": [ - "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", - "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" + "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", + "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" ], "markers": "python_version < '3.8'", - "version": "==1.4.0" + "version": "==1.5.0" }, "mock": { "hashes": [ @@ -142,7 +142,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -207,10 +207,10 @@ }, "zipp": { "hashes": [ - "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", - "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" + "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", + "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" ], - "version": "==1.0.0" + "version": "==1.1.0" } } } diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index 48fbcad1..3072dacc 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -3,12 +3,12 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' 
-importlib-metadata==1.4.0 ; python_version < '3.8' +funcsigs==1.0.2 ; python_version < '3.3' +importlib-metadata==1.5.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.1 -pathlib2==2.3.5 ; python_version < '3.6' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 @@ -16,4 +16,4 @@ pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.14.0 wcwidth==0.1.8 -zipp==1.0.0 +zipp==1.1.0 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 0ea4b9a9..31e6aabc 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz dvp-api==1.1.0-master-003 enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py index a538fc24..fb5297cb 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -39,7 +39,6 @@ def __init__(self): def repository(self, migration_id): def repository_decorator(repository_impl): - std_mig_id = self.__migration_id_set.add( migration_id, repository_impl.__name__) self.repository_id_to_impl[std_mig_id] = v.check_function( @@ -94,21 +93,35 @@ def _success_upgrade_response(upgraded_dict): post_upgrade_parameters=upgraded_dict)) return upgrade_response + def __process_upgrade_request(self, request, id_to_impl): + """Iterate through all objects in the pre_upgrade_parameters map, + invoke all available migrations on each object and its metadata, + and return a map containing the updated metadata for each object. + """ + return_parameters = {} + for (object_ref, metadata) in request.pre_upgrade_parameters.items(): + current_metadata = metadata + for migration_id in request.migration_ids: + current_metadata = id_to_impl[migration_id]( + pre_upgrade_parameters=current_metadata, + type=request.type, + migration_ids=request.migration_ids + ) + return_parameters[object_ref] = current_metadata + + return self._success_upgrade_response(return_parameters) + def _internal_repository(self, request): """Upgrade repositories for plugins. """ if request.type != platform_pb2.UpgradeRequest.REPOSITORY: raise IncorrectUpgradeObjectTypeError( request.type, platform_pb2.UpgradeRequest.REPOSITORY) - # - # Then loop through each object and upgrade the object reference's - # payload through all migrations. For now we just want to print - # all object references passed in. - # + logger.debug('Upgrade repositories [{}]'.format( ', '.join(sorted(request.pre_upgrade_parameters.keys())))) - return self._success_upgrade_response(request.pre_upgrade_parameters) + return self.__process_upgrade_request(request, self.repository_id_to_impl) def _internal_source_config(self, request): """Upgrade source configs for plugins. 
@@ -120,7 +133,7 @@ def _internal_source_config(self, request): logger.debug('Upgrade source configs [{}]'.format( ', '.join(sorted(request.pre_upgrade_parameters.keys())))) - return self._success_upgrade_response(request.pre_upgrade_parameters) + return self.__process_upgrade_request(request, self.source_config_id_to_impl) def _internal_linked_source(self, request): """Upgrade linked source for plugins. @@ -132,7 +145,7 @@ def _internal_linked_source(self, request): logger.debug('Upgrade linked sources [{}]'.format( ', '.join(sorted(request.pre_upgrade_parameters.keys())))) - return self._success_upgrade_response(request.pre_upgrade_parameters) + return self.__process_upgrade_request(request, self.linked_source_id_to_impl) def _internal_virtual_source(self, request): """Upgrade virtual sources for plugins. @@ -144,7 +157,7 @@ def _internal_virtual_source(self, request): logger.debug('Upgrade virtual sources [{}]'.format( ', '.join(sorted(request.pre_upgrade_parameters.keys())))) - return self._success_upgrade_response(request.pre_upgrade_parameters) + return self.__process_upgrade_request(request, self.virtual_source_id_to_impl) def _internal_snapshot(self, request): """Upgrade snapshots for plugins. @@ -156,4 +169,4 @@ def _internal_snapshot(self, request): logger.debug('Upgrade snapshots [{}]'.format( ', '.join(sorted(request.pre_upgrade_parameters.keys())))) - return self._success_upgrade_response(request.pre_upgrade_parameters) + return self.__process_upgrade_request(request, self.snapshot_id_to_impl) diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin.py b/platform/src/test/python/dlpx/virtualization/test_plugin.py index 4f5203ce..0433afbc 100755 --- a/platform/src/test/python/dlpx/virtualization/test_plugin.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin.py @@ -2,13 +2,12 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # +import json import pytest -import sys -from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.api import (platform_pb2, common_pb2) from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) -from dlpx.virtualization.api import common_pb2 from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationAlreadyDefinedError, PluginRuntimeError) + IncorrectReturnTypeError, IncorrectUpgradeObjectTypeError, OperationAlreadyDefinedError, PluginRuntimeError) from mock import MagicMock, patch import fake_generated_definitions @@ -48,6 +47,14 @@ TEST_STAGED_SOURCE_JSON = SIMPLE_JSON.format(TEST_STAGED_SOURCE) TEST_VIRTUAL_SOURCE_JSON = SIMPLE_JSON.format(TEST_VIRTUAL_SOURCE) TEST_SNAPSHOT_PARAMS_JSON = '{"resync": false}' +TEST_PRE_UPGRADE_PARAMS = {'obj': json.dumps({'name': 'upgrade'})} +TEST_POST_MIGRATION_METADATA_1 = ( + json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade'}})) +TEST_POST_MIGRATION_METADATA_2 = ( + json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade', + 'metadata': 'metadata'}})) +TEST_POST_UPGRADE_PARAMS = {'obj': TEST_POST_MIGRATION_METADATA_2} +MIGRATION_IDS = ('2020.1.1', '2020.2.2') class TestPlugin: @@ -1087,3 +1094,231 @@ def staged_mount_spec_impl(staged_source, repository): message = err_info.value.message assert message == 'Shared path is not supported for linked sources.' 
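The wrapper behavior introduced above is easiest to see in isolation: each object's metadata is threaded through the registered migrations in the order the migration ids arrive in the request, and the final value becomes that object's post-upgrade metadata. A minimal sketch, using plain single-argument callables instead of the real migration-id map:

def chain_migrations(pre_upgrade_metadata, migrations):
    # Feed the output of each migration into the next, like the loop in
    # __process_upgrade_request.
    current = pre_upgrade_metadata
    for migrate in migrations:
        current = migrate(current)
    return current

# Two toy migrations: add a prettyName, then add a metadata field.
result = chain_migrations(
    {'name': 'upgrade'},
    [lambda meta: dict(meta, prettyName='prettyUpgrade'),
     lambda meta: dict(meta, metadata='metadata')])
assert result == {'name': 'upgrade',
                  'prettyName': 'prettyUpgrade',
                  'metadata': 'metadata'}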
+ + @staticmethod + def test_upgrade_repository_success(my_plugin): + + @my_plugin.upgrade.repository('2020.1.1') + def upgrade_repository_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.repository('2020.2.2') + def upgrade_repository_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.REPOSITORY + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_repository(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters\ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_source_config_success(my_plugin): + + @my_plugin.upgrade.source_config('2020.1.1') + def upgrade_source_config_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.source_config('2020.2.2') + def upgrade_source_config_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SOURCECONFIG + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_source_config(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_linked_source_success(my_plugin): + + @my_plugin.upgrade.linked_source('2020.1.1') + def upgrade_linked_source_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.linked_source('2020.2.2') + def upgrade_linked_source_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.LINKEDSOURCE + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_linked_source(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_virtual_source_success(my_plugin): + + @my_plugin.upgrade.virtual_source('2020.1.1') + def upgrade_virtual_source_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.virtual_source('2020.2.2') + def upgrade_virtual_source_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.VIRTUALSOURCE + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_virtual_source(upgrade_request)) + + expected_response = 
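The tests that follow exercise the author-facing registration pattern: each migration is declared with a decorator keyed by schema type and migration id. Below is a minimal sketch of what that looks like in plugin code, assuming the SDK's usual Plugin() entry point; at this point in the series a migration still receives (pre_upgrade_parameters, type, migration_ids), which PYT-1098 later reduces to just the old metadata.

from dlpx.virtualization.platform import Plugin

plugin = Plugin()

@plugin.upgrade.repository('2020.1.1')
def repo_upgrade(pre_upgrade_parameters, type, migration_ids):
    # A pass-through migration: return the repository metadata unchanged.
    return pre_upgrade_parameters

@plugin.upgrade.snapshot('2020.1.1')
def snap_upgrade(pre_upgrade_parameters, type, migration_ids):
    # Snapshot metadata gets its own migration for the same id.
    return pre_upgrade_parameters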
platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_snapshot_success(my_plugin): + + @my_plugin.upgrade.snapshot('2020.1.1') + def upgrade_snapshot_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.snapshot('2020.2.2') + def upgrade_snapshot_impl(pre_upgrade_parameters, + type, + migration_ids): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SNAPSHOT + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_snapshot(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_repository_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_repository(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 1.") + + @staticmethod + def test_upgrade_source_config_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_source_config(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 0.") + + @staticmethod + def test_upgrade_linked_source_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_linked_source(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 2.") + + @staticmethod + def test_upgrade_virtual_source_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_virtual_source(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 3.") + + @staticmethod + def test_upgrade_snapshot_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SOURCECONFIG + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_snapshot(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 0 type" + " but should have had type 4.") + + @staticmethod + def test_upgrade_snapshot_fail_with_runtime_exception(my_plugin): + + @my_plugin.upgrade.snapshot('2020.1.1') + def 
upgrade_snapshot_impl(pre_upgrade_parameters, + type, + migration_ids): + raise RuntimeError('RuntimeError in snapshot migration') + + @my_plugin.upgrade.snapshot('2020.2.2') + def upgrade_snapshot_impl(pre_upgrade_parameters, + type, + migration_ids): + raise RuntimeError('RuntimeError in snapshot migration') + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SNAPSHOT + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + with pytest.raises(RuntimeError): + my_plugin.upgrade._internal_snapshot(upgrade_request) diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index a4c27915..b5fa30e1 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "7a9d640c544b1aa44fbb7e746b9d5cf81e56206b6458c69b69ca83d9bf525c45" + "sha256": "fc57c7baad001587adfbd3afda90b074558a10fdd30a68af0a3d6a0800a2bd6f" }, "pipfile-spec": 6, "requires": {}, @@ -64,12 +64,12 @@ "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.4'", "version": "==0.6.0.post1" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-009.tar.gz", - "version": "== 1.1.0-internal-upgrade-009" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-010.tar.gz", + "version": "== 1.1.0-internal-upgrade-010" }, "entrypoints": { "hashes": [ @@ -114,19 +114,19 @@ }, "importlib-metadata": { "hashes": [ - "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", - "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" + "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", + "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" ], "markers": "python_version < '3.8'", - "version": "==1.4.0" + "version": "==1.5.0" }, "jinja2": { "hashes": [ - "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f", - "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de" + "sha256:6e7a3c2934694d59ad334c93dd1b6c96699cf24c53fdb8ec848ac6b23e685734", + "sha256:d6609ae5ec3d56212ca7d802eda654eaf2310000816ce815361041465b108be4" ], "index": "delphix", - "version": "==2.10.3" + "version": "==2.11.0" }, "jsonschema": { "hashes": [ @@ -142,13 +142,16 @@ "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", 
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", @@ -165,7 +168,9 @@ "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" + "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" ], "version": "==1.1.1" }, @@ -176,14 +181,6 @@ ], "version": "==0.6.1" }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "version": "==5.0.0" - }, "pathlib2": { "hashes": [ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", @@ -279,10 +276,10 @@ }, "zipp": { "hashes": [ - "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", - "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" + "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", + "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" ], - "version": "==1.0.0" + "version": "==1.1.0" } }, "develop": { @@ -321,7 +318,7 @@ "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.4'", "version": "==0.6.0.post1" }, "coverage": { @@ -362,12 +359,12 @@ "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz", - "version": "== 1.1.0-internal-upgrade-009" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz", + "version": "== 1.1.0-internal-upgrade-010" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-009.tar.gz", - "version": "== 1.1.0-internal-upgrade-009" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-010.tar.gz", + "version": "== 1.1.0-internal-upgrade-010" }, "entrypoints": { "hashes": [ @@ -400,7 +397,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "functools32": { @@ -428,11 +425,11 @@ }, "importlib-metadata": { "hashes": [ - "sha256:bdd9b7c397c273bcc9a11d6629a38487cd07154fa255a467bf704cd2c258e359", - "sha256:f17c015735e1a88296994c0697ecea7e11db24290941983b08c9feb30921e6d8" + "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", + "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" ], "markers": "python_version < '3.8'", - "version": "==1.4.0" + "version": "==1.5.0" }, "isort": { "hashes": [ @@ -463,6 +460,7 @@ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" ], + "markers": 
"python_version <= '2.7'", "version": "==5.0.0" }, "packaging": { @@ -581,10 +579,10 @@ }, "zipp": { "hashes": [ - "sha256:8dda78f06bd1674bd8720df8a50bb47b6e1233c503a4eed8e7810686bde37656", - "sha256:d38fbe01bbf7a3593a32bc35a9c4453c32bc42b98c377f9bff7e9f8da157786c" + "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", + "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" ], - "version": "==1.0.0" + "version": "==1.1.0" } } } diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 4f5eeefe..c0260095 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,24 +1,24 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-009.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-009.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-010.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' configparser==4.0.2 ; python_version < '3.2' -contextlib2==0.6.0.post1 ; python_version < '3' +contextlib2==0.6.0.post1 ; python_version < '3.4' coverage==5.0.3 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.3' +funcsigs==1.0.2 ; python_version < '3.0' functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 -importlib-metadata==1.4.0 ; python_version < '3.8' +importlib-metadata==1.5.0 ; python_version < '3.8' isort==4.3.21 mccabe==0.6.1 mock==3.0.5 -more-itertools==5.0.0 +more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.1 pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 @@ -33,4 +33,4 @@ six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' wcwidth==0.1.8 yapf==0.28 -zipp==1.0.0 +zipp==1.1.0 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 0916ee1c..d411c801 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,23 +1,22 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-009.tar.gz +./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-010.tar.gz attrs==19.3.0 certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 configparser==4.0.2 ; python_version < '3.2' -contextlib2==0.6.0.post1 ; python_version < '3' +contextlib2==0.6.0.post1 ; python_version < '3.4' entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 functools32==3.2.3.post2 ; python_version < '3.2' idna==2.8 -importlib-metadata==1.4.0 ; python_version < '3.8' -jinja2==2.10.3 +importlib-metadata==1.5.0 ; python_version < '3.8' +jinja2==2.11.0 jsonschema==3.2.0 markupsafe==1.1.1 mccabe==0.6.1 -more-itertools==5.0.0 pathlib2==2.3.5 ; python_version < '3' pycodestyle==2.5.0 pyflakes==2.1.1 @@ -28,4 +27,4 @@ scandir==1.10.0 ; python_version < '3.5' six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' urllib3==1.25.8 -zipp==1.0.0 +zipp==1.1.0 diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py index 8b530043..4de22e21 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py @@ -239,5 +239,5 @@ def 
test_tmpdir_with_raised_exception(): raise RuntimeError('test') except RuntimeError as e: - assert e.message == 'test' + assert str(e) == 'test' assert not os.path.exists(d) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py index 5bd894a8..fc679d52 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py @@ -134,9 +134,8 @@ def test_build_wheel_fails_with_no_setup_file(tmp_path): with pytest.raises(RuntimeError) as excinfo: pdu._build_wheel(tmp_path.as_posix()) - assert excinfo.value.message == ( - 'No setup.py file exists in directory ' - '{}'.format(tmp_path.as_posix())) + assert str(excinfo.value) == ('No setup.py file exists in directory ' + '{}'.format(tmp_path.as_posix())) @staticmethod @mock.patch.object(subprocess, 'Popen') From 1a02565d85a7dfab248aea3df6546e16805eaf60 Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Fri, 31 Jan 2020 02:58:41 +0000 Subject: [PATCH 19/25] Update .hooksconfig --- .hooksconfig | 1 + 1 file changed, 1 insertion(+) diff --git a/.hooksconfig b/.hooksconfig index 96d8241e..42afa5fc 100644 --- a/.hooksconfig +++ b/.hooksconfig @@ -13,3 +13,4 @@ [branch "projects/plugin-upgrade"] gate-allowed-issuetypes = 1,3,4,5,10001,10302 gate-comment-check = on + gate-review-check = on From 8db3703db84476b3b5415dab859217681d6511a8 Mon Sep 17 00:00:00 2001 From: jeff ngo Date: Tue, 4 Feb 2020 11:14:19 -0800 Subject: [PATCH 20/25] PYT-1098 User side hook implementation does not execute. Reviewed at: http://reviews.delphix.com/r/55634/ --- .../dlpx/virtualization/platform/_upgrade.py | 31 +++++++---- .../python/dlpx/virtualization/test_plugin.py | 55 ++++++------------- .../virtualization/_internal/settings.cfg | 2 +- .../fake_plugin/direct/successful.py | 2 +- .../fake_plugin/staged/successful.py | 2 +- .../_internal/test_package_util.py | 2 +- 6 files changed, 40 insertions(+), 54 deletions(-) diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py index fb5297cb..db13d731 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -13,6 +13,7 @@ operation of the same schema, the key will be the migration id, and the value will be the function that was implemented. """ +import json import logging from dlpx.virtualization.api import platform_pb2 from dlpx.virtualization.platform import MigrationIdSet @@ -88,9 +89,10 @@ def migration_id_list(self): @staticmethod def _success_upgrade_response(upgraded_dict): + upgrade_result = platform_pb2.UpgradeResult( + post_upgrade_parameters=upgraded_dict) upgrade_response = platform_pb2.UpgradeResponse( - return_value=platform_pb2.UpgradeResult( - post_upgrade_parameters=upgraded_dict)) + return_value=upgrade_result) return upgrade_response def __process_upgrade_request(self, request, id_to_impl): @@ -98,18 +100,23 @@ def __process_upgrade_request(self, request, id_to_impl): invoke all available migrations on each object and its metadata, and return a map containing the updated metadata for each object. 
""" - return_parameters = {} + post_upgrade_parameters = {} for (object_ref, metadata) in request.pre_upgrade_parameters.items(): - current_metadata = metadata + # Load the object metadata into a dictionary + current_metadata = json.loads(metadata) + # + # Loop through all migrations that were passed into the upgrade + # request. Protobuf will preserve the ordering of repeated + # elements, so we can rely on the backend to sort the migration + # ids before packing them into the request. + # for migration_id in request.migration_ids: - current_metadata = id_to_impl[migration_id]( - pre_upgrade_parameters=current_metadata, - type=request.type, - migration_ids=request.migration_ids - ) - return_parameters[object_ref] = current_metadata - - return self._success_upgrade_response(return_parameters) + # Only try to execute the function if the id exists in the map. + if migration_id in id_to_impl: + current_metadata = id_to_impl[migration_id](current_metadata) + post_upgrade_parameters[object_ref] = json.dumps(current_metadata) + + return self._success_upgrade_response(post_upgrade_parameters) def _internal_repository(self, request): """Upgrade repositories for plugins. diff --git a/platform/src/test/python/dlpx/virtualization/test_plugin.py b/platform/src/test/python/dlpx/virtualization/test_plugin.py index 0433afbc..a71fa26c 100755 --- a/platform/src/test/python/dlpx/virtualization/test_plugin.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin.py @@ -53,7 +53,10 @@ TEST_POST_MIGRATION_METADATA_2 = ( json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade', 'metadata': 'metadata'}})) -TEST_POST_UPGRADE_PARAMS = {'obj': TEST_POST_MIGRATION_METADATA_2} +TEST_POST_UPGRADE_PARAMS = ( + {u'obj': '"{\\"obj\\": {\\"prettyName\\": \\"prettyUpgrade\\", ' + '\\"name\\": \\"upgrade\\", \\"metadata\\": \\"metadata\\"}}"'} +) MIGRATION_IDS = ('2020.1.1', '2020.2.2') @@ -1099,15 +1102,11 @@ def staged_mount_spec_impl(staged_source, repository): def test_upgrade_repository_success(my_plugin): @my_plugin.upgrade.repository('2020.1.1') - def upgrade_repository_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_repository(old_repository): return TEST_POST_MIGRATION_METADATA_1 @my_plugin.upgrade.repository('2020.2.2') - def upgrade_repository_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_repository(old_repository): return TEST_POST_MIGRATION_METADATA_2 upgrade_request = platform_pb2.UpgradeRequest() @@ -1128,15 +1127,11 @@ def upgrade_repository_impl(pre_upgrade_parameters, def test_upgrade_source_config_success(my_plugin): @my_plugin.upgrade.source_config('2020.1.1') - def upgrade_source_config_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_source_config(old_source_config): return TEST_POST_MIGRATION_METADATA_1 @my_plugin.upgrade.source_config('2020.2.2') - def upgrade_source_config_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_source_config(old_source_config): return TEST_POST_MIGRATION_METADATA_2 upgrade_request = platform_pb2.UpgradeRequest() @@ -1157,15 +1152,11 @@ def upgrade_source_config_impl(pre_upgrade_parameters, def test_upgrade_linked_source_success(my_plugin): @my_plugin.upgrade.linked_source('2020.1.1') - def upgrade_linked_source_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_linked_source(old_linked_source): return TEST_POST_MIGRATION_METADATA_1 @my_plugin.upgrade.linked_source('2020.2.2') - def upgrade_linked_source_impl(pre_upgrade_parameters, - type, - 
migration_ids): + def upgrade_linked_source(old_linked_source): return TEST_POST_MIGRATION_METADATA_2 upgrade_request = platform_pb2.UpgradeRequest() @@ -1186,15 +1177,11 @@ def upgrade_linked_source_impl(pre_upgrade_parameters, def test_upgrade_virtual_source_success(my_plugin): @my_plugin.upgrade.virtual_source('2020.1.1') - def upgrade_virtual_source_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_virtual_source(old_virtual_source): return TEST_POST_MIGRATION_METADATA_1 @my_plugin.upgrade.virtual_source('2020.2.2') - def upgrade_virtual_source_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_virtual_source(old_virtual_source): return TEST_POST_MIGRATION_METADATA_2 upgrade_request = platform_pb2.UpgradeRequest() @@ -1215,15 +1202,11 @@ def upgrade_virtual_source_impl(pre_upgrade_parameters, def test_upgrade_snapshot_success(my_plugin): @my_plugin.upgrade.snapshot('2020.1.1') - def upgrade_snapshot_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_snapshot(old_snapshot): return TEST_POST_MIGRATION_METADATA_1 @my_plugin.upgrade.snapshot('2020.2.2') - def upgrade_snapshot_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_snapshot(old_snapshot): return TEST_POST_MIGRATION_METADATA_2 upgrade_request = platform_pb2.UpgradeRequest() @@ -1301,18 +1284,14 @@ def test_upgrade_snapshot_incorrect_upgrade_object_type(my_plugin): " but should have had type 4.") @staticmethod - def test_upgrade_snapshot_fail_with_runtime_exception(my_plugin): + def test_upgrade_snapshot_fail_with_runtime_error(my_plugin): @my_plugin.upgrade.snapshot('2020.1.1') - def upgrade_snapshot_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_snapshot(old_snapshot): raise RuntimeError('RuntimeError in snapshot migration') @my_plugin.upgrade.snapshot('2020.2.2') - def upgrade_snapshot_impl(pre_upgrade_parameters, - type, - migration_ids): + def upgrade_snapshot(old_snapshot): raise RuntimeError('RuntimeError in snapshot migration') upgrade_request = platform_pb2.UpgradeRequest() diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index c6afd32c..88bf3a89 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -21,7 +21,7 @@ # [General] engine_api_version = 1.10.5 -package_version = 1.1.0-internal-upgrade-009 +package_version = 1.1.0-internal-upgrade-010 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py index baf0d855..010c705a 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py @@ -80,5 +80,5 @@ def repo_upgrade(old_repository): @direct.upgrade.snapshot('2019.11.30') -def repo_upgrade(old_snapshot): +def snap_upgrade(old_snapshot): return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py index 52da22cb..31ae1151 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py +++ 
b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py @@ -97,5 +97,5 @@ def repo_upgrade(old_repository): @staged.upgrade.snapshot('2019.11.30') -def repo_upgrade(old_snapshot): +def snap_upgrade(old_snapshot): return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index c0072127..c6bc23ed 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-009' + assert package_util.get_version() == '1.1.0-internal-upgrade-010' @staticmethod def test_get_virtualization_api_version(): From 598e9c602fb92ebde8ce8bd27c72329d8a6efccc Mon Sep 17 00:00:00 2001 From: Ravi Mukkamala Date: Wed, 5 Feb 2020 10:23:02 -0800 Subject: [PATCH 21/25] =?UTF-8?q?PYT-835=20Plugin=20Versioning=20-=20Renam?= =?UTF-8?q?e=20=E2=80=9Cversion=E2=80=9D=20to=20=E2=80=9Cexternal=20versio?= =?UTF-8?q?n=E2=80=9D=20and=20make=20optional?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Reviewed at: http://reviews.delphix.com/r/55561/ --- build.gradle | 2 +- libs/Pipfile.lock | 8 +-- libs/lock.dev-requirements.txt | 2 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 6 +- platform/lock.requirements.txt | 2 +- tools/Pipfile.lock | 22 +++--- tools/lock.dev-requirements.txt | 8 +-- tools/lock.requirements.txt | 6 +- .../_internal/commands/build.py | 9 ++- .../_internal/commands/initialize.py | 10 +-- .../_internal/plugin_importer.py | 69 +++++++++++++------ .../virtualization/_internal/settings.cfg | 4 +- .../plugin_config_schema.json | 7 +- ...plugin_config_schema_no_id_validation.json | 7 +- .../_internal/commands/test_initialize.py | 3 +- .../dlpx/virtualization/_internal/conftest.py | 14 ++-- .../_internal/test_package_util.py | 8 +-- .../_internal/test_plugin_importer.py | 22 +++--- .../_internal/test_plugin_validator.py | 6 +- 20 files changed, 122 insertions(+), 95 deletions(-) diff --git a/build.gradle b/build.gradle index a2c25760..2939446f 100644 --- a/build.gradle +++ b/build.gradle @@ -7,7 +7,7 @@ plugins { } subprojects { - version = "1.1.0-internal-upgrade-010" + version = "1.1.0-internal-upgrade-011" /* * dvp-api is a Python package that contains Python protobuf classes generated based on the Virtualization API. * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. 
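Recapping the PYT-1098 change to __process_upgrade_request above: pre-upgrade metadata arrives as JSON strings, is decoded once per object, only migration ids that were actually registered for that schema type are invoked, and the result is re-encoded before being returned. A minimal standalone sketch of that flow follows; run_upgrade and the toy migration are illustrative, not SDK API.

import json

def run_upgrade(pre_upgrade_parameters, migration_ids, id_to_impl):
    # pre_upgrade_parameters maps object references to JSON-encoded metadata.
    post_upgrade_parameters = {}
    for object_ref, metadata_json in pre_upgrade_parameters.items():
        current = json.loads(metadata_json)
        for migration_id in migration_ids:
            # Skip ids with no registered implementation for this type.
            if migration_id in id_to_impl:
                current = id_to_impl[migration_id](current)
        post_upgrade_parameters[object_ref] = json.dumps(current)
    return post_upgrade_parameters

migrations = {'2020.1.1': lambda meta: dict(meta, prettyName='prettyUpgrade')}
out = run_upgrade({'obj': json.dumps({'name': 'upgrade'})},
                  ['2020.1.1', '2020.2.2'], migrations)
assert json.loads(out['obj']) == {'name': 'upgrade',
                                  'prettyName': 'prettyUpgrade'}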
diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index da965a0e..38776491 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "aae77ef38e48ba8e4a9d6dea9cc14a97aff5c0bd019293687163f2cc13c3f8a7" + "sha256": "07421bb3754a3fdcdbb102693d61f09797525ebc34a5f4eafbadcfd3207f33dc" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz", - "version": "== 1.1.0-internal-upgrade-010" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-011.tar.gz", + "version": "== 1.1.0-internal-upgrade-011" }, "protobuf": { "hashes": [ @@ -91,7 +91,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.0'", + "markers": "python_version < '3.3'", "version": "==1.0.2" }, "importlib-metadata": { diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index d6bb1c79..3072dacc 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -3,7 +3,7 @@ atomicwrites==1.3.0 attrs==19.3.0 configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3' -funcsigs==1.0.2 ; python_version < '3.0' +funcsigs==1.0.2 ; python_version < '3.3' importlib-metadata==1.5.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 50f1fcb9..3633ef91 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-011.tar.gz dvp-api==1.1.0-master-003 protobuf==3.6.1 six==1.14.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index c46d0f04..3865d606 100644 --- a/platform/Pipfile.lock +++ b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "2c73e08acb2f4c61ae3bb1c90a8a887d8fe961c22ad0aa1f1b99a2fa5ac6b78b" + "sha256": "bea146d48c0c9216a05ea0cc053145db320fad09e35fcc7a394ce352ff98f282" }, "pipfile-spec": 6, "requires": {}, @@ -23,8 +23,8 @@ "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz", - "version": "== 1.1.0-internal-upgrade-010" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-011.tar.gz", + "version": "== 1.1.0-internal-upgrade-011" }, "enum34": { "hashes": [ diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 31e6aabc..f501ac71 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-011.tar.gz dvp-api==1.1.0-master-003 enum34==1.1.6 ; python_version < '3.4' protobuf==3.6.1 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index b5fa30e1..fe387568 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "fc57c7baad001587adfbd3afda90b074558a10fdd30a68af0a3d6a0800a2bd6f" + "sha256": 
"a53d1ddd678257dfdad4d2d1ada8d8790e256ac31b2f2bfa6fa79a26fb11dc42" }, "pipfile-spec": 6, "requires": {}, @@ -56,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -68,8 +68,8 @@ "version": "==0.6.0.post1" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-010.tar.gz", - "version": "== 1.1.0-internal-upgrade-010" + "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-011.tar.gz", + "version": "== 1.1.0-internal-upgrade-011" }, "entrypoints": { "hashes": [ @@ -102,7 +102,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "idna": { @@ -310,7 +310,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==4.0.2" }, "contextlib2": { @@ -359,12 +359,12 @@ "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz", - "version": "== 1.1.0-internal-upgrade-010" + "path": "../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-011.tar.gz", + "version": "== 1.1.0-internal-upgrade-011" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-010.tar.gz", - "version": "== 1.1.0-internal-upgrade-010" + "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-011.tar.gz", + "version": "== 1.1.0-internal-upgrade-011" }, "entrypoints": { "hashes": [ @@ -405,7 +405,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3.2'", + "markers": "python_version < '3'", "version": "==3.2.3.post2" }, "futures": { diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index c0260095..97ecd7d6 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,17 +1,17 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-010.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-010.tar.gz +./../common/build/python-dist/dvp-common-1.1.0-internal-upgrade-011.tar.gz +./../libs/build/python-dist/dvp-libs-1.1.0-internal-upgrade-011.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version < '3.2' +configparser==4.0.2 ; python_version < '3' contextlib2==0.6.0.post1 ; python_version < '3.4' coverage==5.0.3 entrypoints==0.3 enum34==1.1.6 flake8==3.7.9 funcsigs==1.0.2 ; python_version < '3.0' -functools32==3.2.3.post2 ; python_version < '3.2' +functools32==3.2.3.post2 ; python_version < '3' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==1.5.0 ; python_version < '3.8' diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index d411c801..adb17fe4 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt 
@@ -1,16 +1,16 @@
 -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/
-./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-010.tar.gz
+./../platform/build/python-dist/dvp-platform-1.1.0-internal-upgrade-011.tar.gz
 attrs==19.3.0
 certifi==2019.11.28
 chardet==3.0.4
 click-configfile==0.2.3
 click==7.0
-configparser==4.0.2 ; python_version < '3.2'
+configparser==4.0.2 ; python_version < '3'
 contextlib2==0.6.0.post1 ; python_version < '3.4'
 entrypoints==0.3
 enum34==1.1.6
 flake8==3.7.9
-functools32==3.2.3.post2 ; python_version < '3.2'
+functools32==3.2.3.post2 ; python_version < '3'
 idna==2.8
 importlib-metadata==1.5.0 ; python_version < '3.8'
 jinja2==2.11.0
diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py
index 41fc5ede..79c353a5 100644
--- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py
+++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py
@@ -159,7 +159,7 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest):
     # This is the output dictionary that will be written
     # to the upload_artifact.
     #
-    return {
+    artifact = {
         # Hard code the type to a set default.
         'type':
         TYPE,
@@ -174,8 +174,6 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest):
         plugin_config_content['id'].lower(),
         'prettyName':
         plugin_config_content['name'],
-        'version':
-        plugin_config_content['version'],
         # set default value of locale to en-us
         'defaultLocale':
         plugin_config_content.get('defaultLocale', LOCALE_DEFAULT),
@@ -213,6 +211,11 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest):
         manifest
     }
 
+    if plugin_config_content.get('externalVersion'):
+        artifact['externalVersion'] = plugin_config_content['externalVersion']
+
+    return artifact
+
 
 def get_linked_source_definition_type(plugin_config_content):
     if 'STAGED' == plugin_config_content['pluginType'].upper():
diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py
index 5bb1e955..7d6d04b5 100644
--- a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py
+++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py
@@ -23,7 +23,7 @@
 DEFAULT_ENTRY_POINT_SYMBOL = 'plugin'
 DEFAULT_ENTRY_POINT = '{}:{}'.format(DEFAULT_ENTRY_POINT_FILE[:-3],
                                      DEFAULT_ENTRY_POINT_SYMBOL)
-DEFAULT_VERSION = '0.1.0'
+DEFAULT_BUILD_NUMBER = '0.1.0'
 
 # Internal constants for the template directory.
 ENTRY_POINT_TEMPLATE_NAME = 'entry_point.py.template'
@@ -124,7 +124,7 @@ def init(root, ingestion_strategy, name, host_type):
                                                  name, DEFAULT_ENTRY_POINT,
                                                  DEFAULT_SRC_DIRECTORY,
                                                  DEFAULT_SCHEMA_FILE, host_type,
-                                                 DEFAULT_VERSION)
+                                                 DEFAULT_BUILD_NUMBER)
         yaml.dump(config, f, default_flow_style=False)
 
     #
@@ -189,7 +189,7 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy, host_type):
 
 
 def _get_default_plugin_config(plugin_id, ingestion_strategy, name,
                                entry_point, src_dir_path, schema_file_path,
-                               host_type, default_version):
+                               host_type, default_build_number):
     """
     Returns a valid plugin configuration as an OrderedDict.
@@ -211,13 +211,13 @@ def _get_default_plugin_config(plugin_id, ingestion_strategy, name,
     # them with '!!python/unicode' prepended to the value.
     config = OrderedDict([('id', plugin_id.encode('utf-8')),
                           ('name', name.encode('utf-8')),
-                          ('version', default_version.encode('utf-8')),
                           ('language', 'PYTHON27'),
                           ('hostTypes', ['UNIX']),
                           ('pluginType', ingestion_strategy.encode('utf-8')),
                           ('entryPoint', entry_point.encode('utf-8')),
                           ('srcDir', src_dir_path.encode('utf-8')),
                           ('schemaFile', schema_file_path.encode('utf-8')),
                           ('hostTypes', [host_type.encode('utf-8')]),
-                          ('buildNumber', default_version.encode('utf-8'))])
+                          ('buildNumber', default_build_number.encode('utf-8'))
+                          ])
     return config
diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py
index 0d24dd9b..9f58feb1 100644
--- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py
+++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py
@@ -185,8 +185,51 @@ def __check_for_required_methods(self):
 
 
 def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate):
-    manifest = {}
+    """
+    Imports the plugin module, runs validations and returns the manifest.
+    """
+    module_content = None
+
+    try:
+        module_content = _import_helper(queue, src_dir, module)
+    except exceptions.UserError:
+        #
+        # Exception here means there was an error importing the module and
+        # queue is updated with the exception details inside _import_helper.
+        #
+        return
+
+    #
+    # Create an instance of plugin module with associated state to pass around
+    # to the validation code.
+    #
+    plugin_module = import_util.PluginModule(src_dir, module, entry_point,
+                                             plugin_type, module_content,
+                                             PluginImporter.v_maps, validate)
+
+    # Validate if the module imported fine and is the expected one.
+    warnings = import_util.validate_import(plugin_module)
+    _process_warnings(queue, warnings)
+
+    # If the import itself had issues, no point validating further.
+    if warnings and len(warnings) > 0:
+        return
+
+    # Run post import validations and consolidate issues.
+    warnings = import_util.validate_post_import(plugin_module)
+    _process_warnings(queue, warnings)
+
+    manifest = _prepare_manifest(entry_point, module_content)
+    queue.put({'manifest': manifest})
+
+
+def _import_helper(queue, src_dir, module):
+    """Helper method to import the module and handle any import time
+    exceptions.
+    """
+    module_content = None
     sys.path.append(src_dir)
+
     try:
         module_content = importlib.import_module(module)
     except (ImportError, TypeError) as err:
@@ -213,28 +256,10 @@ def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate):
     finally:
         sys.path.remove(src_dir)
 
-    #
-    # Create an instance of plugin module with associated state to pass around
-    # to the validation code.
-    #
-    plugin_module = import_util.PluginModule(src_dir, module, entry_point,
-                                             plugin_type, module_content,
-                                             PluginImporter.v_maps, validate)
+    if not module_content:
+        raise exceptions.UserError("Plugin module content is None")
 
-    # Validate if the module imported fine and is the expected one.
-    warnings = import_util.validate_import(plugin_module)
-    _process_warnings(queue, warnings)
-
-    # If the import itself had issues, no point validating further.
-    if warnings and len(warnings) > 0:
-        return
-
-    # Run post import validations and consolidate issues.
-    warnings = import_util.validate_post_import(plugin_module)
-    _process_warnings(queue, warnings)
-
-    manifest = _prepare_manifest(entry_point, module_content)
-    queue.put({'manifest': manifest})
+    return module_content
 
 
 def _process_warnings(queue, warnings):
diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg
index 88bf3a89..5bd2e25d 100644
--- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg
+++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg
@@ -20,8 +20,8 @@
 # versions in those packages until they are shipped out of band.
 #
 [General]
-engine_api_version = 1.10.5
-package_version = 1.1.0-internal-upgrade-010
+engine_api_version = 1.12.0
+package_version = 1.1.0-internal-upgrade-011
 distribution_name = dvp-tools
 package_author = Delphix
 namespace_package = dlpx
diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json
index dcd3e0da..5d49e478 100644
--- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json
+++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json
@@ -8,9 +8,8 @@
     "name": {
       "type": "string"
     },
-    "version": {
-      "type": "string",
-      "pattern": "^([0-9]+)\\.([0-9]+)\\.([a-zA-Z0-9_]+)$"
+    "externalVersion": {
+      "type": "string"
     },
     "hostTypes": {
       "type": "array",
@@ -54,5 +53,5 @@
     }
   },
   "additionalProperties": false,
-  "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"]
+  "required": ["id", "name", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"]
 }
diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json
index b54ac778..703382fa 100644
--- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json
+++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json
@@ -7,9 +7,8 @@
     "name": {
       "type": "string"
     },
-    "version": {
-      "type": "string",
-      "pattern": "^([0-9]+)\\.([0-9]+)\\.([a-zA-Z0-9_]+)$"
+    "externalVersion": {
+      "type": "string"
    },
     "hostTypes": {
       "type": "array",
@@ -53,5 +52,5 @@
     }
   },
   "additionalProperties": false,
-  "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"]
+  "required": ["id", "name", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"]
 }
diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 985765d4..5486eaa6 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -94,8 +94,7 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, assert config['entryPoint'] == init.DEFAULT_ENTRY_POINT assert config['srcDir'] == init.DEFAULT_SRC_DIRECTORY assert config['schemaFile'] == init.DEFAULT_SCHEMA_FILE - assert
config['version'] == init.DEFAULT_VERSION - assert config['buildNumber'] == init.DEFAULT_VERSION + assert config['buildNumber'] == init.DEFAULT_BUILD_NUMBER # Validate the schema file is identical to the template. schema_file_path = os.path.join(tmpdir.strpath, config['schemaFile']) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 65215c15..66ec17bf 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -168,7 +168,7 @@ def artifact_file_created(): @pytest.fixture -def plugin_config_content(plugin_id, plugin_name, version, language, +def plugin_config_content(plugin_id, plugin_name, external_version, language, host_types, plugin_type, entry_point, src_dir, schema_file, manual_discovery, build_number): """ @@ -186,8 +186,8 @@ def plugin_config_content(plugin_id, plugin_name, version, language, if plugin_name: config['name'] = plugin_name - if version: - config['version'] = version + if external_version: + config['externalVersion'] = external_version if language: config['language'] = language @@ -229,7 +229,7 @@ def plugin_name(): @pytest.fixture -def version(): +def external_version(): return '2.0.0' @@ -551,7 +551,7 @@ def basic_artifact_content(engine_api, virtual_source_definition, 'type': 'Plugin', 'name': '16bef554-9470-11e9-b2e3-8c8590d4a42c', 'prettyName': 'python_vfiles', - 'version': '2.0.0', + 'externalVersion': '2.0.0', 'defaultLocale': 'en-us', 'language': 'PYTHON27', 'hostTypes': ['UNIX'], @@ -598,7 +598,7 @@ def artifact_content(engine_api, virtual_source_definition, 'type': 'Plugin', 'name': '16bef554-9470-11e9-b2e3-8c8590d4a42c', 'prettyName': 'python_vfiles', - 'version': '2.0.0', + 'externalVersion': '2.0.0', 'defaultLocale': 'en-us', 'language': 'PYTHON27', 'hostTypes': ['UNIX'], @@ -636,7 +636,7 @@ def artifact_content(engine_api, virtual_source_definition, @pytest.fixture def engine_api(): - return {'type': 'APIVersion', 'major': 1, 'minor': 10, 'micro': 5} + return {'type': 'APIVersion', 'major': 1, 'minor': 12, 'micro': 0} @pytest.fixture diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index c6bc23ed..32079995 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-upgrade-010' + assert package_util.get_version() == '1.1.0-internal-upgrade-011' @staticmethod def test_get_virtualization_api_version(): @@ -17,7 +17,7 @@ def test_get_virtualization_api_version(): @staticmethod def test_get_engine_api_version(): - assert package_util.get_engine_api_version_from_settings() == '1.10.5' + assert package_util.get_engine_api_version_from_settings() == '1.12.0' @staticmethod def test_get_build_api_version_json(): @@ -34,8 +34,8 @@ def test_get_engine_api_version_json(): engine_api_version = { 'type': 'APIVersion', 'major': 1, - 'minor': 10, - 'micro': 5 + 'minor': 12, + 'micro': 0 } assert package_util.get_engine_api_version() == engine_api_version diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 6949f2f5..64929bda 
100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -58,17 +58,17 @@ def test_plugin_module_content_none(mock_import, src_dir, plugin_type, entry_point_module, entry_point_object): mock_import.return_value = None - result = () - - with pytest.raises(exceptions.UserError) as err_info: - importer = PluginImporter(src_dir, entry_point_module, - entry_point_object, plugin_type, False) - importer.validate_plugin_module() - result = importer.result - - message = str(err_info) - assert result == () - assert 'Plugin module content is None.' in message + importer = PluginImporter(src_dir, entry_point_module, + entry_point_object, plugin_type, False) + importer.validate_plugin_module() + result = importer.result + + # + # If module_content is None, importer does not perform any validations + # and just does a return. So result should have an empty manifest and + # assert to make sure it is the case. + # + assert result.plugin_manifest == {} @staticmethod @mock.patch('importlib.import_module') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 35711d5e..52722bd4 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -62,8 +62,10 @@ def test_plugin_missing_field(plugin_config_file, plugin_config_content): @staticmethod @mock.patch('os.path.isabs', return_value=False) - @pytest.mark.parametrize('version,expected', - [('xxx', "'xxx' does not match"), ('1.0.0', None), + @pytest.mark.parametrize('external_version,expected', + [(1, "1 is not of type 'string'"), + (1.0, "1.0 is not of type 'string'"), + ('my_version', None), ('1.0.0', None), ('1.0.0_HF', None)]) def test_plugin_version_format(src_dir, plugin_config_file, plugin_config_content, expected): From 5b35b8f3fd81eacb6715aeefbe8ff493b0a13cf5 Mon Sep 17 00:00:00 2001 From: jeff ngo Date: Thu, 20 Feb 2020 10:51:11 -0800 Subject: [PATCH 22/25] PYT-1110 Generated Python classes do not work with unset non-required properties Reviewed at: http://reviews.delphix.com/r/55896/ --- .../codegen/templates/base_model_.mustache | 22 ++++++++++++++++ .../_internal/commands/test_templates.py | 26 +++++-------------- 2 files changed, 28 insertions(+), 20 deletions(-) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache b/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache index 73972217..80f49d0b 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache +++ b/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache @@ -39,6 +39,28 @@ class Model(object): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) + if value is None: + # Plugins use the JSON schema specification to define their + # datatypes. JSON schemas, and therefore plugin data + # definitions, distinguish between these two independent + # situations: + # - The property X exists, and has the value `null` + # - The property X does not exist + # + # Unfortunately, Swagger's generated code conflates these two + # cases together. In either case, we'll receive `None` here. 
+ # + # We don't know of a way that we can 100% reliably know which of + # these two cases is what the plugin code intended. However, + # we expect that real-world plugin code will almost always + # intend the "does not exist" case. + # + # So, for now, we'll simply omit these properties from the dict. + # If we want to be more sophisticated in future, we could start + # analyzing the property's subschema, or we could perhaps + # customize Swagger's generated code so it can distinguish + # these two cases. + continue attr = self.attribute_map[attr] if isinstance(value, list): result[attr] = list(map( diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py index 7bea69b8..27f087b1 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py @@ -130,10 +130,7 @@ def test_success(module): assert not test_object.string_property test_dict = test_object.to_dict() - assert test_dict == { - 'requiredStringProperty': 'test string', - 'stringProperty': None - } + assert test_dict == {'requiredStringProperty': 'test string'} from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -160,10 +157,7 @@ def test_unicode_success(module): assert not test_object.string_property test_dict = test_object.to_dict() - assert test_dict == { - 'requiredStringProperty': u'test\u2345\u2603', - 'stringProperty': None - } + assert test_dict == {'requiredStringProperty': u'test\u2345\u2603'} from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -342,9 +336,7 @@ def test_success(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredNumberProperty': 200.5, - 'numberProperty': None, - 'requiredIntegerProperty': -50, - 'integerProperty': None + 'requiredIntegerProperty': -50 } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -1269,20 +1261,17 @@ def test_successs(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredStringProperty': 'A', - 'stringProperty': None, 'requiredObjectProperty': { 'TWO': 'dos', 'ONE': 'uno' }, - 'objectProperty': None, - 'requiredArrayProperty': ['DO', 'RE', 'MI'], - 'arrayProperty': None + 'requiredArrayProperty': ['DO', 'RE', 'MI'] } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @staticmethod - def test_successs_setter(module): + def test_success_setter(module): test_object = module.TestDefinition(required_string_property='B', required_object_property={}, required_array_property=[]) @@ -1300,14 +1289,11 @@ def test_successs_setter(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredStringProperty': 'A', - 'stringProperty': None, 'requiredObjectProperty': { 'TWO': 'dos', 'ONE': 'uno' }, - 'objectProperty': None, - 'requiredArrayProperty': ['DO', 'RE', 'MI'], - 'arrayProperty': None + 'requiredArrayProperty': ['DO', 'RE', 'MI'] } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object From e5c1822dd93b6044765eec56550770c149c6c114 Mon Sep 17 00:00:00 2001 From: Filip Drozdowski Date: Mon, 2 Mar 2020 14:49:39 -0800 Subject: [PATCH 23/25] PYT-1157 Bump Virtualization SDK version to 2.0.0 Reviewed at: http://reviews.delphix.com/r/56293/ --- build.gradle | 2 +- 
libs/Pipfile.lock | 15 ++-- libs/lock.dev-requirements.txt | 2 +- libs/lock.requirements.txt | 2 +- platform/Pipfile.lock | 27 +++--- platform/lock.dev-requirements.txt | 4 +- platform/lock.requirements.txt | 4 +- tools/Pipfile.lock | 87 +++++++++++-------- tools/lock.dev-requirements.txt | 16 ++-- tools/lock.requirements.txt | 16 ++-- .../virtualization/_internal/settings.cfg | 2 +- .../_internal/test_package_util.py | 2 +- 12 files changed, 100 insertions(+), 79 deletions(-) diff --git a/build.gradle b/build.gradle index 463074ba..cb816d61 100644 --- a/build.gradle +++ b/build.gradle @@ -12,7 +12,7 @@ subprojects { * dvpApiVersion is the version of the Virtualization API that we want this version of the SDK to be built against. */ project.ext.dvpApiVersion = "1.1.0-master-003" - version = "1.1.0-internal-009" + version = "2.0.0-internal-001" } def binDir = "${rootProject.projectDir}/bin" diff --git a/libs/Pipfile.lock b/libs/Pipfile.lock index 7a54c2b0..b76c4dfe 100644 --- a/libs/Pipfile.lock +++ b/libs/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "96c143cc35c4ddbf14ef9338369004944fe83b92bdd506787f7e4249950c411c" + "sha256": "f0b64fa9ad09f81f67cb8acff855a0512430ef1de376a966fa030c8aeab82cf5" }, "pipfile-spec": 6, "requires": {}, @@ -19,11 +19,12 @@ "sha256:df435f1bb843703d1a93346781ebb1ae0b6f61e4722d90399f6b0a641ad73c5f", "sha256:eff849f3681e0c429a339a96f2861c8923606058b2941e1103d20e842f39d377" ], + "index": "delphix", "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-009.tar.gz", - "version": "== 1.1.0-internal-009" + "path": "../common/build/python-dist/dvp-common-2.0.0-internal-001.tar.gz", + "version": "== 2.0.0-internal-001" }, "protobuf": { "hashes": [ @@ -106,6 +107,7 @@ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" ], + "index": "delphix", "version": "==3.0.5" }, "more-itertools": { @@ -158,6 +160,7 @@ "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], + "index": "delphix", "version": "==4.6.9" }, "scandir": { @@ -193,10 +196,10 @@ }, "zipp": { "hashes": [ - "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", - "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" + "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1", + "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921" ], - "version": "==1.1.0" + "version": "==1.2.0" } } } diff --git a/libs/lock.dev-requirements.txt b/libs/lock.dev-requirements.txt index ff2f2775..2e516b6f 100644 --- a/libs/lock.dev-requirements.txt +++ b/libs/lock.dev-requirements.txt @@ -16,4 +16,4 @@ pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.14.0 wcwidth==0.1.8 -zipp==1.1.0 +zipp==1.2.0 diff --git a/libs/lock.requirements.txt b/libs/lock.requirements.txt index 721f484a..3d4e49a9 100644 --- a/libs/lock.requirements.txt +++ b/libs/lock.requirements.txt @@ -1,5 +1,5 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-009.tar.gz +./../common/build/python-dist/dvp-common-2.0.0-internal-001.tar.gz dvp-api==1.1.0-master-003 protobuf==3.6.1 six==1.14.0 diff --git a/platform/Pipfile.lock b/platform/Pipfile.lock index e2eed857..719c5a21 100644 --- a/platform/Pipfile.lock +++ 
b/platform/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "460feeb50203ca480cdcdcb4eb5163c9b9f845d5bbf63fdb51b0938ef40c62d9" + "sha256": "28137b0ee9f526f93a9399ca61c34b87a9dd878e2440e6871409b10175cd77c7" }, "pipfile-spec": 6, "requires": {}, @@ -19,21 +19,22 @@ "sha256:df435f1bb843703d1a93346781ebb1ae0b6f61e4722d90399f6b0a641ad73c5f", "sha256:eff849f3681e0c429a339a96f2861c8923606058b2941e1103d20e842f39d377" ], + "index": "delphix", "version": "==1.1.0-master-003" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-009.tar.gz", - "version": "== 1.1.0-internal-009" + "path": "../common/build/python-dist/dvp-common-2.0.0-internal-001.tar.gz", + "version": "== 2.0.0-internal-001" }, "enum34": { "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" + "sha256:13ef9a1c478203252107f66c25b99b45b1865693ca1284aab40dafa7e1e7ac17", + "sha256:708aabfb3d5898f99674c390d360d59efdd08547019763622365f19e84a7fef4", + "sha256:98df1f1937840b7d8012fea7f0b36392a3e6fd8a2f429c48a3ff4b1aad907f3f" ], + "index": "delphix", "markers": "python_version < '3.4'", - "version": "==1.1.6" + "version": "==1.1.9" }, "protobuf": { "hashes": [ @@ -116,6 +117,7 @@ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" ], + "index": "delphix", "version": "==3.0.5" }, "more-itertools": { @@ -139,7 +141,7 @@ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db", "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868" ], - "markers": "python_version < '3.6'", + "markers": "python_version < '3'", "version": "==2.3.5" }, "pluggy": { @@ -168,6 +170,7 @@ "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], + "index": "delphix", "version": "==4.6.9" }, "scandir": { @@ -203,10 +206,10 @@ }, "zipp": { "hashes": [ - "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", - "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" + "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1", + "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921" ], - "version": "==1.1.0" + "version": "==1.2.0" } } } diff --git a/platform/lock.dev-requirements.txt b/platform/lock.dev-requirements.txt index ff2f2775..bcb20b73 100644 --- a/platform/lock.dev-requirements.txt +++ b/platform/lock.dev-requirements.txt @@ -8,7 +8,7 @@ importlib-metadata==1.5.0 ; python_version < '3.8' mock==3.0.5 more-itertools==5.0.0 ; python_version <= '2.7' packaging==20.1 -pathlib2==2.3.5 ; python_version < '3.6' +pathlib2==2.3.5 ; python_version < '3' pluggy==0.13.1 py==1.8.1 pyparsing==2.4.6 @@ -16,4 +16,4 @@ pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.14.0 wcwidth==0.1.8 -zipp==1.1.0 +zipp==1.2.0 diff --git a/platform/lock.requirements.txt b/platform/lock.requirements.txt index 0757dac2..00d1d934 100644 --- a/platform/lock.requirements.txt +++ b/platform/lock.requirements.txt @@ -1,6 +1,6 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-009.tar.gz 
+./../common/build/python-dist/dvp-common-2.0.0-internal-001.tar.gz dvp-api==1.1.0-master-003 -enum34==1.1.6 ; python_version < '3.4' +enum34==1.1.9 ; python_version < '3.4' protobuf==3.6.1 six==1.14.0 diff --git a/tools/Pipfile.lock b/tools/Pipfile.lock index 4d5f8509..1c412ba6 100644 --- a/tools/Pipfile.lock +++ b/tools/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "118dbac5b390b788d092f96c1ea86e6ba156593b873ab47324f53fd37b890bb2" + "sha256": "e4120cdf58551cb6888ad7d4750a36d71fc634cf1c899832606b8adb3e4d0b05" }, "pipfile-spec": 6, "requires": {}, @@ -40,6 +40,7 @@ "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" ], + "index": "delphix", "version": "==7.0" }, "click-configfile": { @@ -47,6 +48,7 @@ "sha256:95beec13bee950e98f43c81dcdabef4f644091559ea66298f9dadf59351d90d1", "sha256:af2ae7123af57d850cd18edd915893e655b6b1bc30d1302fd040b1059bec073d" ], + "index": "delphix", "version": "==0.2.3" }, "configparser": { @@ -54,7 +56,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -62,12 +64,12 @@ "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.4'", "version": "==0.6.0.post1" }, "dvp-platform": { - "path": "../platform/build/python-dist/dvp-platform-1.1.0-internal-009.tar.gz", - "version": "== 1.1.0-internal-009" + "path": "../platform/build/python-dist/dvp-platform-2.0.0-internal-001.tar.gz", + "version": "== 2.0.0-internal-001" }, "entrypoints": { "hashes": [ @@ -78,19 +80,20 @@ }, "enum34": { "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" + "sha256:13ef9a1c478203252107f66c25b99b45b1865693ca1284aab40dafa7e1e7ac17", + "sha256:708aabfb3d5898f99674c390d360d59efdd08547019763622365f19e84a7fef4", + "sha256:98df1f1937840b7d8012fea7f0b36392a3e6fd8a2f429c48a3ff4b1aad907f3f" ], + "index": "delphix", "markers": null, - "version": "==1.1.6" + "version": "==1.1.9" }, "flake8": { "hashes": [ "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" ], + "index": "delphix", "version": "==3.7.9" }, "functools32": { @@ -98,15 +101,15 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "idna": { "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" + "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", + "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" ], - "version": "==2.8" + "version": "==2.9" }, "importlib-metadata": { "hashes": [ @@ -121,6 +124,7 @@ 
"sha256:93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250", "sha256:b0eaf100007721b5c16c1fc1eecb87409464edc10469ddc9a22a27a99123be49" ], + "index": "delphix", "version": "==2.11.1" }, "jsonschema": { @@ -128,6 +132,7 @@ "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163", "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a" ], + "index": "delphix", "version": "==3.2.0" }, "markupsafe": { @@ -217,14 +222,16 @@ "sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994", "sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615" ], + "index": "delphix", "version": "==5.3" }, "requests": { "hashes": [ - "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", - "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" + "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", + "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" ], - "version": "==2.22.0" + "index": "delphix", + "version": "==2.23.0" }, "scandir": { "hashes": [ @@ -268,10 +275,10 @@ }, "zipp": { "hashes": [ - "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", - "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" + "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1", + "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921" ], - "version": "==1.1.0" + "version": "==1.2.0" } }, "develop": { @@ -302,7 +309,7 @@ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c", "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df" ], - "markers": "python_version == '2.7'", + "markers": "python_version < '3.2'", "version": "==4.0.2" }, "contextlib2": { @@ -310,7 +317,7 @@ "sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e", "sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.4'", "version": "==0.6.0.post1" }, "coverage": { @@ -347,15 +354,16 @@ "sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37", "sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0" ], + "index": "delphix", "version": "==5.0.3" }, "dvp-common": { - "path": "../common/build/python-dist/dvp-common-1.1.0-internal-009.tar.gz", - "version": "== 1.1.0-internal-009" + "path": "../common/build/python-dist/dvp-common-2.0.0-internal-001.tar.gz", + "version": "== 2.0.0-internal-001" }, "dvp-libs": { - "path": "../libs/build/python-dist/dvp-libs-1.1.0-internal-009.tar.gz", - "version": "== 1.1.0-internal-009" + "path": "../libs/build/python-dist/dvp-libs-2.0.0-internal-001.tar.gz", + "version": "== 2.0.0-internal-001" }, "entrypoints": { "hashes": [ @@ -366,19 +374,20 @@ }, "enum34": { "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" + "sha256:13ef9a1c478203252107f66c25b99b45b1865693ca1284aab40dafa7e1e7ac17", + "sha256:708aabfb3d5898f99674c390d360d59efdd08547019763622365f19e84a7fef4", + "sha256:98df1f1937840b7d8012fea7f0b36392a3e6fd8a2f429c48a3ff4b1aad907f3f" ], + "index": "delphix", "markers": null, - "version": "==1.1.6" + "version": "==1.1.9" }, "flake8": { "hashes": [ 
"sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" ], + "index": "delphix", "version": "==3.7.9" }, "funcsigs": { @@ -386,7 +395,7 @@ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "functools32": { @@ -394,7 +403,7 @@ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0", "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.2'", "version": "==3.2.3.post2" }, "futures": { @@ -409,6 +418,7 @@ "hashes": [ "sha256:66216f26b9d2c52e81808f3e674a6fb65d4bf719721394a1a9be926177e55fbe" ], + "index": "delphix", "version": "==0.9.7" }, "importlib-metadata": { @@ -424,6 +434,7 @@ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" ], + "index": "delphix", "version": "==4.3.21" }, "mccabe": { @@ -438,6 +449,7 @@ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3", "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8" ], + "index": "delphix", "version": "==3.0.5" }, "more-itertools": { @@ -504,6 +516,7 @@ "sha256:19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339", "sha256:c77a5f30a90e0ce24db9eaa14ddfd38d4afb5ea159309bdd2dae55b931bc9324" ], + "index": "delphix", "version": "==4.6.9" }, "pytest-cov": { @@ -511,6 +524,7 @@ "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", "sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626" ], + "index": "delphix", "version": "==2.8.1" }, "scandir": { @@ -558,14 +572,15 @@ "sha256:02ace10a00fa2e36c7ebd1df2ead91dbfbd7989686dc4ccbdc549e95d19f5780", "sha256:6f94b6a176a7c114cfa6bad86d40f259bbe0f10cf2fa7f2f4b3596fc5802a41b" ], + "index": "delphix", "version": "==0.28" }, "zipp": { "hashes": [ - "sha256:15428d652e993b6ce86694c3cccf0d71aa7afdc6ef1807fa25a920e9444e0281", - "sha256:d9d2efe11d3a3fb9184da550d35bd1319dc8e30a63255927c82bb42fca1f4f7c" + "sha256:c70410551488251b0fee67b460fb9a536af8d6f9f008ad10ac51f615b6a521b1", + "sha256:e0d9e63797e483a30d27e09fffd308c59a700d365ec34e93cc100844168bf921" ], - "version": "==1.1.0" + "version": "==1.2.0" } } } diff --git a/tools/lock.dev-requirements.txt b/tools/lock.dev-requirements.txt index 4b2ceb2d..5ac3058b 100644 --- a/tools/lock.dev-requirements.txt +++ b/tools/lock.dev-requirements.txt @@ -1,17 +1,17 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../common/build/python-dist/dvp-common-1.1.0-internal-009.tar.gz -./../libs/build/python-dist/dvp-libs-1.1.0-internal-009.tar.gz +./../common/build/python-dist/dvp-common-2.0.0-internal-001.tar.gz +./../libs/build/python-dist/dvp-libs-2.0.0-internal-001.tar.gz atomicwrites==1.3.0 attrs==19.3.0 backports.functools-lru-cache==1.6.1 ; python_version < '3.2' -configparser==4.0.2 ; python_version == '2.7' -contextlib2==0.6.0.post1 ; python_version < '3' +configparser==4.0.2 ; python_version < '3.2' +contextlib2==0.6.0.post1 ; python_version < '3.4' coverage==5.0.3 entrypoints==0.3 -enum34==1.1.6 +enum34==1.1.9 flake8==3.7.9 -funcsigs==1.0.2 ; python_version < '3.3' -functools32==3.2.3.post2 ; python_version < '3' +funcsigs==1.0.2 ; python_version < 
'3.0' +functools32==3.2.3.post2 ; python_version < '3.2' futures==3.3.0 ; python_version < '3.2' httpretty==0.9.7 importlib-metadata==1.5.0 ; python_version < '3.8' @@ -33,4 +33,4 @@ six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' wcwidth==0.1.8 yapf==0.28 -zipp==1.1.0 +zipp==1.2.0 diff --git a/tools/lock.requirements.txt b/tools/lock.requirements.txt index 813df1a9..428dd528 100644 --- a/tools/lock.requirements.txt +++ b/tools/lock.requirements.txt @@ -1,17 +1,17 @@ -i https://artifactory.delphix.com/artifactory/api/pypi/dvp-virtual-pypi/simple/ -./../platform/build/python-dist/dvp-platform-1.1.0-internal-009.tar.gz +./../platform/build/python-dist/dvp-platform-2.0.0-internal-001.tar.gz attrs==19.3.0 certifi==2019.11.28 chardet==3.0.4 click-configfile==0.2.3 click==7.0 -configparser==4.0.2 ; python_version == '2.7' -contextlib2==0.6.0.post1 ; python_version < '3' +configparser==4.0.2 ; python_version < '3.2' +contextlib2==0.6.0.post1 ; python_version < '3.4' entrypoints==0.3 -enum34==1.1.6 +enum34==1.1.9 flake8==3.7.9 -functools32==3.2.3.post2 ; python_version < '3' -idna==2.8 +functools32==3.2.3.post2 ; python_version < '3.2' +idna==2.9 importlib-metadata==1.5.0 ; python_version < '3.8' jinja2==2.11.1 jsonschema==3.2.0 @@ -22,9 +22,9 @@ pycodestyle==2.5.0 pyflakes==2.1.1 pyrsistent==0.15.7 pyyaml==5.3 -requests==2.22.0 +requests==2.23.0 scandir==1.10.0 ; python_version < '3.5' six==1.14.0 typing==3.7.4.1 ; python_version < '3.5' urllib3==1.25.8 -zipp==1.1.0 +zipp==1.2.0 diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 23151425..81f05b59 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -21,7 +21,7 @@ # [General] engine_api_version = 1.12.0 -package_version = 1.1.0-internal-009 +package_version = 2.0.0-internal-001 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 9e99ad8a..1df04eab 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -9,7 +9,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-009' + assert package_util.get_version() == '2.0.0-internal-001' @staticmethod def test_get_virtualization_api_version(): From b4b02a2edf3d5a6ddfba9054d5fd92336554dab7 Mon Sep 17 00:00:00 2001 From: Filip Drozdowski Date: Mon, 2 Mar 2020 16:24:55 -0800 Subject: [PATCH 24/25] Bump version to 2.0.0-internal-001 --- .bumpversion.cfg | 2 +- common/src/main/python/dlpx/virtualization/common/VERSION | 2 +- dvp/src/main/python/dlpx/virtualization/VERSION | 2 +- libs/src/main/python/dlpx/virtualization/libs/VERSION | 2 +- platform/src/main/python/dlpx/virtualization/platform/VERSION | 2 +- tools/src/main/python/dlpx/virtualization/_internal/VERSION | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 89ad4961..b447cbc3 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.1.0-internal-7 +current_version = 2.0.0-internal-001 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-(?P[a-z]+)\-(?P\d+))? 
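The bumped .bumpversion.cfg above relies on a parse pattern with named groups. A hedged reconstruction of that pattern, with the group names (major/minor/patch/release/build) assumed rather than taken from the config, accepting versions such as 2.0.0-internal-001:

    import re

    # Assumed group names; only the overall shape of the pattern is implied by
    # the .bumpversion.cfg hunk above.
    VERSION_RE = re.compile(
        r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'
        r'(\-(?P<release>[a-z]+)\-(?P<build>\d+))?')

    match = VERSION_RE.match('2.0.0-internal-001')
    assert match.group('major') == '2'
    assert match.group('release') == 'internal'
    assert match.group('build') == '001'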
diff --git a/common/src/main/python/dlpx/virtualization/common/VERSION b/common/src/main/python/dlpx/virtualization/common/VERSION index 04f83ef9..817c47e6 100644 --- a/common/src/main/python/dlpx/virtualization/common/VERSION +++ b/common/src/main/python/dlpx/virtualization/common/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/dvp/src/main/python/dlpx/virtualization/VERSION b/dvp/src/main/python/dlpx/virtualization/VERSION index 04f83ef9..817c47e6 100644 --- a/dvp/src/main/python/dlpx/virtualization/VERSION +++ b/dvp/src/main/python/dlpx/virtualization/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/libs/src/main/python/dlpx/virtualization/libs/VERSION b/libs/src/main/python/dlpx/virtualization/libs/VERSION index 04f83ef9..817c47e6 100644 --- a/libs/src/main/python/dlpx/virtualization/libs/VERSION +++ b/libs/src/main/python/dlpx/virtualization/libs/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/VERSION b/platform/src/main/python/dlpx/virtualization/platform/VERSION index 04f83ef9..817c47e6 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/VERSION +++ b/platform/src/main/python/dlpx/virtualization/platform/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/tools/src/main/python/dlpx/virtualization/_internal/VERSION b/tools/src/main/python/dlpx/virtualization/_internal/VERSION index 04f83ef9..817c47e6 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/VERSION +++ b/tools/src/main/python/dlpx/virtualization/_internal/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file From 8ca98d0db99d0e300bc25caa184752eec3e131f5 Mon Sep 17 00:00:00 2001 From: Lindsey Nguyen Date: Tue, 3 Mar 2020 18:41:32 -0800 Subject: [PATCH 25/25] PYT-1095 Bump up API version in SDK to support new backend upgrade related changes Reviewed at: http://reviews.delphix.com/r/56374/ --- .../main/python/dlpx/virtualization/_internal/settings.cfg | 2 +- .../test/python/dlpx/virtualization/_internal/conftest.py | 2 +- .../dlpx/virtualization/_internal/test_package_util.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index 81f05b59..955a239f 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -20,7 +20,7 @@ # versions in those packages until they are shipped out of band. 
# [General] -engine_api_version = 1.12.0 +engine_api_version = 1.11.2 package_version = 2.0.0-internal-001 distribution_name = dvp-tools package_author = Delphix diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index 66ec17bf..16293064 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -636,7 +636,7 @@ def artifact_content(engine_api, virtual_source_definition, @pytest.fixture def engine_api(): - return {'type': 'APIVersion', 'major': 1, 'minor': 12, 'micro': 0} + return {'type': 'APIVersion', 'major': 1, 'minor': 11, 'micro': 2} @pytest.fixture diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 1df04eab..bbfff990 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -17,7 +17,7 @@ def test_get_virtualization_api_version(): @staticmethod def test_get_engine_api_version(): - assert package_util.get_engine_api_version_from_settings() == '1.12.0' + assert package_util.get_engine_api_version_from_settings() == '1.11.2' @staticmethod def test_get_build_api_version_json(): @@ -34,8 +34,8 @@ def test_get_engine_api_version_json(): engine_api_version = { 'type': 'APIVersion', 'major': 1, - 'minor': 12, - 'micro': 0 + 'minor': 11, + 'micro': 2 } assert package_util.get_engine_api_version() == engine_api_version
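For reference, the engine_api_version string in settings.cfg corresponds to the APIVersion dict asserted in the tests above; a small illustrative helper, not the actual package_util implementation:

    def engine_api_version_to_json(version_string):
        # '1.11.2' -> {'type': 'APIVersion', 'major': 1, 'minor': 11, 'micro': 2}
        major, minor, micro = (int(part) for part in version_string.split('.'))
        return {'type': 'APIVersion', 'major': major, 'minor': minor, 'micro': micro}

    assert engine_api_version_to_json('1.11.2') == {
        'type': 'APIVersion',
        'major': 1,
        'minor': 11,
        'micro': 2
    }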