diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 89ad4961..b447cbc3 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.1.0-internal-7 +current_version = 2.0.0-internal-001 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-(?P[a-z]+)\-(?P\d+))? diff --git a/.hooksconfig b/.hooksconfig index 364afa5e..d4f11e7f 100644 --- a/.hooksconfig +++ b/.hooksconfig @@ -1,11 +1,16 @@ [gate] name = virtualization-sdk shortname = sdk - slack-url = https://hooks.slack.com/services/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 - slack-color = 1AD6F5 + slack-url = https://hooks.slack.com/services/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 + slack-color = 1AD6F5 slack-notify = virt-sdk-pushes approvers = gatekeepers-virtualization-sdk tags-allowed = true [branch "master"] gate-allowed-issuetypes = 1,3,4,5,10001,10302 + + [branch "projects/plugin-upgrade"] + gate-allowed-issuetypes = 1,3,4,5,10001,10302 + gate-comment-check = on + gate-review-check = on diff --git a/common/src/main/python/dlpx/virtualization/common/VERSION b/common/src/main/python/dlpx/virtualization/common/VERSION index 04f83ef9..817c47e6 100644 --- a/common/src/main/python/dlpx/virtualization/common/VERSION +++ b/common/src/main/python/dlpx/virtualization/common/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/dvp/requirements.txt b/dvp/requirements.txt index 28ecde71..e584d23d 100644 --- a/dvp/requirements.txt +++ b/dvp/requirements.txt @@ -1,15 +1,15 @@ bump2version==0.5.11 contextlib2==0.6.0.post1 ; python_version < '3' funcsigs==1.0.2 ; python_version < '3.0' -importlib-metadata==0.23 ; python_version < '3.8' +importlib-metadata==1.3.0 ; python_version < '3.8' more-itertools==5.0.0 ; python_version <= '2.7' -packaging==19.2 -pathlib2==2.3.5 ; python_version < '3.6' -pluggy==0.13.0 -py==1.8.0 -pyparsing==2.4.5 -pytest==4.6.6 +packaging==20.0 +pathlib2==2.3.5 ; 
python_version < '3' +pluggy==0.13.1 +py==1.8.1 +pyparsing==2.4.6 +pytest==4.6.9 scandir==1.10.0 ; python_version < '3.5' six==1.13.0 -wcwidth==0.1.7 +wcwidth==0.1.8 zipp==0.6.0 diff --git a/dvp/src/main/python/dlpx/virtualization/VERSION b/dvp/src/main/python/dlpx/virtualization/VERSION index 04f83ef9..817c47e6 100644 --- a/dvp/src/main/python/dlpx/virtualization/VERSION +++ b/dvp/src/main/python/dlpx/virtualization/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/libs/src/main/python/dlpx/virtualization/libs/VERSION b/libs/src/main/python/dlpx/virtualization/libs/VERSION index 04f83ef9..817c47e6 100644 --- a/libs/src/main/python/dlpx/virtualization/libs/VERSION +++ b/libs/src/main/python/dlpx/virtualization/libs/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/libs/src/test/python/dlpx/virtualization/test_libs.py b/libs/src/test/python/dlpx/virtualization/test_libs.py index adc2e29b..83364932 100644 --- a/libs/src/test/python/dlpx/virtualization/test_libs.py +++ b/libs/src/test/python/dlpx/virtualization/test_libs.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import mock @@ -189,11 +189,12 @@ def test_run_bash_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_bash(remote_connection, command, variables, use_login_shell) - assert err_info.value.message == ( - "The function run_bash's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_bash's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) @staticmethod def test_run_bash_bad_use_login_shell(remote_connection): @@ -213,7 +214,6 @@ def test_run_bash_bad_use_login_shell(remote_connection): class TestLibsRunSync: @staticmethod def test_run_sync(remote_connection): - expected_run_sync_response = libs_pb2.RunSyncResponse() expected_source_directory = 'sourceDirectory' @@ -269,7 +269,6 @@ def test_run_sync_with_actionable_error(remote_connection): @staticmethod def test_run_sync_with_nonactionable_error(remote_connection): - response = libs_pb2.RunSyncResponse() na_error = libs_pb2.NonActionableLibraryError() response.error.non_actionable_error.CopyFrom(na_error) @@ -593,11 +592,12 @@ def test_run_powershell_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_powershell(remote_connection, command, variables) - assert err_info.value.message == ( - "The function run_powershell's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_powershell's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " 
type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) class TestLibsRunExpect: @@ -647,12 +647,12 @@ def test_run_expect_check_true_exitcode_success(remote_connection): def mock_run_expect(actual_run_expect_request): assert actual_run_expect_request.command == expected_command assert ( - actual_run_expect_request.remote_connection.environment.name - == remote_connection.environment.name + actual_run_expect_request.remote_connection.environment.name + == remote_connection.environment.name ) assert ( - actual_run_expect_request.remote_connection.environment.reference - == remote_connection.environment.reference + actual_run_expect_request.remote_connection.environment.reference + == remote_connection.environment.reference ) return expected_run_expect_response @@ -704,7 +704,6 @@ def test_run_expect_with_actionable_error(remote_connection): @staticmethod def test_run_expect_with_nonactionable_error(remote_connection): - response = libs_pb2.RunExpectResponse() na_error = libs_pb2.NonActionableLibraryError() response.error.non_actionable_error.CopyFrom(na_error) @@ -768,8 +767,9 @@ def test_run_expect_bad_variables(remote_connection): with pytest.raises(IncorrectArgumentTypeError) as err_info: libs.run_expect(remote_connection, command, variables) - assert err_info.value.message == ( - "The function run_expect's argument 'variables' was" - " a dict of {type 'str':type 'int', type 'str':type 'str'}" - " but should be of" - " type 'dict of basestring:basestring' if defined.") + message = ("The function run_expect's argument 'variables' was" + " a dict of {{type 'str':type '{}', type 'str':type '{}'}}" + " but should be of" + " type 'dict of basestring:basestring' if defined.") + assert (err_info.value.message == message.format('int', 'str') or + err_info.value.message == message.format('str', 'int')) diff --git 
a/platform/src/main/python/dlpx/virtualization/platform/VERSION b/platform/src/main/python/dlpx/virtualization/platform/VERSION index 04f83ef9..817c47e6 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/VERSION +++ b/platform/src/main/python/dlpx/virtualization/platform/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/__init__.py b/platform/src/main/python/dlpx/virtualization/platform/__init__.py index 51c5d6aa..fe3b144f 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/__init__.py +++ b/platform/src/main/python/dlpx/virtualization/platform/__init__.py @@ -4,6 +4,15 @@ __path__ = __import__('pkgutil').extend_path(__path__, __name__) + +from dlpx.virtualization.platform.migration_id_set import * +from dlpx.virtualization.platform.validation_util import * from dlpx.virtualization.platform._plugin_classes import * +from dlpx.virtualization.platform._discovery import * +from dlpx.virtualization.platform._linked import * +from dlpx.virtualization.platform._upgrade import * +from dlpx.virtualization.platform._virtual import * from dlpx.virtualization.platform._plugin import * -from dlpx.virtualization.platform.util import * \ No newline at end of file +from dlpx.virtualization.platform.util import * +from dlpx.virtualization.platform.import_util import * +from dlpx.virtualization.platform.import_validations import * diff --git a/platform/src/main/python/dlpx/virtualization/platform/_discovery.py b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py new file mode 100644 index 00000000..0bcbd0f6 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_discovery.py @@ -0,0 +1,163 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +# -*- coding: utf-8 -*- + +"""DiscoveryOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['DiscoveryOperations'] + + +class DiscoveryOperations(object): + + def __init__(self): + self.repository_impl = None + self.source_config_impl = None + + def repository(self): + def repository_decorator(repository_impl): + if self.repository_impl: + raise OperationAlreadyDefinedError(Op.DISCOVERY_REPOSITORY) + + self.repository_impl = v.check_function(repository_impl, + Op.DISCOVERY_REPOSITORY) + return repository_impl + return repository_decorator + + def source_config(self): + def source_config_decorator(source_config_impl): + if self.source_config_impl: + raise OperationAlreadyDefinedError(Op.DISCOVERY_SOURCE_CONFIG) + self.source_config_impl = v.check_function( + source_config_impl, Op.DISCOVERY_SOURCE_CONFIG) + return source_config_impl + return source_config_decorator + + def _internal_repository(self, request): + """Repository discovery wrapper. + + Executed just after adding or refreshing an environment. This plugin + operation is run prior to discovering source configs. This plugin + operation returns a list of repositories installed on a environment. + + Discover the repositories on an environment given a source connection. + + Args: + request (RepositoryDiscoveryRequest): Repository + Discovery operation arguments. + + Returns: + RepositoryDiscoveryResponse: The return value of repository + discovery operation. 
+ """ + from generated.definitions import RepositoryDefinition + + def to_protobuf(repository): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(repository.to_dict()) + repository_protobuf = common_pb2.Repository() + repository_protobuf.parameters.CopyFrom(parameters) + return repository_protobuf + + if not self.repository_impl: + raise OperationNotDefinedError(Op.DISCOVERY_REPOSITORY) + + repositories = self.repository_impl( + source_connection=RemoteConnection.from_proto(request.source_connection)) + + # Validate that this is a list of Repository objects + if not isinstance(repositories, list): + raise IncorrectReturnTypeError( + Op.DISCOVERY_REPOSITORY, + type(repositories), + [RepositoryDefinition]) + + if not all(isinstance(repo, RepositoryDefinition) + for repo in repositories): + raise IncorrectReturnTypeError( + Op.DISCOVERY_REPOSITORY, + [type(repo) for repo in repositories], + [RepositoryDefinition]) + + repository_discovery_response = ( + platform_pb2.RepositoryDiscoveryResponse()) + repository_protobuf_list = [to_protobuf(repo) for repo in repositories] + repository_discovery_response.return_value.repositories.extend( + repository_protobuf_list) + return repository_discovery_response + + def _internal_source_config(self, request): + """Source config discovery wrapper. + + Executed when adding or refreshing an environment. This plugin + operation is run after discovering repositories and before + persisting/updating repository and source config data in MDS. This + plugin operation returns a list of source configs from a discovered + repository. + + Discover the source configs on an environment given a discovered + repository. + + Args: + request (SourceConfigDiscoveryRequest): Source + Config Discovery arguments. + + Returns: + SourceConfigDiscoveryResponse: The return value of source config + discovery operation. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + def to_protobuf(source_config): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(source_config.to_dict()) + source_config_protobuf = common_pb2.SourceConfig() + source_config_protobuf.parameters.CopyFrom(parameters) + return source_config_protobuf + + if not self.source_config_impl: + raise OperationNotDefinedError(Op.DISCOVERY_SOURCE_CONFIG) + + repository_definition = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + source_configs = self.source_config_impl( + source_connection=RemoteConnection.from_proto(request.source_connection), + repository=repository_definition) + + # Validate that this is a list of SourceConfigDefinition objects + if not isinstance(source_configs, list): + raise IncorrectReturnTypeError( + Op.DISCOVERY_SOURCE_CONFIG, + type(source_configs), + [SourceConfigDefinition]) + + if not all(isinstance(config, SourceConfigDefinition) + for config in source_configs): + raise IncorrectReturnTypeError( + Op.DISCOVERY_SOURCE_CONFIG, + [type(config) for config in source_configs], + [SourceConfigDefinition]) + + source_config_discovery_response = ( + platform_pb2.SourceConfigDiscoveryResponse()) + source_config_protobuf_list = [to_protobuf(config) + for config in source_configs] + source_config_discovery_response.return_value.source_configs.extend( + source_config_protobuf_list) + return source_config_discovery_response diff --git a/platform/src/main/python/dlpx/virtualization/platform/_linked.py b/platform/src/main/python/dlpx/virtualization/platform/_linked.py new file mode 100644 index 00000000..e06094cc --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_linked.py @@ -0,0 +1,678 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +# -*- coding: utf-8 -*- + +"""LinkedOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.common.exceptions import PluginRuntimeError +from dlpx.virtualization.platform import Status +from dlpx.virtualization.platform import DirectSource +from dlpx.virtualization.platform import StagedSource +from dlpx.virtualization.platform import Mount +from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['LinkedOperations'] + + +class LinkedOperations(object): + + def __init__(self): + self.pre_snapshot_impl = None + self.post_snapshot_impl = None + self.start_staging_impl = None + self.stop_staging_impl = None + self.status_impl = None + self.worker_impl = None + self.mount_specification_impl = None + + def pre_snapshot(self): + def pre_snapshot_decorator(pre_snapshot_impl): + if self.pre_snapshot_impl: + raise OperationAlreadyDefinedError(Op.LINKED_PRE_SNAPSHOT) + self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, + Op.LINKED_PRE_SNAPSHOT) + return pre_snapshot_impl + return pre_snapshot_decorator + + def post_snapshot(self): + def post_snapshot_decorator(post_snapshot_impl): + if self.post_snapshot_impl: + raise OperationAlreadyDefinedError(Op.LINKED_POST_SNAPSHOT) + self.post_snapshot_impl = v.check_function(post_snapshot_impl, + Op.LINKED_POST_SNAPSHOT) + return post_snapshot_impl + return post_snapshot_decorator + + def start_staging(self): + def start_staging_decorator(start_staging_impl): + if self.start_staging_impl: + raise 
OperationAlreadyDefinedError(Op.LINKED_START_STAGING) + self.start_staging_impl = v.check_function(start_staging_impl, + Op.LINKED_START_STAGING) + return start_staging_impl + return start_staging_decorator + + def stop_staging(self): + def stop_staging_decorator(stop_staging_impl): + if self.stop_staging_impl: + raise OperationAlreadyDefinedError(Op.LINKED_STOP_STAGING) + self.stop_staging_impl = v.check_function(stop_staging_impl, + Op.LINKED_STOP_STAGING) + return stop_staging_impl + return stop_staging_decorator + + def status(self): + def status_decorator(status_impl): + if self.status_impl: + raise OperationAlreadyDefinedError(Op.LINKED_STATUS) + self.status_impl = v.check_function(status_impl, Op.LINKED_STATUS) + return status_impl + return status_decorator + + def worker(self): + def worker_decorator(worker_impl): + if self.worker_impl: + raise OperationAlreadyDefinedError(Op.LINKED_WORKER) + self.worker_impl = v.check_function(worker_impl, Op.LINKED_WORKER) + return worker_impl + return worker_decorator + + def mount_specification(self): + def mount_specification_decorator(mount_specification_impl): + if self.mount_specification_impl: + raise OperationAlreadyDefinedError( + Op.LINKED_MOUNT_SPEC) + self.mount_specification_impl = v.check_function( + mount_specification_impl, Op.LINKED_MOUNT_SPEC) + return mount_specification_impl + return mount_specification_decorator + + def _internal_direct_pre_snapshot(self, request): + """Pre Snapshot Wrapper for direct plugins. + + Executed before creating a snapshot. This plugin + operation is run prior to creating a snapshot for a direct source. + + Run pre-snapshot operation for a direct source. + + Args: + request (DirectPreSnapshotRequest): Pre Snapshot arguments. + + Returns: + DirectPreSnapshotResponse: A response containing + DirectPreSnapshotResult if successful or PluginErrorResult in case + of an error. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) + + direct_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.direct_source.linked_source.parameters.json)) + direct_source = DirectSource( + guid=request.direct_source.linked_source.guid, + connection=RemoteConnection.from_proto(request.direct_source.connection), + parameters=direct_source_definition) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.pre_snapshot_impl( + direct_source=direct_source, + repository=repository, + source_config=source_config) + + direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse() + direct_pre_snapshot_response.return_value.CopyFrom( + platform_pb2.DirectPreSnapshotResult()) + + return direct_pre_snapshot_response + + def _internal_direct_post_snapshot(self, request): + """Post Snapshot Wrapper for direct plugins. + + Executed after creating a snapshot. This plugin + operation is run after creating a snapshot for a direct source. + + Run post-snapshot operation for a direct source. + + Args: + request (DirectPostSnapshotRequest): Post Snapshot arguments. + + Returns: + DirectPostSnapshotResponse: A response containing the return value - + DirectPostSnapshotResult which has the snapshot metadata on success. + In case of errors, response object will contain PluginErrorResult. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) + + direct_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.direct_source.linked_source.parameters.json)) + direct_source = DirectSource( + guid=request.direct_source.linked_source.guid, + connection=RemoteConnection.from_proto(request.direct_source.connection), + parameters=direct_source_definition) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + snapshot = self.post_snapshot_impl( + direct_source=direct_source, + repository=repository, + source_config=source_config) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + direct_post_snapshot_response = ( + platform_pb2.DirectPostSnapshotResponse()) + direct_post_snapshot_response.return_value.snapshot.CopyFrom( + to_protobuf(snapshot)) + + return direct_post_snapshot_response + + def _internal_staged_pre_snapshot(self, request): + """Pre Snapshot Wrapper for staged plugins. + + Executed before creating a snapshot. This plugin + operation is run prior to creating a snapshot for a staged source. + + Run pre-snapshot operation for a staged source. + + Args: + request (StagedPreSnapshotRequest): Pre Snapshot arguments. 
+ + Returns: + StagedPreSnapshotResponse: A response containing + StagedPreSnapshotResult if successful or PluginErrorResult + in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotParametersDefinition + + # + # While linked.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) + + linked_source = request.staged_source.linked_source + staged_source_definition = (LinkedSourceDefinition.from_dict( + json.loads(linked_source.parameters.json))) + staged_mount = request.staged_source.staged_mount + mount = Mount( + remote_environment=RemoteEnvironment.from_proto(staged_mount.remote_environment), + mount_path=staged_mount.mount_path, + shared_path=staged_mount.shared_path) + staged_source = StagedSource( + guid=linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + snapshot_parameters = SnapshotParametersDefinition.from_dict( + json.loads(request.snapshot_parameters.parameters.json)) + + self.pre_snapshot_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config, + snapshot_parameters=snapshot_parameters) + + response = platform_pb2.StagedPreSnapshotResponse() + response.return_value.CopyFrom(platform_pb2.StagedPreSnapshotResult()) + + return response + 
+ def _internal_staged_post_snapshot(self, request): + """Post Snapshot Wrapper for staged plugins. + + Executed after creating a snapshot. This plugin + operation is run after creating a snapshot for a staged source. + + Run post-snapshot operation for a staged source. + + Args: + request (StagedPostSnapshotRequest): Post Snapshot arguments. + + Returns: + StagedPostSnapshotResponse: A response containing the return value + StagedPostSnapshotResult which has the snapshot metadata on + success. In case of errors, response object will contain + PluginErrorResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SnapshotParametersDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment= + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = 
RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + snapshot_parameters = SnapshotParametersDefinition.from_dict( + json.loads(request.snapshot_parameters.parameters.json)) + + snapshot = self.post_snapshot_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config, + snapshot_parameters=snapshot_parameters) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + response = platform_pb2.StagedPostSnapshotResponse() + response.return_value.snapshot.CopyFrom(to_protobuf(snapshot)) + + return response + + def _internal_start_staging(self, request): + """Start staging Wrapper for staged plugins. + + Executed when enabling the staging source. This plugin + operation is run to start the staging source as part + of the enable operation. + + Run start operation for a staged source. + + Args: + request (StartStagingRequest): Start arguments. + + Returns: + StartStagingResponse: A response containing StartStagingResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.start_staging() is not a required operation, this should + # not be called if it wasn't implemented. 
+ # + if not self.start_staging_impl: + raise OperationNotDefinedError(Op.LINKED_START_STAGING) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.start_staging_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + start_staging_response = platform_pb2.StartStagingResponse() + start_staging_response.return_value.CopyFrom( + platform_pb2.StartStagingResult()) + + return start_staging_response + + def _internal_stop_staging(self, request): + """Stop staging Wrapper for staged plugins. + + Executed when disabling the staging source. This plugin + operation is run to stop the staging source as part + of the disable operation. + + Run stop operation for a staged source. + + Args: + request (StopStagingRequest): Stop arguments. + + Returns: + StopStagingResponse: A response containing StopStagingResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.stop_staging() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.stop_staging_impl: + raise OperationNotDefinedError(Op.LINKED_STOP_STAGING) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.stop_staging_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + stop_staging_response = platform_pb2.StopStagingResponse() + stop_staging_response.return_value.CopyFrom( + platform_pb2.StopStagingResult()) + + return stop_staging_response + + def _internal_status(self, request): + """Staged Status Wrapper for staged plugins. + + Executed as part of several operations to get the status + of a staged source - active or inactive. + + Run status operation for a staged source. + + Args: + request (StagedStatusRequest): Post Snapshot arguments. 
+ + Returns: + StagedStatusResponse: A response containing the return value - + StagedStatusResult which has active or inactive status. In + case of errors, response object will contain PluginErrorResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.status() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.status_impl: + raise OperationNotDefinedError(Op.LINKED_STATUS) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + status = self.status_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + # Validate that this is a Status object. 
+ if not isinstance(status, Status): + raise IncorrectReturnTypeError( + Op.LINKED_STATUS, type(status), Status) + + staged_status_response = platform_pb2.StagedStatusResponse() + staged_status_response.return_value.status = status.value + + return staged_status_response + + def _internal_worker(self, request): + """Staged Worker Wrapper for staged plugins. + + Executed as part of validated sync. This plugin + operation is run to sync staging source as part + of the validated sync operation. + + Run worker operation for a staged source. + + Args: + request (StagedWorkerRequest): Worker arguments. + + Returns: + StagedWorkerResponse: A response containing StagedWorkerResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + from generated.definitions import SourceConfigDefinition + + # + # While linked.worker() is not a required operation, this should + # not be called if it wasn't implemented. 
+ # + if not self.worker_impl: + raise OperationNotDefinedError(Op.LINKED_WORKER) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads( + request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.worker_impl( + staged_source=staged_source, + repository=repository, + source_config=source_config) + + staged_worker_response = platform_pb2.StagedWorkerResponse() + staged_worker_response.return_value.CopyFrom( + platform_pb2.StagedWorkerResult()) + + return staged_worker_response + + def _internal_mount_specification(self, request): + """Staged Mount/Ownership Spec Wrapper for staged plugins. + + Executed before creating a snapshot during sync or before + enable/disable. This plugin operation is run before mounting datasets + on staging to set the mount path and/or ownership. + + Run mount/ownership spec operation for a staged source. + + Args: + request (StagedMountSpecRequest): Mount Spec arguments. + + Returns: + StagedMountSpecResponse: A response containing the return value - + StagedMountSpecResult which has the mount/ownership metadata on + success. In case of errors, response object will contain + PluginErrorResult. 
+ """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import RepositoryDefinition + from generated.definitions import LinkedSourceDefinition + + def to_protobuf_single_mount(single_mount): + if single_mount.shared_path: + raise PluginRuntimeError( + 'Shared path is not supported for linked sources.') + + single_mount_protobuf = common_pb2.SingleEntireMount() + single_mount_protobuf.mount_path = single_mount.mount_path + single_mount_protobuf.remote_environment.CopyFrom( + single_mount.remote_environment.to_proto()) + return single_mount_protobuf + + def to_protobuf_ownership_spec(ownership_spec): + ownership_spec_protobuf = common_pb2.OwnershipSpec() + ownership_spec_protobuf.uid = ownership_spec.uid + ownership_spec_protobuf.gid = ownership_spec.gid + return ownership_spec_protobuf + + if not self.mount_specification_impl: + raise OperationNotDefinedError(Op.LINKED_MOUNT_SPEC) + + staged_source_definition = LinkedSourceDefinition.from_dict( + json.loads(request.staged_source.linked_source.parameters.json)) + mount = Mount( + remote_environment=( + RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), + mount_path=request.staged_source.staged_mount.mount_path, + shared_path=request.staged_source.staged_mount.shared_path) + staged_source = StagedSource( + guid=request.staged_source.linked_source.guid, + source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), + parameters=staged_source_definition, + mount=mount, + staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + mount_spec = self.mount_specification_impl( + staged_source=staged_source, + repository=repository) + + # Validate that this is a MountSpecification object. 
+ if not isinstance(mount_spec, MountSpecification): + raise IncorrectReturnTypeError( + Op.LINKED_MOUNT_SPEC, + type(mount_spec), + MountSpecification) + + # Only one mount is supported for linked sources. + mount_len = len(mount_spec.mounts) + if mount_len != 1: + raise PluginRuntimeError( + 'Exactly one mount must be provided for staging sources.' + ' Found {}'.format(mount_len)) + + staged_mount = to_protobuf_single_mount(mount_spec.mounts[0]) + + staged_mount_spec_response = platform_pb2.StagedMountSpecResponse() + staged_mount_spec_response.return_value.staged_mount.CopyFrom( + staged_mount) + + # Ownership spec is optional for linked sources. + if mount_spec.ownership_specification: + ownership_spec = to_protobuf_ownership_spec( + mount_spec.ownership_specification) + staged_mount_spec_response.return_value.ownership_spec.CopyFrom( + ownership_spec) + + return staged_mount_spec_response \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py index ac9df5e1..2758f34b 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/_plugin.py +++ b/platform/src/main/python/dlpx/virtualization/platform/_plugin.py @@ -80,1489 +80,21 @@ def my_configure_implementation(source, repository, snapshot): fail. The internal methods should only be called by the platform so it's safe to have the import in the methods as the objects will exist at runtime. 
""" -import json -from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment -from dlpx.virtualization.api import common_pb2 -from dlpx.virtualization.api import platform_pb2 -from dlpx.virtualization.common.exceptions import PluginRuntimeError -from dlpx.virtualization.platform import VirtualSource -from dlpx.virtualization.platform import DirectSource -from dlpx.virtualization.platform import StagedSource -from dlpx.virtualization.platform import Status -from dlpx.virtualization.platform import Mount -from dlpx.virtualization.platform import MountSpecification -from dlpx.virtualization.platform.operation import Operation as Op -from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationNotDefinedError, - OperationAlreadyDefinedError) +from dlpx.virtualization.platform import (DiscoveryOperations, + LinkedOperations, + VirtualOperations, + UpgradeOperations) __all__ = ['Plugin'] -class DiscoveryOperations(object): - - def __init__(self): - self.repository_impl = None - self.source_config_impl = None - - def repository(self): - def repository_decorator(repository_impl): - if self.repository_impl: - raise OperationAlreadyDefinedError(Op.DISCOVERY_REPOSITORY) - - self.repository_impl = repository_impl - return repository_impl - return repository_decorator - - def source_config(self): - def source_config_decorator(source_config_impl): - if self.source_config_impl: - raise OperationAlreadyDefinedError(Op.DISCOVERY_SOURCE_CONFIG) - self.source_config_impl = source_config_impl - return source_config_impl - return source_config_decorator - - def _internal_repository(self, request): - """Repository discovery wrapper. - - Executed just after adding or refreshing an environment. This plugin - operation is run prior to discovering source configs. This plugin - operation returns a list of repositories installed on a environment. - - Discover the repositories on an environment given a source connection. 
- - Args: - request (RepositoryDiscoveryRequest): Repository - Discovery operation arguments. - - Returns: - RepositoryDiscoveryResponse: The return value of repository - discovery operation. - """ - from generated.definitions import RepositoryDefinition - - def to_protobuf(repository): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(repository.to_dict()) - repository_protobuf = common_pb2.Repository() - repository_protobuf.parameters.CopyFrom(parameters) - return repository_protobuf - - if not self.repository_impl: - raise OperationNotDefinedError(Op.DISCOVERY_REPOSITORY) - - repositories = self.repository_impl( - source_connection=RemoteConnection.from_proto(request.source_connection)) - - # Validate that this is a list of Repository objects - if not isinstance(repositories, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - type(repositories), - [RepositoryDefinition]) - - if not all(isinstance(repo, RepositoryDefinition) - for repo in repositories): - raise IncorrectReturnTypeError( - Op.DISCOVERY_REPOSITORY, - [type(repo) for repo in repositories], - [RepositoryDefinition]) - - repository_discovery_response = ( - platform_pb2.RepositoryDiscoveryResponse()) - repository_protobuf_list = [to_protobuf(repo) for repo in repositories] - repository_discovery_response.return_value.repositories.extend( - repository_protobuf_list) - return repository_discovery_response - - def _internal_source_config(self, request): - """Source config discovery wrapper. - - Executed when adding or refreshing an environment. This plugin - operation is run after discovering repositories and before - persisting/updating repository and source config data in MDS. This - plugin operation returns a list of source configs from a discovered - repository. - - Discover the source configs on an environment given a discovered - repository. - - Args: - request (SourceConfigDiscoveryRequest): Source - Config Discovery arguments. 
- - Returns: - SourceConfigDiscoveryResponse: The return value of source config - discovery operation. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - def to_protobuf(source_config): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(source_config.to_dict()) - source_config_protobuf = common_pb2.SourceConfig() - source_config_protobuf.parameters.CopyFrom(parameters) - return source_config_protobuf - - if not self.source_config_impl: - raise OperationNotDefinedError(Op.DISCOVERY_SOURCE_CONFIG) - - repository_definition = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - source_configs = self.source_config_impl( - source_connection=RemoteConnection.from_proto(request.source_connection), - repository=repository_definition) - - # Validate that this is a list of SourceConfigDefinition objects - if not isinstance(source_configs, list): - raise IncorrectReturnTypeError( - Op.DISCOVERY_SOURCE_CONFIG, - type(source_configs), - [SourceConfigDefinition]) - - if not all(isinstance(config, SourceConfigDefinition) - for config in source_configs): - raise IncorrectReturnTypeError( - Op.DISCOVERY_SOURCE_CONFIG, - [type(config) for config in source_configs], - [SourceConfigDefinition]) - - source_config_discovery_response = ( - platform_pb2.SourceConfigDiscoveryResponse()) - source_config_protobuf_list = [to_protobuf(config) - for config in source_configs] - source_config_discovery_response.return_value.source_configs.extend( - source_config_protobuf_list) - return source_config_discovery_response - - -class LinkedOperations(object): - - def __init__(self): - self.pre_snapshot_impl = None - self.post_snapshot_impl = None - self.start_staging_impl = None - self.stop_staging_impl = None - self.status_impl = None - self.worker_impl = None - self.mount_specification_impl = None - - 
def pre_snapshot(self): - def pre_snapshot_decorator(pre_snapshot_impl): - if self.pre_snapshot_impl: - raise OperationAlreadyDefinedError(Op.LINKED_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl - return pre_snapshot_impl - return pre_snapshot_decorator - - def post_snapshot(self): - def post_snapshot_decorator(post_snapshot_impl): - if self.post_snapshot_impl: - raise OperationAlreadyDefinedError(Op.LINKED_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl - return post_snapshot_impl - return post_snapshot_decorator - - def start_staging(self): - def start_staging_decorator(start_staging_impl): - if self.start_staging_impl: - raise OperationAlreadyDefinedError(Op.LINKED_START_STAGING) - self.start_staging_impl = start_staging_impl - return start_staging_impl - return start_staging_decorator - - def stop_staging(self): - def stop_staging_decorator(stop_staging_impl): - if self.stop_staging_impl: - raise OperationAlreadyDefinedError(Op.LINKED_STOP_STAGING) - self.stop_staging_impl = stop_staging_impl - return stop_staging_impl - return stop_staging_decorator - - def status(self): - def status_decorator(status_impl): - if self.status_impl: - raise OperationAlreadyDefinedError(Op.LINKED_STATUS) - self.status_impl = status_impl - return status_impl - return status_decorator - - def worker(self): - def worker_decorator(worker_impl): - if self.worker_impl: - raise OperationAlreadyDefinedError(Op.LINKED_WORKER) - self.worker_impl = worker_impl - return worker_impl - return worker_decorator - - def mount_specification(self): - def mount_specification_decorator(mount_specification_impl): - if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.LINKED_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl - return mount_specification_impl - return mount_specification_decorator - - def _internal_direct_pre_snapshot(self, request): - """Pre Snapshot Wrapper for direct plugins. 
- - Executed before creating a snapshot. This plugin - operation is run prior to creating a snapshot for a direct source. - - Run pre-snapshot operation for a direct source. - - Args: - request (DirectPreSnapshotRequest): Pre Snapshot arguments. - - Returns: - DirectPreSnapshotResponse: A response containing - DirectPreSnapshotResult if successful or PluginErrorResult in case - of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) - - direct_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.direct_source.linked_source.parameters.json)) - direct_source = DirectSource( - guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), - parameters=direct_source_definition) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.pre_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) - - direct_pre_snapshot_response = platform_pb2.DirectPreSnapshotResponse() - direct_pre_snapshot_response.return_value.CopyFrom( - platform_pb2.DirectPreSnapshotResult()) - - return direct_pre_snapshot_response - - def _internal_direct_post_snapshot(self, request): - """Post Snapshot Wrapper for direct plugins. - - Executed after creating a snapshot. This plugin - operation is run after creating a snapshot for a direct source. - - Run post-snapshot operation for a direct source. 
- - Args: - request (DirectPostSnapshotRequest): Post Snapshot arguments. - - Returns: - DirectPostSnapshotResponse: A response containing the return value - - DirectPostSnapshotResult which has the snapshot metadata on success. - In case of errors, response object will contain PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) - - direct_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.direct_source.linked_source.parameters.json)) - direct_source = DirectSource( - guid=request.direct_source.linked_source.guid, - connection=RemoteConnection.from_proto(request.direct_source.connection), - parameters=direct_source_definition) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - snapshot = self.post_snapshot_impl( - direct_source=direct_source, - repository=repository, - source_config=source_config) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - direct_post_snapshot_response = ( - platform_pb2.DirectPostSnapshotResponse()) - direct_post_snapshot_response.return_value.snapshot.CopyFrom( - to_protobuf(snapshot)) - - return 
direct_post_snapshot_response - - def _internal_staged_pre_snapshot(self, request): - """Pre Snapshot Wrapper for staged plugins. - - Executed before creating a snapshot. This plugin - operation is run prior to creating a snapshot for a staged source. - - Run pre-snapshot operation for a staged source. - - Args: - request (StagedPreSnapshotRequest): Pre Snapshot arguments. - - Returns: - StagedPreSnapshotResponse: A response containing - StagedPreSnapshotResult if successful or PluginErrorResult - in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotParametersDefinition - - # - # While linked.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_PRE_SNAPSHOT) - - linked_source = request.staged_source.linked_source - staged_source_definition = (LinkedSourceDefinition.from_dict( - json.loads(linked_source.parameters.json))) - staged_mount = request.staged_source.staged_mount - mount = Mount( - remote_environment=RemoteEnvironment.from_proto(staged_mount.remote_environment), - mount_path=staged_mount.mount_path, - shared_path=staged_mount.shared_path) - staged_source = StagedSource( - guid=linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - snapshot_parameters = 
SnapshotParametersDefinition.from_dict( - json.loads(request.snapshot_parameters.parameters.json)) - - self.pre_snapshot_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config, - snapshot_parameters=snapshot_parameters) - - response = platform_pb2.StagedPreSnapshotResponse() - response.return_value.CopyFrom(platform_pb2.StagedPreSnapshotResult()) - - return response - - def _internal_staged_post_snapshot(self, request): - """Post Snapshot Wrapper for staged plugins. - - Executed after creating a snapshot. This plugin - operation is run after creating a snapshot for a staged source. - - Run post-snapshot operation for a staged source. - - Args: - request (StagedPostSnapshotRequest): Post Snapshot arguments. - - Returns: - StagedPostSnapshotResponse: A response containing the return value - StagedPostSnapshotResult which has the snapshot metadata on - success. In case of errors, response object will contain - PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SnapshotParametersDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.LINKED_POST_SNAPSHOT) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment= - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - snapshot_parameters = SnapshotParametersDefinition.from_dict( - json.loads(request.snapshot_parameters.parameters.json)) - - snapshot = self.post_snapshot_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config, - snapshot_parameters=snapshot_parameters) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - 
Op.LINKED_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - response = platform_pb2.StagedPostSnapshotResponse() - response.return_value.snapshot.CopyFrom(to_protobuf(snapshot)) - - return response - - def _internal_start_staging(self, request): - """Start staging Wrapper for staged plugins. - - Executed when enabling the staging source. This plugin - operation is run to start the staging source as part - of the enable operation. - - Run start operation for a staged source. - - Args: - request (StartStagingRequest): Start arguments. - - Returns: - StartStagingResponse: A response containing StartStagingResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.start_staging() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.start_staging_impl: - raise OperationNotDefinedError(Op.LINKED_START_STAGING) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.start_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - start_staging_response = platform_pb2.StartStagingResponse() - start_staging_response.return_value.CopyFrom( - platform_pb2.StartStagingResult()) - - return start_staging_response - - def _internal_stop_staging(self, request): - """Stop staging Wrapper for staged plugins. - - Executed when disabling the staging source. This plugin - operation is run to stop the staging source as part - of the disable operation. - - Run stop operation for a staged source. - - Args: - request (StopStagingRequest): Stop arguments. - - Returns: - StopStagingResponse: A response containing StopStagingResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.stop_staging() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.stop_staging_impl: - raise OperationNotDefinedError(Op.LINKED_STOP_STAGING) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.stop_staging_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - stop_staging_response = platform_pb2.StopStagingResponse() - stop_staging_response.return_value.CopyFrom( - platform_pb2.StopStagingResult()) - - return stop_staging_response - - def _internal_status(self, request): - """Staged Status Wrapper for staged plugins. - - Executed as part of several operations to get the status - of a staged source - active or inactive. - - Run status operation for a staged source. - - Args: - request (StagedStatusRequest): Post Snapshot arguments. 
- - Returns: - StagedStatusResponse: A response containing the return value - - StagedStatusResult which has active or inactive status. In - case of errors, response object will contain PluginErrorResult. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.status() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.status_impl: - raise OperationNotDefinedError(Op.LINKED_STATUS) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - status = self.status_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - # Validate that this is a Status object. 
- if not isinstance(status, Status): - raise IncorrectReturnTypeError( - Op.LINKED_STATUS, type(status), Status) - - staged_status_response = platform_pb2.StagedStatusResponse() - staged_status_response.return_value.status = status.value - - return staged_status_response - - def _internal_worker(self, request): - """Staged Worker Wrapper for staged plugins. - - Executed as part of validated sync. This plugin - operation is run to sync staging source as part - of the validated sync operation. - - Run worker operation for a staged source. - - Args: - request (StagedWorkerRequest): Worker arguments. - - Returns: - StagedWorkerResponse: A response containing StagedWorkerResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - from generated.definitions import SourceConfigDefinition - - # - # While linked.worker() is not a required operation, this should - # not be called if it wasn't implemented. 
- # - if not self.worker_impl: - raise OperationNotDefinedError(Op.LINKED_WORKER) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads( - request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.worker_impl( - staged_source=staged_source, - repository=repository, - source_config=source_config) - - staged_worker_response = platform_pb2.StagedWorkerResponse() - staged_worker_response.return_value.CopyFrom( - platform_pb2.StagedWorkerResult()) - - return staged_worker_response - - def _internal_mount_specification(self, request): - """Staged Mount/Ownership Spec Wrapper for staged plugins. - - Executed before creating a snapshot during sync or before - enable/disable. This plugin operation is run before mounting datasets - on staging to set the mount path and/or ownership. - - Run mount/ownership spec operation for a staged source. - - Args: - request (StagedMountSpecRequest): Mount Spec arguments. - - Returns: - StagedMountSpecResponse: A response containing the return value - - StagedMountSpecResult which has the mount/ownership metadata on - success. In case of errors, response object will contain - PluginErrorResult. 
- """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import RepositoryDefinition - from generated.definitions import LinkedSourceDefinition - - def to_protobuf_single_mount(single_mount): - if single_mount.shared_path: - raise PluginRuntimeError( - 'Shared path is not supported for linked sources.') - - single_mount_protobuf = common_pb2.SingleEntireMount() - single_mount_protobuf.mount_path = single_mount.mount_path - single_mount_protobuf.remote_environment.CopyFrom( - single_mount.remote_environment.to_proto()) - return single_mount_protobuf - - def to_protobuf_ownership_spec(ownership_spec): - ownership_spec_protobuf = common_pb2.OwnershipSpec() - ownership_spec_protobuf.uid = ownership_spec.uid - ownership_spec_protobuf.gid = ownership_spec.gid - return ownership_spec_protobuf - - if not self.mount_specification_impl: - raise OperationNotDefinedError(Op.LINKED_MOUNT_SPEC) - - staged_source_definition = LinkedSourceDefinition.from_dict( - json.loads(request.staged_source.linked_source.parameters.json)) - mount = Mount( - remote_environment=( - RemoteEnvironment.from_proto(request.staged_source.staged_mount.remote_environment)), - mount_path=request.staged_source.staged_mount.mount_path, - shared_path=request.staged_source.staged_mount.shared_path) - staged_source = StagedSource( - guid=request.staged_source.linked_source.guid, - source_connection=RemoteConnection.from_proto(request.staged_source.source_connection), - parameters=staged_source_definition, - mount=mount, - staged_connection=RemoteConnection.from_proto(request.staged_source.staged_connection)) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - mount_spec = self.mount_specification_impl( - staged_source=staged_source, - repository=repository) - - # Validate that this is a MountSpecification object. 
- if not isinstance(mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.LINKED_MOUNT_SPEC, - type(mount_spec), - MountSpecification) - - # Only one mount is supported for linked sources. - mount_len = len(mount_spec.mounts) - if mount_len != 1: - raise PluginRuntimeError( - 'Exactly one mount must be provided for staging sources.' - ' Found {}'.format(mount_len)) - - staged_mount = to_protobuf_single_mount(mount_spec.mounts[0]) - - staged_mount_spec_response = platform_pb2.StagedMountSpecResponse() - staged_mount_spec_response.return_value.staged_mount.CopyFrom( - staged_mount) - - # Ownership spec is optional for linked sources. - if mount_spec.ownership_specification: - ownership_spec = to_protobuf_ownership_spec( - mount_spec.ownership_specification) - staged_mount_spec_response.return_value.ownership_spec.CopyFrom( - ownership_spec) - - return staged_mount_spec_response - - -class VirtualOperations(object): - - def __init__(self): - self.configure_impl = None - self.unconfigure_impl = None - self.reconfigure_impl = None - self.start_impl = None - self.stop_impl = None - self.pre_snapshot_impl = None - self.post_snapshot_impl = None - self.status_impl = None - self.initialize_impl = None - self.mount_specification_impl = None - - def configure(self): - def configure_decorator(configure_impl): - if self.configure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_CONFIGURE) - self.configure_impl = configure_impl - return configure_impl - return configure_decorator - - def unconfigure(self): - def unconfigure_decorator(unconfigure_impl): - if self.unconfigure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_UNCONFIGURE) - self.unconfigure_impl = unconfigure_impl - return unconfigure_impl - return unconfigure_decorator - - def reconfigure(self): - def reconfigure_decorator(reconfigure_impl): - if self.reconfigure_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_RECONFIGURE) - self.reconfigure_impl = reconfigure_impl - return 
reconfigure_impl - return reconfigure_decorator - - def start(self): - def start_decorator(start_impl): - if self.start_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_START) - self.start_impl = start_impl - return start_impl - return start_decorator - - def stop(self): - def stop_decorator(stop_impl): - if self.stop_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) - self.stop_impl = stop_impl - return stop_impl - return stop_decorator - - def pre_snapshot(self): - def pre_snapshot_decorator(pre_snapshot_impl): - if self.pre_snapshot_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) - self.pre_snapshot_impl = pre_snapshot_impl - return pre_snapshot_impl - return pre_snapshot_decorator - - def post_snapshot(self): - def post_snapshot_decorator(post_snapshot_impl): - if self.post_snapshot_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_POST_SNAPSHOT) - self.post_snapshot_impl = post_snapshot_impl - return post_snapshot_impl - return post_snapshot_decorator - - def status(self): - def status_decorator(status_impl): - if self.status_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) - self.status_impl = status_impl - return status_impl - return status_decorator - - def initialize(self): - def initialize_decorator(initialize_impl): - if self.initialize_impl: - raise OperationAlreadyDefinedError(Op.VIRTUAL_INITIALIZE) - self.initialize_impl = initialize_impl - return initialize_impl - return initialize_decorator - - def mount_specification(self): - def mount_specification_decorator(mount_specification_impl): - if self.mount_specification_impl: - raise OperationAlreadyDefinedError( - Op.VIRTUAL_MOUNT_SPEC) - self.mount_specification_impl = mount_specification_impl - return mount_specification_impl - return mount_specification_decorator - - @staticmethod - def _from_protobuf_single_subset_mount(single_subset_mount): - return Mount( - remote_environment=RemoteEnvironment.from_proto(single_subset_mount.remote_environment), 
- mount_path=single_subset_mount.mount_path, - shared_path=single_subset_mount.shared_path) - - def _internal_configure(self, request): - """Configure operation wrapper. - - Executed just after cloning the captured data and mounting it to a - target environment. Specifically, this plugin operation is run during - provision and refresh, prior to taking the initial snapshot of the - clone. This plugin operation is run before the user-customizable - Configure Clone and Before Refresh operations are run. It must return - a sourceConfig object that represents the new dataset. - - Configure the data to be usable on the target environment. For database - data files, this may mean recovering from a crash consistent format or - backup. For application files, this may mean reconfiguring XML files or - rewriting hostnames and symlinks. - - Args: - request (ConfigureRequest): Configure operation arguments. - - Returns: - ConfigureResponse: A response containing the return value of the - configure operation, as a ConfigureResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - - if not self.configure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_CONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - snapshot = SnapshotDefinition.from_dict( - json.loads(request.snapshot.parameters.json)) - - config = self.configure_impl( - virtual_source=virtual_source, - repository=repository, - snapshot=snapshot) - - # Validate that this is a SourceConfigDefinition object. - if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_CONFIGURE, type(config), SourceConfigDefinition) - - configure_response = platform_pb2.ConfigureResponse() - configure_response.return_value.source_config.parameters.json = ( - json.dumps(config.to_dict())) - return configure_response - - def _internal_unconfigure(self, request): - """Unconfigure operation wrapper. - - Executed when disabling or deleting an existing virtual source which - has already been mounted to a target environment. This plugin operation - is run before unmounting the virtual source from the target - environment. - - Args: - request (UnconfigureRequest): Unconfigure operation arguments. - - Returns: - UnconfigureResponse: A response containing UnconfigureResult - if successful or PluginErrorResult in case of an error. 
- """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.unconfigure() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.unconfigure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_UNCONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.unconfigure_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - unconfigure_response = platform_pb2.UnconfigureResponse() - unconfigure_response.return_value.CopyFrom( - platform_pb2.UnconfigureResult()) - return unconfigure_response - - def _internal_reconfigure(self, request): - """Reconfigure operation wrapper. - - Executed while attaching a VDB during a virtual source enable job and - returns a virtual source config. - - Args: - request (ReconfigureRequest): Reconfigure operation arguments. - - Returns: - ReconfigureResponse: A response containing the return value of the - reconfigure operation, as a ReconfigureResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - from generated.definitions import RepositoryDefinition - - if not self.reconfigure_impl: - raise OperationNotDefinedError(Op.VIRTUAL_RECONFIGURE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - snapshot = SnapshotDefinition.from_dict( - json.loads(request.snapshot.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - config = self.reconfigure_impl( - snapshot=snapshot, - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a SourceConfigDefinition object. - if not isinstance(config, SourceConfigDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_RECONFIGURE, type(config), SourceConfigDefinition) - - reconfigure_response = platform_pb2.ReconfigureResponse() - reconfigure_response.return_value.source_config.parameters.json = ( - json.dumps(config.to_dict())) - return reconfigure_response - - def _internal_start(self, request): - """Start operation wrapper. - - Executed after attaching a VDB during a virtual source enable job to - start the database. - - Args: - request (StartRequest): Start operation arguments. - - Returns: - StartResponse: A response containing StartResult if successful or - PluginErrorResult in case of an error. 
- """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.start() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.start_impl: - raise OperationNotDefinedError(Op.VIRTUAL_START) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.start_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - start_response = platform_pb2.StartResponse() - start_response.return_value.CopyFrom(platform_pb2.StartResult()) - return start_response - - def _internal_stop(self, request): - """Stop operation wrapper. - - Executed before unmounting a VDB during a virtual source stop job. - - Args: - request (StopRequest): Stop operation arguments. - - Returns: - StopResponse: A response containing StopResult if successful or - PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.stop() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.stop_impl: - raise OperationNotDefinedError(Op.VIRTUAL_STOP) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.stop_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - stop_response = platform_pb2.StopResponse() - stop_response.return_value.CopyFrom(platform_pb2.StopResult()) - return stop_response - - def _internal_pre_snapshot(self, request): - """Virtual pre snapshot operation wrapper. - - Executed before creating a ZFS snapshot. This plugin operation is run - prior to creating a snapshot for a virtual source. - - Run pre-snapshot operation for a virtual source. - - Args: - virtual_pre_snapshot_request (VirtualPreSnapshotRequest): - Virtual pre snapshot operation arguments. - - Returns: - VirtualPreSnapshotResponse: A response containing - VirtualPreSnapshotResult if successful or PluginErrorResult in case - of an error. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.pre_snapshot() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.pre_snapshot_impl: - raise OperationNotDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.pre_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - virtual_pre_snapshot_response = ( - platform_pb2.VirtualPreSnapshotResponse()) - virtual_pre_snapshot_response.return_value.CopyFrom( - platform_pb2.VirtualPreSnapshotResult()) - return virtual_pre_snapshot_response - - def _internal_post_snapshot(self, request): - """Virtual post snapshot operation wrapper. - - Executed after creating a ZFS snapshot. This plugin operation is run - after creating a snapshot for a virtual source. - - Run post-snapshot operation for a virtual source. - - Args: - request (VirtualPostSnapshotRequest): Virtual post snapshot operation - arguments. - - Returns: - VirtualPostSnapshotResponse: A response containing the return value - of the virtual post snapshot operation, as a - VirtualPostSnapshotResult. - """ - # Reasoning for method imports are in this file's docstring. 
- from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SnapshotDefinition - from generated.definitions import SourceConfigDefinition - - def to_protobuf(snapshot): - parameters = common_pb2.PluginDefinedObject() - parameters.json = json.dumps(snapshot.to_dict()) - snapshot_protobuf = common_pb2.Snapshot() - snapshot_protobuf.parameters.CopyFrom(parameters) - return snapshot_protobuf - - if not self.post_snapshot_impl: - raise OperationNotDefinedError(Op.VIRTUAL_POST_SNAPSHOT) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - snapshot = self.post_snapshot_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a SnapshotDefinition object - if not isinstance(snapshot, SnapshotDefinition): - raise IncorrectReturnTypeError( - Op.VIRTUAL_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) - - virtual_post_snapshot_response = ( - platform_pb2.VirtualPostSnapshotResponse()) - virtual_post_snapshot_response.return_value.snapshot.CopyFrom( - to_protobuf(snapshot)) - return virtual_post_snapshot_response - - def _internal_status(self, request): - """Virtual status operation wrapper. - - Executed to get the status of a virtual source - active or inactive. - - Run status operation for a virtual source. 
- - Args: - request (VirtualStatusRequest): - Virtual status operation arguments. - - Returns: - VirtualStatusResponse: A response containing VirtualStatusResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - # - # While virtual.status() is not a required operation, this should - # not be called if it wasn't implemented. - # - if not self.status_impl: - raise OperationNotDefinedError(Op.VIRTUAL_STATUS) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - virtual_status = self.status_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - - # Validate that this is a Status object. - if not isinstance(virtual_status, Status): - raise IncorrectReturnTypeError( - Op.VIRTUAL_STATUS, type(virtual_status), Status) - - virtual_status_response = platform_pb2.VirtualStatusResponse() - virtual_status_response.return_value.status = virtual_status.value - return virtual_status_response - - def _internal_initialize(self, request): - """Initialize operation wrapper. - - Executed during VDB creation after mounting onto the target - environment. - - Run initialize operation for an empty virtual source. 
- - Args: - request (InitializeRequest): Initialize operation arguments. - - Returns: - InitializeResponse: A response containing InitializeResult - if successful or PluginErrorResult in case of an error. - """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - from generated.definitions import SourceConfigDefinition - - if not self.initialize_impl: - raise OperationNotDefinedError(Op.VIRTUAL_INITIALIZE) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - source_config = SourceConfigDefinition.from_dict( - json.loads(request.source_config.parameters.json)) - - self.initialize_impl( - repository=repository, - source_config=source_config, - virtual_source=virtual_source) - initialize_response = platform_pb2.InitializeResponse() - initialize_response.return_value.CopyFrom( - platform_pb2.InitializeResult()) - return initialize_response - - def _internal_mount_specification(self, request): - """Virtual mount spec operation wrapper. - - Executed to fetch the ownership spec before mounting onto a target - environment. - - Run mount spec operation for a virtual source. - - Args: - virtual_mount_spec_request (VirtualMountSpecRequest): - Virtual mount spec operation arguments. - - Returns: - VirtualMountSpecResponse: A response containing the return value of - the virtual mount spec operation, as a VirtualMountSpecResult. 
- """ - # Reasoning for method imports are in this file's docstring. - from generated.definitions import VirtualSourceDefinition - from generated.definitions import RepositoryDefinition - - def to_protobuf_single_mount(single_mount): - single_mount_protobuf = common_pb2.SingleSubsetMount() - - environment_protobuf = single_mount.remote_environment.to_proto() - - single_mount_protobuf.remote_environment.CopyFrom( - environment_protobuf) - single_mount_protobuf.mount_path = single_mount.mount_path - - if single_mount.shared_path: - single_mount_protobuf.shared_path = single_mount.shared_path - - return single_mount_protobuf - - def to_protobuf_ownership_spec(ownership_spec): - ownership_spec_protobuf = common_pb2.OwnershipSpec() - ownership_spec_protobuf.uid = ownership_spec.uid - ownership_spec_protobuf.gid = ownership_spec.gid - return ownership_spec_protobuf - - if not self.mount_specification_impl: - raise OperationNotDefinedError(Op.VIRTUAL_MOUNT_SPEC) - - virtual_source_definition = VirtualSourceDefinition.from_dict( - json.loads(request.virtual_source.parameters.json)) - mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) - for m in request.virtual_source.mounts] - virtual_source = VirtualSource( - guid=request.virtual_source.guid, - connection=RemoteConnection.from_proto(request.virtual_source.connection), - parameters=virtual_source_definition, - mounts=mounts) - - repository = RepositoryDefinition.from_dict( - json.loads(request.repository.parameters.json)) - - virtual_mount_spec = self.mount_specification_impl( - repository=repository, - virtual_source=virtual_source) - - # Validate that this is a MountSpecification object - if not isinstance(virtual_mount_spec, MountSpecification): - raise IncorrectReturnTypeError( - Op.VIRTUAL_MOUNT_SPEC, - type(virtual_mount_spec), - MountSpecification) - - virtual_mount_spec_response = platform_pb2.VirtualMountSpecResponse() - - if virtual_mount_spec.ownership_specification: - ownership_spec = 
to_protobuf_ownership_spec( - virtual_mount_spec.ownership_specification) - virtual_mount_spec_response.return_value.ownership_spec.CopyFrom( - ownership_spec) - - mounts_list = [to_protobuf_single_mount(m) - for m in virtual_mount_spec.mounts] - virtual_mount_spec_response.return_value.mounts.extend(mounts_list) - return virtual_mount_spec_response - - class Plugin(object): def __init__(self): self.__discovery = DiscoveryOperations() self.__linked = LinkedOperations() self.__virtual = VirtualOperations() + self.__upgrade = UpgradeOperations() @property def discovery(self): @@ -1575,3 +107,7 @@ def linked(self): @property def virtual(self): return self.__virtual + + @property + def upgrade(self): + return self.__upgrade diff --git a/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py new file mode 100644 index 00000000..db13d731 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_upgrade.py @@ -0,0 +1,179 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +# -*- coding: utf-8 -*- + +"""UpgradeOperations for the Virtualization Platform + +There are 5 different objects that we can upgrade. All migration ids must be +unique. To upgrade a specific schema, the plugin author would use that specific +decorator specifying the migration id. We save the implementations of each of +the upgrade functions in a dict for the specific schema. For each new upgrade +operation of the same schema, the key will be the migration id, and the value +will be the function that was implemented. 
+""" +import json +import logging +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform import MigrationIdSet +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectUpgradeObjectTypeError) + +logger = logging.getLogger(__name__) + +__all__ = ['UpgradeOperations'] + + +class UpgradeOperations(object): + + def __init__(self): + self.__migration_id_set = MigrationIdSet() + + self.repository_id_to_impl = {} + self.source_config_id_to_impl = {} + self.linked_source_id_to_impl = {} + self.virtual_source_id_to_impl = {} + self.snapshot_id_to_impl = {} + + def repository(self, migration_id): + def repository_decorator(repository_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, repository_impl.__name__) + self.repository_id_to_impl[std_mig_id] = v.check_function( + repository_impl, Op.UPGRADE_REPOSITORY) + return repository_impl + return repository_decorator + + def source_config(self, migration_id): + def source_config_decorator(source_config_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, source_config_impl.__name__) + self.source_config_id_to_impl[std_mig_id] = v.check_function( + source_config_impl, Op.UPGRADE_SOURCE_CONFIG) + return source_config_impl + return source_config_decorator + + def linked_source(self, migration_id): + def linked_source_decorator(linked_source_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, linked_source_impl.__name__) + self.linked_source_id_to_impl[std_mig_id] = v.check_function( + linked_source_impl, Op.UPGRADE_LINKED_SOURCE) + return linked_source_impl + return linked_source_decorator + + def virtual_source(self, migration_id): + def virtual_source_decorator(virtual_source_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, virtual_source_impl.__name__) + self.virtual_source_id_to_impl[std_mig_id] = 
v.check_function( + virtual_source_impl, Op.UPGRADE_VIRTUAL_SOURCE) + return virtual_source_impl + return virtual_source_decorator + + def snapshot(self, migration_id): + def snapshot_decorator(snapshot_impl): + std_mig_id = self.__migration_id_set.add( + migration_id, snapshot_impl.__name__) + self.snapshot_id_to_impl[std_mig_id] = v.check_function( + snapshot_impl, Op.UPGRADE_SNAPSHOT) + return snapshot_impl + return snapshot_decorator + + @property + def migration_id_list(self): + return self.__migration_id_set.get_sorted_ids() + + @staticmethod + def _success_upgrade_response(upgraded_dict): + upgrade_result = platform_pb2.UpgradeResult( + post_upgrade_parameters=upgraded_dict) + upgrade_response = platform_pb2.UpgradeResponse( + return_value=upgrade_result) + return upgrade_response + + def __process_upgrade_request(self, request, id_to_impl): + """Iterate through all objects in the pre_upgrade_parameters map, + invoke all available migrations on each object and its metadata, + and return a map containing the updated metadata for each object. + """ + post_upgrade_parameters = {} + for (object_ref, metadata) in request.pre_upgrade_parameters.items(): + # Load the object metadata into a dictionary + current_metadata = json.loads(metadata) + # + # Loop through all migrations that were passed into the upgrade + # request. Protobuf will preserve the ordering of repeated + # elements, so we can rely on the backend to sort the migration + # ids before packing them into the request. + # + for migration_id in request.migration_ids: + # Only try to execute the function if the id exists in the map. + if migration_id in id_to_impl: + current_metadata = id_to_impl[migration_id](current_metadata) + post_upgrade_parameters[object_ref] = json.dumps(current_metadata) + + return self._success_upgrade_response(post_upgrade_parameters) + + def _internal_repository(self, request): + """Upgrade repositories for plugins. 
+ """ + if request.type != platform_pb2.UpgradeRequest.REPOSITORY: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.REPOSITORY) + + logger.debug('Upgrade repositories [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self.__process_upgrade_request(request, self.repository_id_to_impl) + + def _internal_source_config(self, request): + """Upgrade source configs for plugins. + """ + if request.type != platform_pb2.UpgradeRequest.SOURCECONFIG: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.SOURCECONFIG) + + logger.debug('Upgrade source configs [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self.__process_upgrade_request(request, self.source_config_id_to_impl) + + def _internal_linked_source(self, request): + """Upgrade linked source for plugins. + """ + if request.type != platform_pb2.UpgradeRequest.LINKEDSOURCE: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.LINKEDSOURCE) + + logger.debug('Upgrade linked sources [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self.__process_upgrade_request(request, self.linked_source_id_to_impl) + + def _internal_virtual_source(self, request): + """Upgrade virtual sources for plugins. + """ + if request.type != platform_pb2.UpgradeRequest.VIRTUALSOURCE: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.VIRTUALSOURCE) + + logger.debug('Upgrade virtual sources [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self.__process_upgrade_request(request, self.virtual_source_id_to_impl) + + def _internal_snapshot(self, request): + """Upgrade snapshots for plugins. 
+ """ + if request.type != platform_pb2.UpgradeRequest.SNAPSHOT: + raise IncorrectUpgradeObjectTypeError( + request.type, platform_pb2.UpgradeRequest.SNAPSHOT) + + logger.debug('Upgrade snapshots [{}]'.format( + ', '.join(sorted(request.pre_upgrade_parameters.keys())))) + + return self.__process_upgrade_request(request, self.snapshot_id_to_impl) diff --git a/platform/src/main/python/dlpx/virtualization/platform/_virtual.py b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py new file mode 100644 index 00000000..76976c2a --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/_virtual.py @@ -0,0 +1,704 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +# -*- coding: utf-8 -*- + +"""VirtualOperations for the Virtualization Platform + +""" +import json +from dlpx.virtualization.common import RemoteConnection, RemoteEnvironment +from dlpx.virtualization.api import common_pb2 +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform import VirtualSource +from dlpx.virtualization.platform import Status +from dlpx.virtualization.platform import Mount +from dlpx.virtualization.platform import MountSpecification +from dlpx.virtualization.platform import validation_util as v +from dlpx.virtualization.platform.operation import Operation as Op +from dlpx.virtualization.platform.exceptions import ( + IncorrectReturnTypeError, OperationNotDefinedError, + OperationAlreadyDefinedError) + + +__all__ = ['VirtualOperations'] + + +class VirtualOperations(object): + + def __init__(self): + self.configure_impl = None + self.unconfigure_impl = None + self.reconfigure_impl = None + self.start_impl = None + self.stop_impl = None + self.pre_snapshot_impl = None + self.post_snapshot_impl = None + self.status_impl = None + self.initialize_impl = None + self.mount_specification_impl = None + + def configure(self): + def configure_decorator(configure_impl): + if self.configure_impl: + raise 
OperationAlreadyDefinedError(Op.VIRTUAL_CONFIGURE) + self.configure_impl = v.check_function(configure_impl, + Op.VIRTUAL_CONFIGURE) + return configure_impl + return configure_decorator + + def unconfigure(self): + def unconfigure_decorator(unconfigure_impl): + if self.unconfigure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_UNCONFIGURE) + self.unconfigure_impl = v.check_function(unconfigure_impl, + Op.VIRTUAL_UNCONFIGURE) + return unconfigure_impl + return unconfigure_decorator + + def reconfigure(self): + def reconfigure_decorator(reconfigure_impl): + if self.reconfigure_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_RECONFIGURE) + self.reconfigure_impl = v.check_function(reconfigure_impl, + Op.VIRTUAL_RECONFIGURE) + return reconfigure_impl + return reconfigure_decorator + + def start(self): + def start_decorator(start_impl): + if self.start_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_START) + self.start_impl = v.check_function(start_impl, Op.VIRTUAL_START) + return start_impl + return start_decorator + + def stop(self): + def stop_decorator(stop_impl): + if self.stop_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_STOP) + self.stop_impl = v.check_function(stop_impl, Op.VIRTUAL_STOP) + return stop_impl + return stop_decorator + + def pre_snapshot(self): + def pre_snapshot_decorator(pre_snapshot_impl): + if self.pre_snapshot_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) + self.pre_snapshot_impl = v.check_function(pre_snapshot_impl, + Op.VIRTUAL_PRE_SNAPSHOT) + return pre_snapshot_impl + return pre_snapshot_decorator + + def post_snapshot(self): + def post_snapshot_decorator(post_snapshot_impl): + if self.post_snapshot_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_POST_SNAPSHOT) + self.post_snapshot_impl = v.check_function( + post_snapshot_impl, Op.VIRTUAL_POST_SNAPSHOT) + return post_snapshot_impl + return post_snapshot_decorator + + def status(self): + def status_decorator(status_impl): + if 
self.status_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_STATUS) + self.status_impl = v.check_function(status_impl, Op.VIRTUAL_STATUS) + return status_impl + return status_decorator + + def initialize(self): + def initialize_decorator(initialize_impl): + if self.initialize_impl: + raise OperationAlreadyDefinedError(Op.VIRTUAL_INITIALIZE) + self.initialize_impl = v.check_function(initialize_impl, + Op.VIRTUAL_INITIALIZE) + return initialize_impl + return initialize_decorator + + def mount_specification(self): + def mount_specification_decorator(mount_specification_impl): + if self.mount_specification_impl: + raise OperationAlreadyDefinedError( + Op.VIRTUAL_MOUNT_SPEC) + self.mount_specification_impl = v.check_function( + mount_specification_impl, Op.VIRTUAL_MOUNT_SPEC) + return mount_specification_impl + return mount_specification_decorator + + @staticmethod + def _from_protobuf_single_subset_mount(single_subset_mount): + return Mount( + remote_environment=RemoteEnvironment.from_proto(single_subset_mount.remote_environment), + mount_path=single_subset_mount.mount_path, + shared_path=single_subset_mount.shared_path) + + def _internal_configure(self, request): + """Configure operation wrapper. + + Executed just after cloning the captured data and mounting it to a + target environment. Specifically, this plugin operation is run during + provision and refresh, prior to taking the initial snapshot of the + clone. This plugin operation is run before the user-customizable + Configure Clone and Before Refresh operations are run. It must return + a sourceConfig object that represents the new dataset. + + Configure the data to be usable on the target environment. For database + data files, this may mean recovering from a crash consistent format or + backup. For application files, this may mean reconfiguring XML files or + rewriting hostnames and symlinks. + + Args: + request (ConfigureRequest): Configure operation arguments. 
+ + Returns: + ConfigureResponse: A response containing the return value of the + configure operation, as a ConfigureResult. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + + if not self.configure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_CONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + snapshot = SnapshotDefinition.from_dict( + json.loads(request.snapshot.parameters.json)) + + config = self.configure_impl( + virtual_source=virtual_source, + repository=repository, + snapshot=snapshot) + + # Validate that this is a SourceConfigDefinition object. + if not isinstance(config, SourceConfigDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_CONFIGURE, type(config), SourceConfigDefinition) + + configure_response = platform_pb2.ConfigureResponse() + configure_response.return_value.source_config.parameters.json = ( + json.dumps(config.to_dict())) + return configure_response + + def _internal_unconfigure(self, request): + """Unconfigure operation wrapper. + + Executed when disabling or deleting an existing virtual source which + has already been mounted to a target environment. This plugin operation + is run before unmounting the virtual source from the target + environment. 
+ + Args: + request (UnconfigureRequest): Unconfigure operation arguments. + + Returns: + UnconfigureResponse: A response containing UnconfigureResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.unconfigure() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.unconfigure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_UNCONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.unconfigure_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + unconfigure_response = platform_pb2.UnconfigureResponse() + unconfigure_response.return_value.CopyFrom( + platform_pb2.UnconfigureResult()) + return unconfigure_response + + def _internal_reconfigure(self, request): + """Reconfigure operation wrapper. + + Executed while attaching a VDB during a virtual source enable job and + returns a virtual source config. + + Args: + request (ReconfigureRequest): Reconfigure operation arguments. + + Returns: + ReconfigureResponse: A response containing the return value of the + reconfigure operation, as a ReconfigureResult. 
+ """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + from generated.definitions import RepositoryDefinition + + if not self.reconfigure_impl: + raise OperationNotDefinedError(Op.VIRTUAL_RECONFIGURE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + snapshot = SnapshotDefinition.from_dict( + json.loads(request.snapshot.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + config = self.reconfigure_impl( + snapshot=snapshot, + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a SourceConfigDefinition object. + if not isinstance(config, SourceConfigDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_RECONFIGURE, type(config), SourceConfigDefinition) + + reconfigure_response = platform_pb2.ReconfigureResponse() + reconfigure_response.return_value.source_config.parameters.json = ( + json.dumps(config.to_dict())) + return reconfigure_response + + def _internal_start(self, request): + """Start operation wrapper. + + Executed after attaching a VDB during a virtual source enable job to + start the database. + + Args: + request (StartRequest): Start operation arguments. 
+ + Returns: + StartResponse: A response containing StartResult if successful or + PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.start() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.start_impl: + raise OperationNotDefinedError(Op.VIRTUAL_START) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.start_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + start_response = platform_pb2.StartResponse() + start_response.return_value.CopyFrom(platform_pb2.StartResult()) + return start_response + + def _internal_stop(self, request): + """Stop operation wrapper. + + Executed before unmounting a VDB during a virtual source stop job. + + Args: + request (StopRequest): Stop operation arguments. + + Returns: + StopResponse: A response containing StopResult if successful or + PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.stop() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.stop_impl: + raise OperationNotDefinedError(Op.VIRTUAL_STOP) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.stop_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + stop_response = platform_pb2.StopResponse() + stop_response.return_value.CopyFrom(platform_pb2.StopResult()) + return stop_response + + def _internal_pre_snapshot(self, request): + """Virtual pre snapshot operation wrapper. + + Executed before creating a ZFS snapshot. This plugin operation is run + prior to creating a snapshot for a virtual source. + + Run pre-snapshot operation for a virtual source. + + Args: + virtual_pre_snapshot_request (VirtualPreSnapshotRequest): + Virtual pre snapshot operation arguments. + + Returns: + VirtualPreSnapshotResponse: A response containing + VirtualPreSnapshotResult if successful or PluginErrorResult in case + of an error. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.pre_snapshot() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.pre_snapshot_impl: + raise OperationNotDefinedError(Op.VIRTUAL_PRE_SNAPSHOT) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.pre_snapshot_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + virtual_pre_snapshot_response = ( + platform_pb2.VirtualPreSnapshotResponse()) + virtual_pre_snapshot_response.return_value.CopyFrom( + platform_pb2.VirtualPreSnapshotResult()) + return virtual_pre_snapshot_response + + def _internal_post_snapshot(self, request): + """Virtual post snapshot operation wrapper. + + Executed after creating a ZFS snapshot. This plugin operation is run + after creating a snapshot for a virtual source. + + Run post-snapshot operation for a virtual source. + + Args: + request (VirtualPostSnapshotRequest): Virtual post snapshot operation + arguments. + + Returns: + VirtualPostSnapshotResponse: A response containing the return value + of the virtual post snapshot operation, as a + VirtualPostSnapshotResult. + """ + # Reasoning for method imports are in this file's docstring. 
+ from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SnapshotDefinition + from generated.definitions import SourceConfigDefinition + + def to_protobuf(snapshot): + parameters = common_pb2.PluginDefinedObject() + parameters.json = json.dumps(snapshot.to_dict()) + snapshot_protobuf = common_pb2.Snapshot() + snapshot_protobuf.parameters.CopyFrom(parameters) + return snapshot_protobuf + + if not self.post_snapshot_impl: + raise OperationNotDefinedError(Op.VIRTUAL_POST_SNAPSHOT) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + snapshot = self.post_snapshot_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a SnapshotDefinition object + if not isinstance(snapshot, SnapshotDefinition): + raise IncorrectReturnTypeError( + Op.VIRTUAL_POST_SNAPSHOT, type(snapshot), SnapshotDefinition) + + virtual_post_snapshot_response = ( + platform_pb2.VirtualPostSnapshotResponse()) + virtual_post_snapshot_response.return_value.snapshot.CopyFrom( + to_protobuf(snapshot)) + return virtual_post_snapshot_response + + def _internal_status(self, request): + """Virtual status operation wrapper. + + Executed to get the status of a virtual source - active or inactive. + + Run status operation for a virtual source. 
+ + Args: + request (VirtualStatusRequest): + Virtual status operation arguments. + + Returns: + VirtualStatusResponse: A response containing VirtualStatusResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + # + # While virtual.status() is not a required operation, this should + # not be called if it wasn't implemented. + # + if not self.status_impl: + raise OperationNotDefinedError(Op.VIRTUAL_STATUS) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + virtual_status = self.status_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + + # Validate that this is a Status object. + if not isinstance(virtual_status, Status): + raise IncorrectReturnTypeError( + Op.VIRTUAL_STATUS, type(virtual_status), Status) + + virtual_status_response = platform_pb2.VirtualStatusResponse() + virtual_status_response.return_value.status = virtual_status.value + return virtual_status_response + + def _internal_initialize(self, request): + """Initialize operation wrapper. + + Executed during VDB creation after mounting onto the target + environment. + + Run initialize operation for an empty virtual source. 
+ + Args: + request (InitializeRequest): Initialize operation arguments. + + Returns: + InitializeResponse: A response containing InitializeResult + if successful or PluginErrorResult in case of an error. + """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + from generated.definitions import SourceConfigDefinition + + if not self.initialize_impl: + raise OperationNotDefinedError(Op.VIRTUAL_INITIALIZE) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + source_config = SourceConfigDefinition.from_dict( + json.loads(request.source_config.parameters.json)) + + self.initialize_impl( + repository=repository, + source_config=source_config, + virtual_source=virtual_source) + initialize_response = platform_pb2.InitializeResponse() + initialize_response.return_value.CopyFrom( + platform_pb2.InitializeResult()) + return initialize_response + + def _internal_mount_specification(self, request): + """Virtual mount spec operation wrapper. + + Executed to fetch the ownership spec before mounting onto a target + environment. + + Run mount spec operation for a virtual source. + + Args: + virtual_mount_spec_request (VirtualMountSpecRequest): + Virtual mount spec operation arguments. + + Returns: + VirtualMountSpecResponse: A response containing the return value of + the virtual mount spec operation, as a VirtualMountSpecResult. 
+ """ + # Reasoning for method imports are in this file's docstring. + from generated.definitions import VirtualSourceDefinition + from generated.definitions import RepositoryDefinition + + def to_protobuf_single_mount(single_mount): + single_mount_protobuf = common_pb2.SingleSubsetMount() + + environment_protobuf = single_mount.remote_environment.to_proto() + + single_mount_protobuf.remote_environment.CopyFrom( + environment_protobuf) + single_mount_protobuf.mount_path = single_mount.mount_path + + if single_mount.shared_path: + single_mount_protobuf.shared_path = single_mount.shared_path + + return single_mount_protobuf + + def to_protobuf_ownership_spec(ownership_spec): + ownership_spec_protobuf = common_pb2.OwnershipSpec() + ownership_spec_protobuf.uid = ownership_spec.uid + ownership_spec_protobuf.gid = ownership_spec.gid + return ownership_spec_protobuf + + if not self.mount_specification_impl: + raise OperationNotDefinedError(Op.VIRTUAL_MOUNT_SPEC) + + virtual_source_definition = VirtualSourceDefinition.from_dict( + json.loads(request.virtual_source.parameters.json)) + mounts = [VirtualOperations._from_protobuf_single_subset_mount(m) + for m in request.virtual_source.mounts] + virtual_source = VirtualSource( + guid=request.virtual_source.guid, + connection=RemoteConnection.from_proto(request.virtual_source.connection), + parameters=virtual_source_definition, + mounts=mounts) + + repository = RepositoryDefinition.from_dict( + json.loads(request.repository.parameters.json)) + + virtual_mount_spec = self.mount_specification_impl( + repository=repository, + virtual_source=virtual_source) + + # Validate that this is a MountSpecification object + if not isinstance(virtual_mount_spec, MountSpecification): + raise IncorrectReturnTypeError( + Op.VIRTUAL_MOUNT_SPEC, + type(virtual_mount_spec), + MountSpecification) + + virtual_mount_spec_response = platform_pb2.VirtualMountSpecResponse() + + if virtual_mount_spec.ownership_specification: + ownership_spec = 
to_protobuf_ownership_spec( + virtual_mount_spec.ownership_specification) + virtual_mount_spec_response.return_value.ownership_spec.CopyFrom( + ownership_spec) + + mounts_list = [to_protobuf_single_mount(m) + for m in virtual_mount_spec.mounts] + virtual_mount_spec_response.return_value.mounts.extend(mounts_list) + return virtual_mount_spec_response \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py index ffaceb06..d800120f 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/exceptions.py +++ b/platform/src/main/python/dlpx/virtualization/platform/exceptions.py @@ -57,6 +57,27 @@ def __init__(self, operation, actual_type, expected_type): super(IncorrectReturnTypeError, self).__init__(message) +class IncorrectUpgradeObjectTypeError(PluginRuntimeError): + """IncorrectUpgradeObjectTypeError gets thrown when an upgrade workflow was + called with the incorrect object type to upgrade. + + Args: + actual type (platform_pb2.UpgradeRequest.Type): type that was passed in + expected_type (platform_pb2.UpgradeRequest.Type): expected type + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + + """ + + def __init__(self, actual_type, expected_type): + message = ( + 'The upgrade operation received objects with {} type but should' + ' have had type {}.'.format(actual_type, expected_type)) + super(IncorrectUpgradeObjectTypeError, self).__init__(message) + + class OperationAlreadyDefinedError(PlatformError): """OperationAlreadyDefinedError gets thrown when the plugin writer tries to define an operation more than ones. @@ -91,12 +112,96 @@ def __init__(self, operation): super(OperationNotDefinedError, self).__init__(message) +class MigrationIdIncorrectTypeError(PlatformError): + """MigrationIdIncorrectType gets thrown when the provided migration id is + not a string. 
+ + Args: + migration_id (str): The migration id assigned for this operation + function_name (str): The name of the function that used the + decorator with the same migration id. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, migration_id, function_name): + message = ("The migration id '{}' used in the function '{}' should" + " be a string.".format(migration_id, function_name)) + super(MigrationIdIncorrectTypeError, self).__init__(message) + + +class MigrationIdIncorrectFormatError(PlatformError): + """MigrationIdIncorrectFormat gets thrown when the migration id given is + not in the correct format. It should be one or more positive integers + separated by periods. + + Args: + migration_id (str): The migration id assigned for this operation + function_name (str): The name of the function that used the + decorator with the same migration id. + format (str): The format expected of the migration_id. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, message): + super(MigrationIdIncorrectFormatError, self).__init__(message) + + @classmethod + def from_fields(cls, migration_id, function_name, format): + message = ("The migration id '{}' used in the function '{}' does not" + " follow the correct format '{}'.".format(migration_id, + function_name, + format)) + return cls(message) + + +class MigrationIdAlreadyUsedError(PlatformError): + """MigrationIdAlreadyUsedError gets thrown when the same migration id is + used for the same upgrade operation + + Args: + migration_id (str): The migration id assigned for this operation + function_name (str): The name of the function that used the + decorator with the same migration id. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. 
+ """ + def __init__(self, migration_id, std_migration_id, function_name): + message = ("The migration id '{}' used in the function '{}' has the" + " same canonical form '{}' as another migration.".format( + migration_id, function_name, std_migration_id)) + super(MigrationIdAlreadyUsedError, self).__init__(message) + +class DecoratorNotFunctionError(PlatformError): + """DecoratorNotFunctionError gets thrown when the decorated variable is + not a function when it should be. + + Args: + object_name (str): The name of the variable that should have been a + decorator_name (str): The decorator that is being incorrectly used. + + Attributes: + message (str): A localized user-readable message about what operation + should be returning what type. + """ + def __init__(self, object_name, decorator_name): + message = ("The object '{}' decorated by '{}' is" + " not a function.".format(object_name, decorator_name)) + super(DecoratorNotFunctionError, self).__init__(message) + + class IncorrectReferenceFormatError(PluginRuntimeError): """There are 2 possible errors that can be thrown with an incorrect reference. The reference passed in can be a non-string, throwing an IncorrectTypeError. The second error that can be thrown is IncorrectReferenceFormatError, which gets thrown when the reference is not - of the format "UNIX_HOST_ENVIRONMENT-#" nor of "WINDOWS_HOST_ENVIRONMENT-#". + of the format "UNIX_HOST_ENVIRONMENT-#" nor of + "WINDOWS_HOST_ENVIRONMENT-#". Args: reference (str): The incorrectly formatted reference @@ -105,6 +210,24 @@ class IncorrectReferenceFormatError(PluginRuntimeError): message (str): A user-readable message describing the exception. 
""" def __init__(self, reference): - message = ("Reference '{}' is not a correctly formatted host environment reference.".format(reference)) + message = ("Reference '{}' is not a correctly formatted host" + " environment reference.".format(reference)) super(IncorrectReferenceFormatError, self).__init__(message) +class IncorrectPluginCodeError(PluginRuntimeError): + """ + This gets thrown if the import validations come across invalid plugin + code that causes import to fail, or if the expected plugin entry point is + not found in the plugin code. + Args: + message (str): A user-readable message describing the exception. + + Attributes: + message (str): A user-readable message describing the exception. + """ + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(IncorrectPluginCodeError, self).__init__(message) \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_util.py b/platform/src/main/python/dlpx/virtualization/platform/import_util.py new file mode 100644 index 00000000..a8e8f807 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_util.py @@ -0,0 +1,138 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# +import inspect + +from dlpx.virtualization.platform import exceptions + + +_IMPORT_CHECKS = {} +_POST_IMPORT_CHECKS = {} + + +class PluginModule: + """ + Import helper class for the plugin. An instance of this class helps to pass + state of imported module and relevant info to all the validation methods. 
+ """ + def __init__(self, + src_dir, + module, + entry_point, + plugin_type, + module_content, + v_maps, + validate_args=False): + self.__src_dir = src_dir + self.__module = module + self.__entry_point = entry_point + self.__type = plugin_type + self.__module_content = module_content + self.__expected_direct_args_by_op =\ + v_maps['EXPECTED_DIRECT_ARGS_BY_OP'] + self.__expected_staged_args_by_op =\ + v_maps['EXPECTED_STAGED_ARGS_BY_OP'] + self.__expected_upgrade_args = v_maps['EXPECTED_UPGRADE_ARGS'] + self.__validate_args = validate_args + + @property + def src_dir(self): + return self.__src_dir + + @property + def module(self): + return self.__module + + @property + def entry_point(self): + return self.__entry_point + + @property + def plugin_type(self): + return self.__type + + @property + def module_content(self): + return self.__module_content + + @property + def expected_direct_args_by_op(self): + return self.__expected_direct_args_by_op + + @property + def expected_staged_args_by_op(self): + return self.__expected_staged_args_by_op + + @property + def expected_upgrade_args(self): + return self.__expected_upgrade_args + + @property + def validate_args(self): + return self.__validate_args + + +def import_check(ordinal): + """ + This is the import check decorator. Ordinal here signifies the order in + which the checks are executed. + """ + def import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _IMPORT_CHECKS + + _IMPORT_CHECKS[ordinal] = f + + return f + + return import_check_decorator + + +def post_import_check(ordinal): + """ + This is the post import check decorator. Ordinal here signifies the order + in which the checks are executed. 
+ """ + def post_import_check_decorator(f): + assert inspect.isfunction(f) + assert ordinal not in _POST_IMPORT_CHECKS + + _POST_IMPORT_CHECKS[ordinal] = f + + return f + + return post_import_check_decorator + + +def validate_import(plugin_module): + """ + Runs validations on the module imported and checks if import was fine + and imported content is valid or not. + NOTE: Dependency checks are not handled well. A failure in one validation + should not impact the next one if each validation defines its dependencies + well. For now, any exception from one is considered failure of all + validations. This can be enhanced to define dependencies well. + """ + for key in sorted(_IMPORT_CHECKS.keys()): + try: + _IMPORT_CHECKS[key](plugin_module) + except exceptions.IncorrectPluginCodeError as plugin_err: + return [plugin_err.message] + except exceptions.UserError as user_err: + return [user_err.message] + return [] + + +def validate_post_import(plugin_module): + """ + Runs post import validations on the module content. + """ + warnings = [] + + # + # warnings.extend is used below since each import check returns a list of + # warnings. + # + for key in sorted(_POST_IMPORT_CHECKS.keys()): + warnings.extend(_POST_IMPORT_CHECKS[key](plugin_module)) + return warnings \ No newline at end of file diff --git a/platform/src/main/python/dlpx/virtualization/platform/import_validations.py b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py new file mode 100644 index 00000000..1a7ef3da --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/import_validations.py @@ -0,0 +1,191 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. 
+# +import inspect + +from dlpx.virtualization.platform.import_util import (import_check, + post_import_check, + PluginModule) +from dlpx.virtualization.platform import exceptions + + +@import_check(ordinal=1) +def validate_module_content(plugin_module): + # This should never happen and if it does, flag an error. + if plugin_module.module_content is None: + raise exceptions.IncorrectPluginCodeError( + 'Plugin module content is None.') + + +@import_check(ordinal=2) +def validate_entry_point(plugin_module): + # + # Schema validation on plugin config file would have ensured entry is a + # string and should never be none - so raise an error if it does. + # + if plugin_module.entry_point is None: + raise exceptions.IncorrectPluginCodeError( + 'Plugin entry point object is None.') + + if not hasattr(plugin_module.module_content, plugin_module.entry_point): + raise exceptions.UserError( + 'Entry point \'{}:{}\' does not exist. \'{}\' is not a symbol' + ' in module \'{}\'.'.format(plugin_module.module, + plugin_module.entry_point, + plugin_module.entry_point, + plugin_module.module)) + + +@import_check(ordinal=3) +def validate_plugin_object(plugin_module): + plugin_object = getattr(plugin_module.module_content, + plugin_module.entry_point, + None) + + if plugin_object is None: + raise exceptions.UserError('Plugin object retrieved from the entry' + ' point {} is None'.format + (plugin_module.entry_point)) + + +@post_import_check(ordinal=1) +def validate_named_args(plugin_module): + """ + Does named argument validation based on the plugin type. + """ + warnings = [] + + if plugin_module.validate_args: + + # + # Validated methods args against expected args and return any + # resulting warnings to the caller to process. + # These warnings should be treated as an exception to make + # sure build fails. 
+        #
+
+        plugin_object = getattr(plugin_module.module_content,
+                                plugin_module.entry_point)
+
+        # Iterate over attributes objects of the Plugin object
+        for plugin_attrib in plugin_object.__dict__.values():
+            #
+            # For each plugin attribute object, its __dict__.keys will give
+            # us the name of the plugin implementation method name. That name
+            # is useful in looking up named arguments expected and what is
+            # actually in the plugin code. And plugin_op_type can be, for e.g.
+            # LinkedOperations, DiscoveryOperations, VirtualOperations
+            #
+            plugin_op_type = plugin_attrib.__class__.__name__
+
+            # UpgradeOperations are validated differently, so ignore.
+            if plugin_op_type == 'UpgradeOperations':
+                continue
+
+            for op_name_key, op_name in plugin_attrib.__dict__.items():
+                if op_name is None:
+                    continue
+                actual_args = inspect.getargspec(op_name)
+                warnings.extend(
+                    _check_args(method_name=op_name.__name__,
+                                expected_args=_lookup_expected_args(
+                                    plugin_module, plugin_op_type,
+                                    op_name_key),
+                                actual_args=actual_args.args))
+
+    return warnings
+
+
+@post_import_check(ordinal=2)
+def check_upgrade_operations(plugin_module):
+    """
+    Does named argument validation on UpgradeOperations.
+    """
+    warnings = []
+
+    if plugin_module.validate_args:
+
+        #
+        # Validated methods args against expected args and return any
+        # resulting warnings to the caller to process.
+        # These warnings should be treated as an exception to make
+        # sure build fails.
+        #
+
+        plugin_object = getattr(plugin_module.module_content,
+                                plugin_module.entry_point)
+
+        # Iterate over attributes objects of the Plugin object
+        for plugin_attrib in plugin_object.__dict__.values():
+            #
+            # For each plugin attribute object, its __dict__.keys will give
+            # us the name of the plugin implementation method name. That name
+            # is useful in looking up named arguments expected and what is
+            # actually in the plugin code. And plugin_op_type can be, for e.g.
+ # LinkedOperations, DiscoveryOperations, VirtualOperations + # + plugin_op_type = plugin_attrib.__class__.__name__ + + if plugin_op_type != 'UpgradeOperations': + continue + + warnings.extend(_check_upgrade_args( + plugin_attrib, plugin_module.expected_upgrade_args)) + + return warnings + + +def _check_upgrade_args(upgrade_operations, expected_upgrade_args): + """ + Does named argument validation of all functions in dictionaries by looping + first through all the attributes in the UpgradeOperations for this plugin. + Any attributes that are not dictionaries that map migration_id -> + upgrade_function are skipped. We then loop through every key/value pair + of each of the dictionaries and validate that the argument in the defined + function has the expected name. + """ + warnings = [] + + for attribute_name, attribute in vars(upgrade_operations).items(): + if attribute_name not in expected_upgrade_args.keys(): + # Skip if not in one of the operation dicts we store functions in. + continue + # + # If the attribute_name was in the expected upgrade dicts then we know + # it is a dict containing migration id -> upgrade function that we can + # iterate on. + # + for migration_id, migration_func in attribute.items(): + actual = inspect.getargspec(migration_func).args + expected = expected_upgrade_args[attribute_name] + warnings.extend( + _check_args(method_name=migration_func.__name__, + expected_args=expected, + actual_args=actual)) + + return warnings + + +def _check_args(method_name, expected_args, actual_args): + warnings = [] + + if len(expected_args) != len(actual_args): + warnings.append('Number of arguments do not match in method {}.' + ' Expected: {}, Found: {}.'.format( + method_name, list(expected_args), actual_args)) + + if not all(arg in expected_args for arg in actual_args): + warnings.append('Named argument mismatch in method {}.' 
+                        ' Expected: {}, Found: {}.'.format(
+                            method_name, list(expected_args), actual_args))
+
+    return warnings
+
+
+def _lookup_expected_args(plugin_module, plugin_op_type, plugin_op_name):
+    if plugin_module.plugin_type == 'DIRECT':
+        return plugin_module.expected_direct_args_by_op[plugin_op_type][
+            plugin_op_name]
+    else:
+        return plugin_module.expected_staged_args_by_op[plugin_op_type][
+            plugin_op_name]
\ No newline at end of file
diff --git a/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py b/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py
new file mode 100644
index 00000000..9dc6c142
--- /dev/null
+++ b/platform/src/main/python/dlpx/virtualization/platform/migration_id_set.py
@@ -0,0 +1,102 @@
+#
+# Copyright (c) 2019 by Delphix. All rights reserved.
+#
+
+import logging
+import re
+
+from dlpx.virtualization.platform.exceptions import (
+    MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError,
+    MigrationIdIncorrectFormatError)
+
+MIGRATION_ID_REGEX = re.compile(r'^\d+(\.\d+)*$')
+logger = logging.getLogger(__name__)
+
+
+class MigrationIdSet:
+    """
+    Keeps track of all migrations and validates/standardizes them as they are
+    added / parsed.
+
+    Exceptions can be thrown when trying to add a new migration id. Otherwise
+    at the end of reading in all migration functions can be gotten in the
+    correct order.
+    """
+    def __init__(self):
+        """
+        The list of migration ids will store migrations as an array of ids
+        where the id is represented by the standardized array of positive
+        integers. For example if there were these ids: 1.0.0, 1.2.03, and
+        2.0.1.0, __migration_ids would be [ [1], [1, 2, 3], [2, 0, 1]]
+        """
+        self.__migration_ids = []
+
+    def add(self, migration_id, impl_name):
+        """
+        Validates that the migration id is the correct type/format and then
+        return the canonical format of the id. Add the id as an array of
+        integers into the list of migration ids.
+ """ + # First validate that the migration_id is the correct type/format. + self.validate_migration_id(migration_id, impl_name) + + # Then we must standardize the migration_id. + std_migration_id = self.standardize_migration_id_to_array( + migration_id, impl_name) + std_string = '.'.join(str(i) for i in std_migration_id) + + # Then we should check if this migration_id has already been used + if std_migration_id in self.__migration_ids: + raise MigrationIdAlreadyUsedError(migration_id, + std_string, + impl_name) + + # Lastly we should add this new array into the internal migration list. + self.__migration_ids.append(std_migration_id) + + # Return back the standardized format of the migration id + return std_string + + @staticmethod + def validate_migration_id(migration_id, impl_name): + # First validate that the id is a string + if not isinstance(migration_id, basestring): + raise MigrationIdIncorrectTypeError(migration_id, impl_name) + + # Next check if the id is the right format + if not MIGRATION_ID_REGEX.match(migration_id): + raise MigrationIdIncorrectFormatError.from_fields( + migration_id, impl_name, MIGRATION_ID_REGEX.pattern) + + @staticmethod + def standardize_migration_id_to_array(migration_id, impl_name): + # Split on the period and convert to integer + array = [int(i) for i in migration_id.split('.')] + + # + # We cannot allow a migration id of essentially '0' because otherwise + # there would be no way to add a migration that goes before this. 
+ # + if not any(array): + raise MigrationIdIncorrectFormatError( + "The migration id '{}' used in the function '{}' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id, impl_name)) + + # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 + while array: + if not array[-1]: + # Remove the last element which is a zero from array + array.pop() + else: + break + + return array + + def get_sorted_ids(self): + # First sort the migration ids + self.__migration_ids.sort() + + # Then convert all these arrays to the usual string format. + return ['.'.join(str(i) for i in migration_id) + for migration_id in self.__migration_ids] diff --git a/platform/src/main/python/dlpx/virtualization/platform/operation.py b/platform/src/main/python/dlpx/virtualization/platform/operation.py index 67b5bdd3..b938c270 100644 --- a/platform/src/main/python/dlpx/virtualization/platform/operation.py +++ b/platform/src/main/python/dlpx/virtualization/platform/operation.py @@ -27,3 +27,9 @@ class Operation(Enum): VIRTUAL_STATUS = 'virtual.status()' VIRTUAL_INITIALIZE = 'virtual.initialize()' VIRTUAL_MOUNT_SPEC = 'virtual.mount_specification()' + + UPGRADE_REPOSITORY = 'upgrade.repository()' + UPGRADE_SOURCE_CONFIG = 'upgrade.source_config()' + UPGRADE_LINKED_SOURCE = 'upgrade.linked_source()' + UPGRADE_VIRTUAL_SOURCE = 'upgrade.virtual_source()' + UPGRADE_SNAPSHOT = 'upgrade.snapshot()' diff --git a/platform/src/main/python/dlpx/virtualization/platform/validation_util.py b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py new file mode 100644 index 00000000..bc39d098 --- /dev/null +++ b/platform/src/main/python/dlpx/virtualization/platform/validation_util.py @@ -0,0 +1,11 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +import inspect +from dlpx.virtualization.platform.exceptions import DecoratorNotFunctionError + + +def check_function(impl, operation): + if not inspect.isfunction(impl) and not inspect.ismethod(impl): + raise DecoratorNotFunctionError(impl.__name__, operation.value) + return impl diff --git a/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py b/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py new file mode 100644 index 00000000..42db7f92 --- /dev/null +++ b/platform/src/test/python/dlpx/virtualization/test_migration_id_set.py @@ -0,0 +1,107 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# + +import pytest +from dlpx.virtualization.platform.exceptions import ( + MigrationIdAlreadyUsedError, MigrationIdIncorrectTypeError, + MigrationIdIncorrectFormatError) +from dlpx.virtualization.platform import migration_id_set as m + + +class TestMigrationIdSet: + @staticmethod + @pytest.fixture + def migration_set(): + yield m.MigrationIdSet() + + @staticmethod + @pytest.mark.parametrize('migration_id,expected_std_id', [ + ('5.3.2.1', '5.3.2.1'), + ('1000', '1000'), + ('50.0.0', '50'), + ('50.0.0000.1', '50.0.0.1'), + ('2019.10.04', '2019.10.4')]) + def test_basic_add(migration_set, migration_id, expected_std_id): + actual_std_id = migration_set.add(migration_id, 'function') + + assert actual_std_id == expected_std_id + + @staticmethod + @pytest.mark.parametrize('id_one,id_two', [ + ('5.3.2.1', '5.3.2.1'), + ('1000', '1000.0.0'), + ('50.0.0', '50'), + ('50.0.0000.1', '50.0.0.1.0000'), + ('2019.0010.0004', '2019.10.4')]) + def test_same_migration_id_used(migration_set, id_one, id_two): + std_id = migration_set.add(id_one, 'function') + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info: + migration_set.add(id_two, 'function2') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'function2' has the" + " same canonical form '{}' as another 
migration.".format(id_two, + std_id)) + + @staticmethod + @pytest.mark.parametrize('migration_id', [True, + 1000, + {'random set'}, + ['random', 'list']]) + def test_migration_incorrect_type(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectTypeError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' should" + " be a string.".format(migration_id)) + + @staticmethod + @pytest.mark.parametrize('migration_id', ['Not integers', + '1000.', + '2019 10 20']) + def test_migration_incorrect_format(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' does not" + " follow the correct format '{}'.".format( + migration_id, m.MIGRATION_ID_REGEX.pattern)) + + @staticmethod + @pytest.mark.parametrize('migration_id', ['0.0', + '0', + '0.000.000.00.0']) + def test_migration_id_is_zero(migration_set, migration_id): + with pytest.raises(MigrationIdIncorrectFormatError) as err_info: + migration_set.add(migration_id, 'upgrade') + + message = err_info.value.message + assert message == ( + "The migration id '{}' used in the function 'upgrade' cannot be" + " used because a 0 migration id is not allowed.".format( + migration_id)) + + @staticmethod + def test_get_sorted_ids(migration_set): + migration_set.add('2019.04.01', 'one') + migration_set.add('4.10.04', 'two') + migration_set.add('20190.10.006', 'three') + migration_set.add('1.2.3.4', 'four') + migration_set.add('5.4.3.2.1.0', 'five') + migration_set.add('1', 'six') + migration_set.add('10.01.10.00.1.0.0', 'seven') + + assert migration_set.get_sorted_ids() == ['1', + '1.2.3.4', + '4.10.4', + '5.4.3.2.1', + '10.1.10.0.1', + '2019.4.1', + '20190.10.6'] diff --git 
a/platform/src/test/python/dlpx/virtualization/test_plugin.py b/platform/src/test/python/dlpx/virtualization/test_plugin.py index 594725fb..90f97e7a 100755 --- a/platform/src/test/python/dlpx/virtualization/test_plugin.py +++ b/platform/src/test/python/dlpx/virtualization/test_plugin.py @@ -4,14 +4,10 @@ import json import pytest -import sys -from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.api import (platform_pb2, common_pb2) from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment, RemoteHost, RemoteUser) -from dlpx.virtualization.api import common_pb2 -from dlpx.virtualization.platform import _plugin from dlpx.virtualization.platform.exceptions import ( - IncorrectReturnTypeError, OperationAlreadyDefinedError, - PlatformError, PluginRuntimeError) + IncorrectReturnTypeError, IncorrectUpgradeObjectTypeError, OperationAlreadyDefinedError, PluginRuntimeError) from mock import MagicMock, patch import fake_generated_definitions @@ -51,6 +47,17 @@ TEST_STAGED_SOURCE_JSON = SIMPLE_JSON.format(TEST_STAGED_SOURCE) TEST_VIRTUAL_SOURCE_JSON = SIMPLE_JSON.format(TEST_VIRTUAL_SOURCE) TEST_SNAPSHOT_PARAMS_JSON = '{"resync": false}' +TEST_PRE_UPGRADE_PARAMS = {'obj': json.dumps({'name': 'upgrade'})} +TEST_POST_MIGRATION_METADATA_1 = ( + json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade'}})) +TEST_POST_MIGRATION_METADATA_2 = ( + json.dumps({'obj': {'name': 'upgrade', 'prettyName': 'prettyUpgrade', + 'metadata': 'metadata'}})) +TEST_POST_UPGRADE_PARAMS = ( + {u'obj': '"{\\"obj\\": {\\"prettyName\\": \\"prettyUpgrade\\", ' + '\\"name\\": \\"upgrade\\", \\"metadata\\": \\"metadata\\"}}"'} +) +MIGRATION_IDS = ('2020.1.1', '2020.2.2') class TestPlugin: @@ -1090,3 +1097,207 @@ def staged_mount_spec_impl(staged_source, repository): message = err_info.value.message assert message == 'Shared path is not supported for linked sources.' 
+ + @staticmethod + def test_upgrade_repository_success(my_plugin): + + @my_plugin.upgrade.repository('2020.1.1') + def upgrade_repository(old_repository): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.repository('2020.2.2') + def upgrade_repository(old_repository): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.REPOSITORY + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_repository(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters\ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_source_config_success(my_plugin): + + @my_plugin.upgrade.source_config('2020.1.1') + def upgrade_source_config(old_source_config): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.source_config('2020.2.2') + def upgrade_source_config(old_source_config): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SOURCECONFIG + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_source_config(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_linked_source_success(my_plugin): + + @my_plugin.upgrade.linked_source('2020.1.1') + def upgrade_linked_source(old_linked_source): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.linked_source('2020.2.2') + def 
upgrade_linked_source(old_linked_source): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.LINKEDSOURCE + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_linked_source(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_virtual_source_success(my_plugin): + + @my_plugin.upgrade.virtual_source('2020.1.1') + def upgrade_virtual_source(old_virtual_source): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.virtual_source('2020.2.2') + def upgrade_virtual_source(old_virtual_source): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.VIRTUALSOURCE + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_virtual_source(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_snapshot_success(my_plugin): + + @my_plugin.upgrade.snapshot('2020.1.1') + def upgrade_snapshot(old_snapshot): + return TEST_POST_MIGRATION_METADATA_1 + + @my_plugin.upgrade.snapshot('2020.2.2') + def upgrade_snapshot(old_snapshot): + return TEST_POST_MIGRATION_METADATA_2 + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SNAPSHOT + 
upgrade_request.migration_ids.extend(MIGRATION_IDS) + + upgrade_response = \ + (my_plugin.upgrade._internal_snapshot(upgrade_request)) + + expected_response = platform_pb2.UpgradeResponse() + expected_response.return_value.post_upgrade_parameters \ + .update(TEST_POST_UPGRADE_PARAMS) + + assert expected_response == upgrade_response + + @staticmethod + def test_upgrade_repository_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_repository(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 1.") + + @staticmethod + def test_upgrade_source_config_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_source_config(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 0.") + + @staticmethod + def test_upgrade_linked_source_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_linked_source(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 2.") + + @staticmethod + def test_upgrade_virtual_source_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SNAPSHOT + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + 
my_plugin.upgrade._internal_virtual_source(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 4 type" + " but should have had type 3.") + + @staticmethod + def test_upgrade_snapshot_incorrect_upgrade_object_type(my_plugin): + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.type = upgrade_request.SOURCECONFIG + + with pytest.raises(IncorrectUpgradeObjectTypeError) as err_info: + my_plugin.upgrade._internal_snapshot(upgrade_request) + + message = err_info.value.message + assert message == ("The upgrade operation received objects with 0 type" + " but should have had type 4.") + + @staticmethod + def test_upgrade_snapshot_fail_with_runtime_error(my_plugin): + + @my_plugin.upgrade.snapshot('2020.1.1') + def upgrade_snapshot(old_snapshot): + raise RuntimeError('RuntimeError in snapshot migration') + + @my_plugin.upgrade.snapshot('2020.2.2') + def upgrade_snapshot(old_snapshot): + raise RuntimeError('RuntimeError in snapshot migration') + + upgrade_request = platform_pb2.UpgradeRequest() + upgrade_request.pre_upgrade_parameters.update(TEST_PRE_UPGRADE_PARAMS) + upgrade_request.type = upgrade_request.SNAPSHOT + upgrade_request.migration_ids.extend(MIGRATION_IDS) + + with pytest.raises(RuntimeError): + my_plugin.upgrade._internal_snapshot(upgrade_request) diff --git a/platform/src/test/python/dlpx/virtualization/test_upgrade.py b/platform/src/test/python/dlpx/virtualization/test_upgrade.py new file mode 100755 index 00000000..1ede1e5e --- /dev/null +++ b/platform/src/test/python/dlpx/virtualization/test_upgrade.py @@ -0,0 +1,307 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# + +import pytest +import logging +from dlpx.virtualization.api import platform_pb2 +from dlpx.virtualization.platform.exceptions import ( + DecoratorNotFunctionError, MigrationIdAlreadyUsedError) +from dlpx.virtualization.platform.operation import Operation as Op + + +class TestUpgrade: + @staticmethod + @pytest.fixture + def my_plugin(): + from dlpx.virtualization.platform import Plugin + yield Plugin() + + @staticmethod + def basic_upgrade_helper(decorator, id_to_impl, upgrade_operation): + @decorator('2019.10.01') + def repo_upgrade_one(input_dict): + output_dict = {'in': input_dict['in'], 'out': 'first'} + return output_dict + + @decorator('2019.10.02') + def repo_upgrade_two(input_dict): + output_dict = {'in': input_dict['in'], 'out': 'second'} + return output_dict + + migration_one = id_to_impl['2019.10.1'] + migration_two = id_to_impl['2019.10.2'] + + assert migration_one == repo_upgrade_one + assert migration_two == repo_upgrade_two + assert migration_one({'in':'in_one'}) == {'in': 'in_one', + 'out': 'first'} + assert migration_two({'in':'in_two'}) == {'in': 'in_two', + 'out': 'second'} + + assert upgrade_operation.migration_id_list == ['2019.10.1', + '2019.10.2'] + + @staticmethod + def decorator_not_function_helper(decorator, op): + + with pytest.raises(DecoratorNotFunctionError) as err_info: + @decorator('2019.10.03') + class RandomClass(object): + pass + + message = err_info.value.message + assert message == ("The object '{}' decorated by '{}' is" + " not a function.".format('RandomClass', + op.value)) + + @staticmethod + def test_upgrade_repository(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.repository, + my_plugin.upgrade.repository_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.repository, Op.UPGRADE_REPOSITORY) + + @staticmethod + def test_upgrade_source_config(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.source_config, + 
my_plugin.upgrade.source_config_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.source_config, Op.UPGRADE_SOURCE_CONFIG) + + @staticmethod + def test_upgrade_linked_source(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.linked_source, + my_plugin.upgrade.linked_source_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.linked_source, Op.UPGRADE_LINKED_SOURCE) + + @staticmethod + def test_upgrade_virtual_source(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.virtual_source, + my_plugin.upgrade.virtual_source_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.virtual_source, Op.UPGRADE_VIRTUAL_SOURCE) + + @staticmethod + def test_upgrade_snapshot(my_plugin): + TestUpgrade.basic_upgrade_helper( + my_plugin.upgrade.snapshot, + my_plugin.upgrade.snapshot_id_to_impl, + my_plugin.upgrade) + + TestUpgrade.decorator_not_function_helper( + my_plugin.upgrade.snapshot, Op.UPGRADE_SNAPSHOT) + + @staticmethod + def test_upgrade_same_migration_id_used(my_plugin): + @my_plugin.upgrade.repository('2019.10.01') + def repo_upgrade_one(): + return 'repo_one' + + @my_plugin.upgrade.repository('2019.10.04') + def repo_upgrade_two(): + return 'repo_two' + + @my_plugin.upgrade.repository('2019.10.006') + def repo_upgrade_three(): + return 'repo_three' + + @my_plugin.upgrade.source_config('2019.10.02') + def sc_upgrade_one(): + return 'sc_one' + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info_one: + @my_plugin.upgrade.source_config('2019.10.0004') + def sc_upgrade_two(): + return 'sc_two' + + @my_plugin.upgrade.linked_source('2019.10.3.000.0') + def ls_upgrade_one(): + return 'ls_one' + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info_two: + @my_plugin.upgrade.virtual_source('2019.10.03') + def vs_upgrade_one(): + return 'vs_one' + + 
@my_plugin.upgrade.virtual_source('2019.10.05') + def vs_upgrade_two(): + return 'vs_two' + + with pytest.raises(MigrationIdAlreadyUsedError) as err_info_three: + @my_plugin.upgrade.snapshot('2019.010.001') + def snap_upgrade_one(): + return 'snap_one' + + @my_plugin.upgrade.snapshot('2019.10.12') + def snap_upgrade_two(): + return 'snap_two' + + assert my_plugin.upgrade.migration_id_list == ['2019.10.1', + '2019.10.2', + '2019.10.3', + '2019.10.4', + '2019.10.5', + '2019.10.6', + '2019.10.12'] + + repo_one = my_plugin.upgrade.repository_id_to_impl['2019.10.1'] + repo_two = my_plugin.upgrade.repository_id_to_impl['2019.10.4'] + repo_three = my_plugin.upgrade.repository_id_to_impl['2019.10.6'] + assert repo_one == repo_upgrade_one + assert repo_two == repo_upgrade_two + assert repo_three == repo_upgrade_three + + sc_one = my_plugin.upgrade.source_config_id_to_impl['2019.10.2'] + assert sc_one == sc_upgrade_one + + ls_one = my_plugin.upgrade.linked_source_id_to_impl['2019.10.3'] + assert ls_one == ls_upgrade_one + + vs_two = my_plugin.upgrade.virtual_source_id_to_impl['2019.10.5'] + assert vs_two == vs_upgrade_two + + snap_two = my_plugin.upgrade.snapshot_id_to_impl['2019.10.12'] + assert snap_two == snap_upgrade_two + + assert err_info_one.value.message == ( + "The migration id '2019.10.0004' used in the function" + " 'sc_upgrade_two' has the same canonical form '2019.10.4'" + " as another migration.") + + assert err_info_two.value.message == ( + "The migration id '2019.10.03' used in the function" + " 'vs_upgrade_one' has the same canonical form '2019.10.3'" + " as another migration.") + + assert err_info_three.value.message == ( + "The migration id '2019.010.001' used in the function" + " 'snap_upgrade_one' has the same canonical form '2019.10.1'" + " as another migration.") + + @staticmethod + @pytest.fixture + def caplog(caplog): + caplog.set_level(logging.DEBUG) + return caplog + + @staticmethod + @pytest.fixture + def upgrade_request(fake_map_param, 
upgrade_type): + return platform_pb2.UpgradeRequest( + pre_upgrade_parameters=fake_map_param, + type=upgrade_type, + migration_ids=[] + ) + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_REPOSITORY-1': '{}', + 'APPDATA_REPOSITORY-2': '{}', + 'APPDATA_REPOSITORY-3': '{}' + }, platform_pb2.UpgradeRequest.REPOSITORY, + )]) + def test_repository(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_repository( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade repositories [APPDATA_REPOSITORY-1,' + ' APPDATA_REPOSITORY-2, APPDATA_REPOSITORY-3]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_SOURCE_CONFIG-1': '{}', + 'APPDATA_SOURCE_CONFIG-2': '{}', + 'APPDATA_SOURCE_CONFIG-3': '{}', + 'APPDATA_SOURCE_CONFIG-4': '{}' + }, platform_pb2.UpgradeRequest.SOURCECONFIG, + )]) + def test_source_config(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_source_config( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade source configs [APPDATA_SOURCE_CONFIG-1,' + ' APPDATA_SOURCE_CONFIG-2, APPDATA_SOURCE_CONFIG-3,' + ' APPDATA_SOURCE_CONFIG-4]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_STAGED_SOURCE-1': '{}', + 'APPDATA_STAGED_SOURCE-2': '{}', + 'APPDATA_STAGED_SOURCE-3': '{}' + }, platform_pb2.UpgradeRequest.LINKEDSOURCE, + )]) + def test_linked_source(my_plugin, 
upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_linked_source( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade linked sources [APPDATA_STAGED_SOURCE-1,' + ' APPDATA_STAGED_SOURCE-2, APPDATA_STAGED_SOURCE-3]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_VIRTUAL_SOURCE-1': '{}', + 'APPDATA_VIRTUAL_SOURCE-2': '{}' + }, platform_pb2.UpgradeRequest.VIRTUALSOURCE, + )]) + def test_virtual_source( + my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_virtual_source( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade virtual sources [APPDATA_VIRTUAL_SOURCE-1,' + ' APPDATA_VIRTUAL_SOURCE-2]') + + @staticmethod + @pytest.mark.parametrize('fake_map_param,upgrade_type', + [({ + 'APPDATA_SNAPSHOT-1': '{}' + }, platform_pb2.UpgradeRequest.SNAPSHOT, + )]) + def test_snapshot(my_plugin, upgrade_request, fake_map_param, caplog): + upgrade_response = my_plugin.upgrade._internal_snapshot( + upgrade_request) + + # Check that the response's oneof is set to return_value and not error + assert upgrade_response.WhichOneof('result') == 'return_value' + assert (upgrade_response.return_value.post_upgrade_parameters + == fake_map_param) + assert (caplog.records[0].message == + 'Upgrade snapshots [APPDATA_SNAPSHOT-1]') diff --git a/tools/src/main/python/dlpx/virtualization/_internal/VERSION b/tools/src/main/python/dlpx/virtualization/_internal/VERSION index 
04f83ef9..817c47e6 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/VERSION +++ b/tools/src/main/python/dlpx/virtualization/_internal/VERSION @@ -1 +1 @@ -1.1.0-internal-7 \ No newline at end of file +2.0.0-internal-001 \ No newline at end of file diff --git a/tools/src/main/python/dlpx/virtualization/_internal/cli.py b/tools/src/main/python/dlpx/virtualization/_internal/cli.py index aef8caf9..a71c6343 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/cli.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/cli.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -9,9 +9,8 @@ from contextlib import contextmanager import click -from dlpx.virtualization._internal import (click_util, exceptions, - logging_util, package_util, - util_classes) +from dlpx.virtualization._internal import (click_util, const, exceptions, + logging_util, package_util) from dlpx.virtualization._internal.commands import build as build_internal from dlpx.virtualization._internal.commands import \ download_logs as download_logs_internal @@ -109,20 +108,19 @@ def delphix_sdk(verbose, quiet): @click.option( '-s', '--ingestion-strategy', - default=util_classes.DIRECT_TYPE, + default=const.DIRECT_TYPE, show_default=True, - type=click.Choice([util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE], + type=click.Choice([const.DIRECT_TYPE, const.STAGED_TYPE], case_sensitive=False), help=('Set the ingestion strategy of the plugin. 
A "direct" plugin ' 'ingests without a staging server while a "staged" plugin ' 'requires a staging server.')) @click.option('-t', '--host-type', - default=util_classes.UNIX_HOST_TYPE, + default=const.UNIX_HOST_TYPE, show_default=True, - type=click.Choice([ - util_classes.UNIX_HOST_TYPE, util_classes.WINDOWS_HOST_TYPE - ]), + type=click.Choice( + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]), help='Set the host platform supported by the plugin.') def init(root, ingestion_strategy, name, host_type): """ diff --git a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py index c8b11d0f..9eda39b7 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/codegen.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import copy @@ -10,7 +10,7 @@ import shutil import subprocess -from dlpx.virtualization._internal import exceptions, file_util, util_classes +from dlpx.virtualization._internal import const, exceptions, file_util logger = logging.getLogger(__name__) UNKNOWN_ERR = 'UNKNOWN_ERR' @@ -77,7 +77,7 @@ def generate_python(name, source_dir, plugin_config_dir, schema_content): # relevant to the plugin writer. We want to always force this to be # recreated. 
# - output_dir = os.path.join(plugin_config_dir, util_classes.OUTPUT_DIR_NAME) + output_dir = os.path.join(plugin_config_dir, const.OUTPUT_DIR_NAME) logger.info('Creating new output directory: {}'.format(output_dir)) file_util.make_dir(output_dir, True) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache b/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache index 73972217..80f49d0b 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache +++ b/tools/src/main/python/dlpx/virtualization/_internal/codegen/templates/base_model_.mustache @@ -39,6 +39,28 @@ class Model(object): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) + if value is None: + # Plugins use the JSON schema specification to define their + # datatypes. JSON schemas, and therefore plugin data + # definitions, distinguish between these two independent + # situations: + # - The property X exists, and has the value `null` + # - The property X does not exist + # + # Unfortunately, Swagger's generated code conflates these two + # cases together. In either case, we'll receive `None` here. + # + # We don't know of a way that we can 100% reliably know which of + # these two cases is what the plugin code intended. However, + # we expect that real-world plugin code will almost always + # intend the "does not exist" case. + # + # So, for now, we'll simply omit these properties from the dict. + # If we want to be more sophisticated in future, we could start + # analyzing the property's subschema, or we could perhaps + # customize Swagger's generated code so it can distinguish + # these two cases. 
+ continue attr = self.attribute_map[attr] if isinstance(value, list): result[attr] = list(map( diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py index d4079f5e..79c353a5 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/build.py @@ -13,8 +13,7 @@ from dlpx.virtualization._internal import (codegen, exceptions, file_util, package_util, - plugin_dependency_util, plugin_util, - util_classes) + plugin_dependency_util, plugin_util) logger = logging.getLogger(__name__) @@ -54,10 +53,10 @@ def build(plugin_config, local_vsdk_root = os.path.expanduser(local_vsdk_root) # Read content of the plugin config file provided and perform validations - logger.info('Reading and validating plugin config file %s', plugin_config) + logger.info('Validating plugin config file %s', plugin_config) try: - result = plugin_util.read_and_validate_plugin_config_file( - plugin_config, not generate_only, False, skip_id_validation) + result = plugin_util.validate_plugin_config_file( + plugin_config, not generate_only, skip_id_validation) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -69,11 +68,11 @@ def build(plugin_config, plugin_config, plugin_config_content['schemaFile']) # Read schemas from the file provided in the config and validate them - logger.info('Reading and validating schemas from %s', schema_file) + logger.info('Validating schemas from %s', schema_file) try: - result = plugin_util.read_and_validate_schema_file( - schema_file, not generate_only) + result = plugin_util.validate_schema_file(schema_file, + not generate_only) except exceptions.UserError as err: raise exceptions.BuildFailedError(err) @@ -113,17 +112,12 @@ def build(plugin_config, plugin_config_content, not generate_only, skip_id_validation) - except exceptions.UserError as err: + except 
(exceptions.UserError, exceptions.SDKToolingError) as err: raise exceptions.BuildFailedError(err) plugin_manifest = {} if result: plugin_manifest = result.plugin_manifest - if result.warnings: - warning_msg = util_classes.MessageUtils.warning_msg( - result.warnings) - logger.warn('{}\n{} Warning(s). {} Error(s).'.format( - warning_msg, len(result.warnings['warning']), 0)) # # Setup a build directory for the plugin in its root. Dependencies are @@ -165,7 +159,7 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): # This is the output dictionary that will be written # to the upload_artifact. # - return { + artifact = { # Hard code the type to a set default. 'type': TYPE, @@ -180,8 +174,6 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): plugin_config_content['id'].lower(), 'prettyName': plugin_config_content['name'], - 'version': - plugin_config_content['version'], # set default value of locale to en-us 'defaultLocale': plugin_config_content.get('defaultLocale', LOCALE_DEFAULT), @@ -192,6 +184,9 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): plugin_config_content['hostTypes'], 'entryPoint': plugin_config_content['entryPoint'], + 'buildNumber': + plugin_util.get_standardized_build_number( + plugin_config_content['buildNumber']), 'buildApi': package_util.get_build_api_version(), 'engineApi': @@ -216,6 +211,11 @@ def prepare_upload_artifact(plugin_config_content, src_dir, schemas, manifest): manifest } + if plugin_config_content.get('externalVersion'): + artifact['externalVersion'] = plugin_config_content['externalVersion'] + + return artifact + def get_linked_source_definition_type(plugin_config_content): if 'STAGED' == plugin_config_content['pluginType'].upper(): diff --git a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py index 459dd3a4..7d6d04b5 100644 --- 
a/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/commands/initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import logging @@ -10,8 +10,8 @@ import jinja2 import yaml -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - plugin_util, util_classes) +from dlpx.virtualization._internal import (codegen, const, exceptions, + file_util, plugin_util) logger = logging.getLogger(__name__) @@ -23,6 +23,7 @@ DEFAULT_ENTRY_POINT_SYMBOL = 'plugin' DEFAULT_ENTRY_POINT = '{}:{}'.format(DEFAULT_ENTRY_POINT_FILE[:-3], DEFAULT_ENTRY_POINT_SYMBOL) +DEFAULT_BUILD_NUMBER = '0.1.0' # Internal constants for the template directory. ENTRY_POINT_TEMPLATE_NAME = 'entry_point.py.template' @@ -103,9 +104,8 @@ def init(root, ingestion_strategy, name, host_type): logger.info('Writing schema file at %s.', schema_file_path) shutil.copyfile(SCHEMA_TEMPLATE_PATH, schema_file_path) - # Read and valida the schema file - result = plugin_util.read_and_validate_schema_file( - schema_file_path, False) + # Validate the schema file. 
+ result = plugin_util.validate_schema_file(schema_file_path, False) # Generate the definitions based on the schema file codegen.generate_python(name, src_dir_path, @@ -123,7 +123,8 @@ def init(root, ingestion_strategy, name, host_type): config = _get_default_plugin_config(plugin_id, ingestion_strategy, name, DEFAULT_ENTRY_POINT, DEFAULT_SRC_DIRECTORY, - DEFAULT_SCHEMA_FILE, host_type) + DEFAULT_SCHEMA_FILE, host_type, + DEFAULT_BUILD_NUMBER) yaml.dump(config, f, default_flow_style=False) # @@ -164,15 +165,15 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy, host_type): template = env.get_template(ENTRY_POINT_TEMPLATE_NAME) - if host_type == util_classes.WINDOWS_HOST_TYPE: + if host_type == const.WINDOWS_HOST_TYPE: default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" - elif host_type == util_classes.UNIX_HOST_TYPE: + elif host_type == const.UNIX_HOST_TYPE: default_mount_path = "/tmp/dlpx_staged_mounts/{}" - if ingestion_strategy == util_classes.DIRECT_TYPE: + if ingestion_strategy == const.DIRECT_TYPE: linked_operations = env.get_template( DIRECT_OPERATIONS_TEMPLATE_NAME).render() - elif ingestion_strategy == util_classes.STAGED_TYPE: + elif ingestion_strategy == const.STAGED_TYPE: linked_operations = env.get_template( STAGED_OPERATIONS_TEMPLATE_NAME).render( default_mount_path=default_mount_path) @@ -188,7 +189,7 @@ def _get_entry_point_contents(plugin_name, ingestion_strategy, host_type): def _get_default_plugin_config(plugin_id, ingestion_strategy, name, entry_point, src_dir_path, schema_file_path, - host_type): + host_type, default_build_number): """ Returns a valid plugin configuration as an OrderedDict. @@ -209,12 +210,14 @@ def _get_default_plugin_config(plugin_id, ingestion_strategy, name, # Ensure values are type 'str'. If they are type unicode yaml prints # them with '!!python/unicode' prepended to the value. 
config = OrderedDict([('id', plugin_id.encode('utf-8')), - ('name', name.encode('utf-8')), ('version', '0.1.0'), + ('name', name.encode('utf-8')), ('language', 'PYTHON27'), ('hostTypes', ['UNIX']), ('pluginType', ingestion_strategy.encode('utf-8')), ('entryPoint', entry_point.encode('utf-8')), ('srcDir', src_dir_path.encode('utf-8')), ('schemaFile', schema_file_path.encode('utf-8')), - ('hostTypes', [host_type.encode('utf-8')])]) + ('hostTypes', [host_type.encode('utf-8')]), + ('buildNumber', default_build_number.encode('utf-8')) + ]) return config diff --git a/tools/src/main/python/dlpx/virtualization/_internal/const.py b/tools/src/main/python/dlpx/virtualization/_internal/const.py new file mode 100644 index 00000000..2022af51 --- /dev/null +++ b/tools/src/main/python/dlpx/virtualization/_internal/const.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2020 by Delphix. All rights reserved. +# + +import os + +UNIX_HOST_TYPE = 'UNIX' +WINDOWS_HOST_TYPE = 'WINDOWS' +STAGED_TYPE = 'STAGED' +DIRECT_TYPE = 'DIRECT' + +OUTPUT_DIR_NAME = '.dvp-gen-output' +PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), + 'validation_schemas') +PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, + 'plugin_config_schema.json') + +PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( + PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') + +PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') diff --git a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py index 8d2880ea..bb1e5f47 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/exceptions.py @@ -7,6 +7,21 @@ import re +class SDKToolingError(Exception): + """ + SDKToolingError is one of the main errors that gets caught in cli.py. Errors + that are not related to the user input should raise this error. 
The + message from this exception is posted to logger.error. message will be the + first arg that is passed in (for any exception that is extending it). + """ + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(SDKToolingError, self).__init__(message) + + class UserError(Exception): """ UserError is the main error that gets caught in cli.py. The message from @@ -254,3 +269,53 @@ def __init__(self, command, exit_code, output): "{} failed with exit code {}.").format( output, command, exit_code) super(SubprocessFailedError, self).__init__(message) + + +class ValidationFailedError(UserError): + """ + ValidationFailedError gets raised when validation fails on plugin config + and its contents. + Defines helper methods to format warning and exception messages. + """ + def __init__(self, warnings): + message = self.__report_warnings_and_exceptions(warnings) + super(ValidationFailedError, self).__init__(message) + + @classmethod + def __report_warnings_and_exceptions(cls, warnings): + """ + Prints the warnings and errors that were found in the plugin code, if + the warnings dictionary contains the 'exception' key. + """ + exception_msg = cls.sdk_exception_msg(warnings) + exception_msg += cls.exception_msg(warnings) + exception_msg += '\n{}'.format(cls.warning_msg(warnings)) + return '{}\n{} Warning(s). 
{} Error(s).'.format( + exception_msg, len(warnings['warning']), + len(warnings['exception']) + len(warnings['sdk exception'])) + + @classmethod + def sdk_exception_msg(cls, warnings): + sdk_exception_msg = '\n'.join([ + cls.__format_msg('SDK Error', ex) + for ex in warnings['sdk exception'] + ]) + return sdk_exception_msg + + @classmethod + def exception_msg(cls, exceptions): + exception_msg = '\n'.join( + cls.__format_msg('Error', ex) for ex in exceptions['exception']) + return exception_msg + + @classmethod + def warning_msg(cls, warnings): + warning_msg = '\n'.join( + cls.__format_msg('Warning', warning) + for warning in warnings['warning']) + return warning_msg + + @staticmethod + def __format_msg(msg_type, msg): + msg_str = "{}: {}".format(msg_type, msg) + return msg_str diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py index 0df69610..9f58feb1 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_importer.py @@ -1,25 +1,24 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import importlib -import inspect import logging import os import sys -from collections import defaultdict +from collections import defaultdict, namedtuple from multiprocessing import Process, Queue import yaml -from dlpx.virtualization._internal import exceptions, util_classes -from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE -from dlpx.virtualization._internal.util_classes import MessageUtils -from flake8.api import legacy as flake8 +from dlpx.virtualization._internal import const, exceptions +from dlpx.virtualization.platform import import_util logger = logging.getLogger(__name__) -PLUGIN_IMPORTER_YAML = os.path.join(util_classes.PLUGIN_SCHEMAS_DIR, +PLUGIN_IMPORTER_YAML = os.path.join(const.PLUGIN_SCHEMAS_DIR, 'plugin_importer.yaml') +validation_result = namedtuple('validation_result', ['plugin_manifest']) + def load_validation_maps(): """ @@ -33,19 +32,15 @@ def load_validation_maps(): class PluginImporter: """ Import helper class for the plugin. Imports the plugin module in a sub - process to ensure its isolated and does not pollute caller's runtime. + process to ensure it's isolated and does not pollute caller's runtime. On successful import, callers can get the manifest describing what methods are implemented in the plugin code. If import fails or has issues with validation of module content and entry points- will save errors/warnings in a dict that callers can access. 
""" - validation_maps = load_validation_maps() - expected_staged_args_by_op = validation_maps['EXPECTED_STAGED_ARGS_BY_OP'] - expected_direct_args_by_op = validation_maps['EXPECTED_DIRECT_ARGS_BY_OP'] - required_methods_by_plugin_type = \ - validation_maps['REQUIRED_METHODS_BY_PLUGIN_TYPE'] - required_methods_description = \ - validation_maps['REQUIRED_METHODS_DESCRIPTION'] + v_maps = load_validation_maps() + required_methods_by_plugin_type = v_maps['REQUIRED_METHODS_BY_PLUGIN_TYPE'] + required_methods_description = v_maps['REQUIRED_METHODS_DESCRIPTION'] def __init__(self, src_dir, @@ -58,41 +53,39 @@ def __init__(self, self.__plugin_entry_point = entry_point self.__plugin_type = plugin_type self.__validate = validate + self.__post_import_checks = [self.__check_for_required_methods] + + @property + def result(self): + return validation_result(plugin_manifest=self.__plugin_manifest) - def import_plugin(self): + def validate_plugin_module(self): """ - Imports the plugin module, does basic validation. + Imports the plugin module, does post import validation. Returns: plugin manifest - dict describing methods implemented in the plugin - Note: - warnings - dict containing a list of errors or warnings can be - obtained by the caller via warnings property. + is available to callers via the result property. + NOTE: + Importing module in the current context pollutes the runtime of + the caller, in this case dvp. If the module being imported, for + e.g. contains code that adds a handler to the root logger at + import time, this can cause issues with logging in this code and + callers of validator. To avoid such issues, perform the import in + in a sub-process and on completion return the output. 
""" logger.debug('Importing plugin module : %s', self.__plugin_module) + self.__plugin_manifest, warnings = self.__internal_import() + self.__run_checks(warnings) - self.__pre_import_checks() - plugin_manifest, warnings = self.__import_plugin() - self.__post_import_checks(plugin_manifest, warnings) - - return plugin_manifest, warnings - - def __pre_import_checks(self): - """ - Performs checks of the plugin code that should take place prior to - importing. - """ - warnings = PluginImporter.__check_for_undefined_names(self.__src_dir) - PluginImporter.__report_warnings_and_exceptions(warnings) - - def __import_plugin(self): + def __internal_import(self): """ - Imports the module to check for errors or issues. Also does an eval on - the entry point. + Imports the module in a sub-process to check for errors or issues. + Also does an eval on the entry point. """ plugin_manifest = {} warnings = defaultdict(list) try: - plugin_manifest, warnings = (PluginImporter.__import_in_subprocess( + plugin_manifest, warnings = (self.__import_in_subprocess( self.__src_dir, self.__plugin_module, self.__plugin_entry_point, self.__plugin_type, self.__validate)) @@ -107,40 +100,6 @@ def __import_plugin(self): return plugin_manifest, warnings - def __post_import_checks(self, plugin_manifest, warnings): - """ - Performs checks of the plugin code that should take place after - importing. - """ - check_warnings = self.__check_for_required_methods( - plugin_manifest, self.__plugin_type) - - if check_warnings and 'warning' in check_warnings: - warnings['warning'].extend(check_warnings['warning']) - - self.__report_warnings_and_exceptions(warnings) - - @staticmethod - def __check_for_required_methods(plugin_manifest, plugin_type): - """ - Checks for required methods in the manifest and adds warnings for any - missing methods. 
- """ - warnings = defaultdict(list) - if not plugin_manifest: - return warnings - for method_key, method_name in \ - PluginImporter.required_methods_by_plugin_type[ - plugin_type].items(): - if plugin_manifest[method_key] is False: - warnings['warning'].append( - 'Implementation missing ' - 'for required method {}. The Plugin Operation \'{}\' ' - 'will fail when executed.'.format( - method_name, PluginImporter. - required_methods_description[method_key])) - return warnings - @staticmethod def __import_in_subprocess(src_dir, module, entry_point, plugin_type, validate): @@ -177,121 +136,152 @@ def __parse_queue(queue): return manifest, warnings - @staticmethod - def __check_for_undefined_names(src_dir): + def __run_checks(self, warnings): """ - Checks the plugin module for undefined names. This catches - missing imports, references to nonexistent variables, etc. - - ..note:: - We are using the legacy flake8 api, because there is currently - no public, stable api for flake8 >= 3.0.0 - - For more info, see - https://flake8.pycqa.org/en/latest/user/python-api.html + Performs checks of the plugin code that should take place after + importing. 
""" - warnings = defaultdict(list) - exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) - style_guide = flake8.get_style_guide(select=["F821"], - exclude=[exclude_dir], - quiet=1) - style_guide.check_files(paths=[src_dir]) - file_checkers = style_guide._application.file_checker_manager.checkers - for checker in file_checkers: - for result in checker.results: - # From the api code, result is a tuple defined as: error = - # (error_code, line_number, column, text, physical_line) - if result[0] == 'F821': - msg = "{} on line {} in {}".format(result[3], result[1], - checker.filename) - warnings['exception'].append(exceptions.UserError(msg)) - - return warnings - - @staticmethod - def __report_warnings_and_exceptions(warnings): + for check in self.__post_import_checks: + check_warnings = check() + if check_warnings and 'warning' in check_warnings: + warnings['warning'].extend(check_warnings['warning']) + + if warnings: + if 'exception' in warnings: + raise exceptions.ValidationFailedError(warnings) + if 'sdk exception' in warnings: + sdk_exception_msg =\ + exceptions.ValidationFailedError(warnings).message + raise exceptions.SDKToolingError(sdk_exception_msg) + + if 'warning' in warnings: + # + # Use the ValidationFailedError type to get a formatted message + # with number of warnings included in the message. + # + warning_msg = exceptions.ValidationFailedError( + warnings).message + logger.warn(warning_msg) + + def __check_for_required_methods(self): """ - Prints the warnings and errors that were found in the plugin code, if - the warnings dictionary contains the 'exception' key. + Checks for required methods in the manifest and adds warnings for any + missing methods. """ - if warnings and 'exception' in warnings: - exception_msg = MessageUtils.exception_msg(warnings) - exception_msg += '\n{}'.format(MessageUtils.warning_msg(warnings)) - raise exceptions.UserError( - '{}\n{} Warning(s). 
{} Error(s).'.format( - exception_msg, len(warnings['warning']), - len(warnings['exception']))) + warnings = defaultdict(list) + if not self.__plugin_manifest: + return warnings + for method_key, method_name in \ + PluginImporter.required_methods_by_plugin_type[ + self.__plugin_type].items(): + if self.__plugin_manifest[method_key] is False: + warnings['warning'].append( + 'Implementation missing ' + 'for required method {}. The Plugin Operation \'{}\' ' + 'will fail when executed.'.format( + method_name, PluginImporter. + required_methods_description[method_key])) + return warnings def _get_manifest(queue, src_dir, module, entry_point, plugin_type, validate): - manifest = {} + """ + Imports the plugin module, runs validations and returns the manifest. + """ + module_content = None + + try: + module_content = _import_helper(queue, src_dir, module) + except exceptions.UserError: + # + # Exception here means there was an error importing the module and + # queue is updated with the exception details inside _import_helper. + # + return + + # + # Create an instance of plugin module with associated state to pass around + # to the validation code. + # + plugin_module = import_util.PluginModule(src_dir, module, entry_point, + plugin_type, module_content, + PluginImporter.v_maps, validate) + + # Validate if the module imported fine and is the expected one. + warnings = import_util.validate_import(plugin_module) + _process_warnings(queue, warnings) + + # If the import itself had issues, no point validating further. + if warnings and len(warnings) > 0: + return + + # Run post import validations and consolidate issues. + warnings = import_util.validate_post_import(plugin_module) + _process_warnings(queue, warnings) + + manifest = _prepare_manifest(entry_point, module_content) + queue.put({'manifest': manifest}) + + +def _import_helper(queue, src_dir, module): + """Helper method to import the module and handle any import time + exceptions. 
+ """ + module_content = None sys.path.append(src_dir) + try: module_content = importlib.import_module(module) - manifest = _validate_and_get_manifest(module, module_content, - entry_point) - - if validate: + except (ImportError, TypeError) as err: + queue.put({'exception': err}) + except Exception as err: + # + # We need to figure out if this is an error that was raised inside the + # wrappers which would mean that it is a user error. Otherwise we + # should still queue the error but specify that it's not a user error. + # + parent_class_list = [base.__name__ for base in err.__class__.__bases__] + if 'PlatformError' in parent_class_list: + # This is a user error + error = exceptions.UserError(err.message) + queue.put({'exception': error}) + else: # - # Validated methods args against expected args and add any - # resulting warnings to the queue for caller to process. - # These warnings should be treated as an exception to make - # sure build fails. + # Because we don't know if the output of the err is actually in the + # message, we just cast the exception to a string and hope to get + # the most information possible. 
# - warnings = _validate_named_args(module_content, entry_point, - plugin_type) - if warnings: - map(lambda warning: queue.put({'exception': warning}), - warnings) - except ImportError as err: - queue.put({'exception': err}) - except exceptions.UserError as user_err: - queue.put({'exception': user_err}) - except RuntimeError as rt_err: - queue.put({'exception': rt_err}) + error = exceptions.SDKToolingError(str(err)) + queue.put({'sdk exception': error}) finally: sys.path.remove(src_dir) - queue.put({'manifest': manifest}) + if not module_content: + raise exceptions.UserError("Plugin module content is None") + + return module_content + +def _process_warnings(queue, warnings): + for warning in warnings: + queue.put({'exception': warning}) -def _validate_and_get_manifest(module, module_content, entry_point): + +def _prepare_manifest(entry_point, module_content): """ Creates a plugin manifest indicating which plugin operations have been implemented by a plugin developer. Plugin_module_content is a module object which must have plugin_entry_point_name as one of its attributes. Args: - module: name of the module imported - module_content: plugin module content from import entry_point: name of entry point to the above plugin module + module_content: plugin module content from import Returns: dict: dictionary that represents plugin's manifest """ - # This should never happen and if it does, flag a run time error. - if module_content is None: - raise RuntimeError('Plugin module content is None.') - - # - # Schema validation on plugin config file would have ensured entry - # is a string and should never happen its none - so raise a run time - # error if it does. - # - if entry_point is None: - raise RuntimeError('Plugin entry point object is None.') - - if not hasattr(module_content, entry_point): - raise exceptions.UserError( - 'Entry point \'{}:{}\' does not exist. 
\'{}\' is not a symbol' - ' in module \'{}\'.'.format(module, entry_point, entry_point, - module)) plugin_object = getattr(module_content, entry_point) - if plugin_object is None: - raise exceptions.UserError('Plugin object retrieved from the entry' - ' point {} is None'.format(entry_point)) - # Check which methods on the plugin object have been implemented. manifest = { 'type': @@ -333,65 +323,9 @@ def _validate_and_get_manifest(module, module_content, entry_point): 'hasVirtualStatus': bool(plugin_object.virtual.status_impl), 'hasInitialize': - bool(plugin_object.virtual.initialize_impl) + bool(plugin_object.virtual.initialize_impl), + 'migrationIdList': + plugin_object.upgrade.migration_id_list } return manifest - - -def _validate_named_args(module_content, entry_point, plugin_type): - """ - Does named argument validation based on the plugin type. - """ - warnings = [] - - plugin_object = getattr(module_content, entry_point) - - # Iterate over attributes objects of the Plugin object - for plugin_attrib in plugin_object.__dict__.values(): - # - # For each plugin attribute object, its __dict__.keys will give - # us the name of the plugin implemntation method name. That name - # is useful in looking up named arguments expected and what is - # actually in the plugin code. And plugin_op_type can be, for e.g. - # LinkedOperations, DiscoveryOperations, VirtualOperations - # - plugin_op_type = plugin_attrib.__class__.__name__ - for op_name_key, op_name in plugin_attrib.__dict__.items(): - if op_name is None: - continue - actual_args = inspect.getargspec(op_name) - warnings.extend( - _check_args(method_name=op_name.__name__, - expected_args=_lookup_expected_args( - plugin_type, plugin_op_type, op_name_key), - actual_args=actual_args.args)) - - return warnings - - -def _check_args(method_name, expected_args, actual_args): - warnings = [] - - if len(expected_args) != len(actual_args): - warnings.append('Number of arguments do not match in method {}.' 
- ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - if not all(arg in expected_args for arg in actual_args): - warnings.append('Named argument mismatch in method {}.' - ' Expected: {}, Found: {}.'.format( - method_name, list(expected_args), - str(actual_args))) - - return warnings - - -def _lookup_expected_args(plugin_type, plugin_op_type, plugin_op_name): - if plugin_type == util_classes.DIRECT_TYPE: - return PluginImporter.expected_direct_args_by_op[plugin_op_type][ - plugin_op_name] - else: - return PluginImporter.expected_staged_args_by_op[plugin_op_type][ - plugin_op_name] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py index 8ef97776..1a210423 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_util.py @@ -1,22 +1,50 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # +import enum import logging import os +from contextlib import contextmanager -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions, file_util +from dlpx.virtualization._internal.plugin_importer import PluginImporter from dlpx.virtualization._internal.plugin_validator import PluginValidator from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode logger = logging.getLogger(__name__) -def read_and_validate_plugin_config_file(plugin_config, - stop_build, - run_all_validations, - skip_id_validation=False): +class ValidationMode(enum.Enum): + """ + Defines the validation mode that validator uses. + INFO - validator will give out info messages if validation fails. + WARNING - validator will log a warning if validation fails. 
+ ERROR - validator will raise an exception if validation fails. + """ + INFO = 1 + WARNING = 2 + ERROR = 3 + + +@contextmanager +def validate_error_handler(plugin_file, validation_mode): + try: + yield + except Exception as e: + if validation_mode is ValidationMode.INFO: + logger.info('Validation failed on plugin file %s : %s', + plugin_file, e) + elif validation_mode is ValidationMode.WARNING: + logger.warning('Validation failed on plugin file %s : %s', + plugin_file, e) + else: + raise e + + +def validate_plugin_config_file(plugin_config, + stop_build, + skip_id_validation=False): """ Reads a plugin config file and validates the contents using a pre-defined schema. If stop_build is True, will report exception @@ -27,12 +55,14 @@ def read_and_validate_plugin_config_file(plugin_config, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator(plugin_config, plugin_config_schema_file, - validation_mode, run_all_validations) - validator.validate() + plugin_config_schema_file = (const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION + if skip_id_validation else + const.PLUGIN_CONFIG_SCHEMA) + validator = PluginValidator(plugin_config, plugin_config_schema_file) + + with validate_error_handler(plugin_config, validation_mode): + validator.validate_plugin_config() + return validator.result @@ -48,18 +78,22 @@ def get_plugin_manifest(plugin_config_file, """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - plugin_config_schema_file = ( - util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION - if skip_id_validation else util_classes.PLUGIN_CONFIG_SCHEMA) - validator = PluginValidator.from_config_content(plugin_config_file, - plugin_config_content, - plugin_config_schema_file, - validation_mode) - validator.validate() - return validator.result + 
src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = PluginValidator.split_entry_point( + plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + importer = PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) + with validate_error_handler(plugin_config_file, validation_mode): + importer.validate_plugin_module() -def read_and_validate_schema_file(schema_file, stop_build): + return importer.result + + +def validate_schema_file(schema_file, stop_build): """ Reads a plugin schema file and validates the contents using a pre-defined schema. If stop_build is True, will report exception @@ -69,9 +103,11 @@ def read_and_validate_schema_file(schema_file, stop_build): """ validation_mode = (ValidationMode.ERROR if stop_build else ValidationMode.WARNING) - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) + + with validate_error_handler(schema_file, validation_mode): + validator.validate() + return validator.result @@ -79,8 +115,7 @@ def get_plugin_config_property(plugin_config_path, prop): """ Returns the value for a specific property from the plugin config file. """ - result = read_and_validate_plugin_config_file(plugin_config_path, False, - False) + result = validate_plugin_config_file(plugin_config_path, False, False) return result.plugin_config_content[prop] @@ -97,3 +132,23 @@ def get_schema_file_path(plugin_config, schema_file): if not os.path.isfile(schema_file): raise exceptions.PathTypeError(schema_file, 'file') return os.path.normpath(schema_file) + + +def get_standardized_build_number(build_number): + """ + Converts the build number the way back end expects it to be - without + leading or trailing zeros in each part of the multi part build number that + is separated by dots. 
+ """ + # Split on the period and convert to integer + array = [int(i) for i in build_number.split('.')] + + # Next we want to trim all trailing zeros so ex: 5.3.0.0 == 5.3 + while array: + if not array[-1]: + # Remove the last element which is a zero from array + array.pop() + else: + break + + return '.'.join(str(i) for i in array) diff --git a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py index 09c0e0b2..46c46435 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/plugin_validator.py @@ -8,16 +8,14 @@ from collections import defaultdict, namedtuple import yaml -from dlpx.virtualization._internal import (exceptions, file_util, - plugin_importer) -from dlpx.virtualization._internal.util_classes import ValidationMode +from dlpx.virtualization._internal import exceptions +from dlpx.virtualization._internal.codegen import CODEGEN_PACKAGE +from flake8.api import legacy as flake8 from jsonschema import Draft7Validator logger = logging.getLogger(__name__) -validation_result = namedtuple( - 'validation_result', - ['plugin_config_content', 'plugin_manifest', 'warnings']) +validation_result = namedtuple('validation_result', ['plugin_config_content']) class PluginValidator: @@ -29,33 +27,30 @@ class PluginValidator: config, content of the python module specified in in the pluginEntryPoint and also name of the plugin entry point in the module. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back. 
""" def __init__(self, plugin_config, plugin_config_schema, - validation_mode, - run_all_validations, plugin_config_content=None): self.__plugin_config = plugin_config self.__plugin_config_schema = plugin_config_schema - self.__validation_mode = validation_mode - self.__run_all_validations = run_all_validations self.__plugin_config_content = plugin_config_content self.__plugin_manifest = None - self.__warnings = defaultdict(list) + self.__pre_import_checks = [ + self.__validate_plugin_config_content, + self.__validate_plugin_entry_point, + self.__check_for_undefined_names + ] @property def result(self): return validation_result( - plugin_config_content=self.__plugin_config_content, - plugin_manifest=self.__plugin_manifest, - warnings=self.__warnings) + plugin_config_content=self.__plugin_config_content) @classmethod def from_config_content(cls, plugin_config_file, plugin_config_content, - plugin_config_schema, validation_mode): + plugin_config_schema): """ Instantiates the validator with given plugin config content. plugin_config_file path is not read but used to get the absolute @@ -63,56 +58,27 @@ def from_config_content(cls, plugin_config_file, plugin_config_content, Returns: PluginValidator """ - return cls(plugin_config_file, plugin_config_schema, validation_mode, - True, plugin_config_content) + return cls(plugin_config_file, plugin_config_schema, + plugin_config_content) - def validate(self): - """ - Validates the plugin config file. - """ - logger.debug('Run config validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin config file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin config file : %s', - e) - else: - raise e - - def __run_validations(self): + def validate_plugin_config(self): """ Reads a plugin config file and validates the contents using a - pre-defined schema. 
If validation is successful, tries to import - the plugin module and validates the entry point specified. + pre-defined schema. """ - logger.info('Reading plugin config file %s', self.__plugin_config) - if self.__plugin_config_content is None: self.__plugin_config_content = self.__read_plugin_config_file() logger.debug('Validating plugin config file content : %s', self.__plugin_config_content) - self.__validate_plugin_config_content() - - if not self.__run_all_validations: - logger.debug('Plugin config file schema validation is done') - return - - src_dir = file_util.get_src_dir_path( - self.__plugin_config, self.__plugin_config_content['srcDir']) - - logger.debug('Validating plugin entry point : %s', - self.__plugin_config_content['entryPoint']) - self.__validate_plugin_entry_point(src_dir) + self.__run_checks() def __read_plugin_config_file(self): """ Reads a plugin config file and raises UserError if there is an issue reading the file. """ + logger.info('Reading plugin config file %s', self.__plugin_config) try: with open(self.__plugin_config, 'rb') as f: try: @@ -133,6 +99,18 @@ def __read_plugin_config_file(self): '\nError code: {}. Error message: {}'.format( self.__plugin_config, err.errno, os.strerror(err.errno))) + def __run_checks(self): + """ + Runs validations on the plugin config content and raise exceptions + if any. + """ + # + # All the pre-import checks need to happen in sequence. So no point + # validating further if a check fails. + # + for check in self.__pre_import_checks: + check() + def __validate_plugin_config_content(self): """ Validates the given plugin configuration is valid. @@ -192,51 +170,55 @@ def __validate_plugin_config_content(self): raise exceptions.SchemaValidationError(self.__plugin_config, validation_errors) - def __validate_plugin_entry_point(self, src_dir): + def __validate_plugin_entry_point(self): """ Validates the plugin entry point by parsing the entry - point to get module and entry point. 
Imports the module - to check for errors or issues. Also does an eval on the - entry point. + point to get module and entry point. """ - entry_point_field = self.__plugin_config_content['entryPoint'] - entry_point_strings = entry_point_field.split(':') - # Get the module and entry point name to import - entry_point_module = entry_point_strings[0] - entry_point_object = entry_point_strings[1] - plugin_type = self.__plugin_config_content['pluginType'] + entry_point_module, entry_point_object = self.split_entry_point( + self.__plugin_config_content['entryPoint']) - try: - self.__plugin_manifest, self.__warnings = ( - PluginValidator.__import_plugin(src_dir, entry_point_module, - entry_point_object, - plugin_type)) - except ImportError as err: - raise exceptions.UserError( - 'Unable to load module \'{}\' specified in ' - 'pluginEntryPoint \'{}\' from path \'{}\'. ' - 'Error message: {}'.format(entry_point_module, - entry_point_object, src_dir, err)) + if not entry_point_module: + raise exceptions.UserError('Plugin module is invalid') - logger.debug("Got manifest %s", self.__plugin_manifest) + if not entry_point_object: + raise exceptions.UserError('Plugin object is invalid') - @staticmethod - def __import_plugin(src_dir, entry_point_module, entry_point_object, - plugin_type): + def __check_for_undefined_names(self): """ - Imports the given python module. - NOTE: - Importing module in the current context pollutes the runtime of - the caller, in this case dvp. If the module being imported, for - e.g. contains code that adds a handler to the root logger at - import time, this can cause issues with logging in this code and - callers of validator. To avoid such issues, perform the import in - in a sub-process and on completion return the output. + Checks the plugin module for undefined names. This catches + missing imports, references to nonexistent variables, etc. 
+ + ..note:: + We are using the legacy flake8 api, because there is currently + no public, stable api for flake8 >= 3.0.0 + + For more info, see + https://flake8.pycqa.org/en/latest/user/python-api.html """ - importer = plugin_importer.PluginImporter(src_dir, entry_point_module, - entry_point_object, - plugin_type, True) - manifest, warnings = importer.import_plugin() + warnings = defaultdict(list) + src_dir = self.__plugin_config_content['srcDir'] + exclude_dir = os.path.sep.join([src_dir, CODEGEN_PACKAGE]) + style_guide = flake8.get_style_guide(select=["F821"], + exclude=[exclude_dir], + quiet=1) + style_guide.check_files(paths=[src_dir]) + file_checkers = style_guide._application.file_checker_manager.checkers + + for checker in file_checkers: + for result in checker.results: + # From the api code, result is a tuple defined as: error = + # (error_code, line_number, column, text, physical_line) + if result[0] == 'F821': + msg = "{} on line {} in {}".format(result[3], result[1], + checker.filename) + warnings['exception'].append(exceptions.UserError(msg)) + + if warnings and len(warnings) > 0: + raise exceptions.ValidationFailedError(warnings) - return manifest, warnings + @staticmethod + def split_entry_point(entry_point): + entry_point_strings = entry_point.split(':') + return entry_point_strings[0], entry_point_strings[1] diff --git a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py index e66241c7..46354fce 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py +++ b/tools/src/main/python/dlpx/virtualization/_internal/schema_validator.py @@ -5,16 +5,14 @@ import json import logging import os -from collections import defaultdict, namedtuple +from collections import namedtuple from dlpx.virtualization._internal import exceptions -from dlpx.virtualization._internal.util_classes import ValidationMode from jsonschema import Draft7Validator 
logger = logging.getLogger(__name__) -validation_result = namedtuple('validation_result', - ['plugin_schemas', 'warnings']) +validation_result = namedtuple('validation_result', ['plugin_schemas']) class SchemaValidator: @@ -24,42 +22,18 @@ class SchemaValidator: Returns: On successful validation, callers can get the content of the plugin schemas. If validation fails or has issues - will report exception - back if validation mode is error, otherwise warnings or info based - on validation mode. + back. """ - def __init__(self, - schema_file, - plugin_meta_schema, - validation_mode, - schemas=None): + def __init__(self, schema_file, plugin_meta_schema, schemas=None): self.__schema_file = schema_file self.__plugin_meta_schema = plugin_meta_schema - self.__validation_mode = validation_mode self.__plugin_schemas = schemas - self.__warnings = defaultdict(list) @property def result(self): - return validation_result(plugin_schemas=self.__plugin_schemas, - warnings=self.__warnings) + return validation_result(plugin_schemas=self.__plugin_schemas) def validate(self): - """ - Validates the plugin schema file. - """ - logger.debug('Run schema validations') - try: - self.__run_validations() - except Exception as e: - if self.__validation_mode is ValidationMode.INFO: - logger.info('Validation failed on plugin schema file : %s', e) - elif self.__validation_mode is ValidationMode.WARNING: - logger.warning('Validation failed on plugin schema file : %s', - e) - else: - raise e - - def __run_validations(self): """ Reads a plugin schema file and validates the contents using a pre-defined schema. diff --git a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg index d6373bb9..54a8c453 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg +++ b/tools/src/main/python/dlpx/virtualization/_internal/settings.cfg @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. 
+# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # # @@ -20,7 +20,7 @@ # versions in those packages until they are shipped out of band. # [General] -engine_api_version = 1.10.5 +engine_api_version = 1.11.2 distribution_name = dvp-tools package_author = Delphix namespace_package = dlpx diff --git a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py b/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py deleted file mode 100644 index deb2da64..00000000 --- a/tools/src/main/python/dlpx/virtualization/_internal/util_classes.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# - -import enum -import os - -UNIX_HOST_TYPE = 'UNIX' -WINDOWS_HOST_TYPE = 'WINDOWS' -STAGED_TYPE = 'STAGED' -DIRECT_TYPE = 'DIRECT' - -OUTPUT_DIR_NAME = '.dvp-gen-output' -PLUGIN_SCHEMAS_DIR = os.path.join(os.path.dirname(__file__), - 'validation_schemas') -PLUGIN_CONFIG_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, - 'plugin_config_schema.json') - -# -# This is a temporary file. Once blackbox has made the transition to 'id' -# instead of 'name' and uses UUIDs for the id, this, and everything -# associated with it can be removed. -# -PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION = os.path.join( - PLUGIN_SCHEMAS_DIR, 'plugin_config_schema_no_id_validation.json') - -PLUGIN_SCHEMA = os.path.join(PLUGIN_SCHEMAS_DIR, 'plugin_schema.json') - - -class ValidationMode(enum.Enum): - """ - Defines the validation mode that validator uses. - INFO - validator will give out info messages if validation fails. - WARNING - validator will log a warning if validation fails. - ERROR - validator will raise an exception if validation fails. - """ - INFO = 1 - WARNING = 2 - ERROR = 3 - - -class MessageUtils: - """ - Defines helpers methods to format warning and exception messages. 
- """ - @staticmethod - def exception_msg(exceptions): - exception_msg = '\n'.join( - MessageUtils.__format_msg('Error', ex) - for ex in exceptions['exception']) - return exception_msg - - @staticmethod - def warning_msg(warnings): - warning_msg = '\n'.join( - MessageUtils.__format_msg('Warning', warning) - for warning in warnings['warning']) - return warning_msg - - @staticmethod - def __format_msg(msg_type, msg): - msg_str = "{}: {}".format(msg_type, msg) - return msg_str diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json index 35a6ba1b..5d49e478 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema.json @@ -8,9 +8,8 @@ "name": { "type": "string" }, - "version": { - "type": "string", - "pattern": "^([0-9]+)\\.([0-9]+)\\.([a-zA-Z0-9_]+)$" + "externalVersion": { + "type": "string" }, "hostTypes": { "type": "array", @@ -47,8 +46,12 @@ "defaultLocale": { "type": "string", "default": "en-us" + }, + "buildNumber": { + "type": "string", + "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" } }, "additionalProperties": false, - "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language"] + "required": ["id", "name", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"] } diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json index 504d3837..703382fa 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json +++ 
b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_config_schema_no_id_validation.json @@ -7,9 +7,8 @@ "name": { "type": "string" }, - "version": { - "type": "string", - "pattern": "^([0-9]+)\\.([0-9]+)\\.([a-zA-Z0-9_]+)$" + "externalVersion": { + "type": "string" }, "hostTypes": { "type": "array", @@ -46,8 +45,12 @@ "defaultLocale": { "type": "string", "default": "en-us" + }, + "buildNumber": { + "type": "string", + "pattern": "^([0-9]+\\.)*[0-9]*[1-9][0-9]*(\\.[0-9]+)*$" } }, "additionalProperties": false, - "required": ["id", "name", "version", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language"] + "required": ["id", "name", "hostTypes", "entryPoint", "srcDir", "schemaFile", "pluginType", "language", "buildNumber"] } diff --git a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml index 2d5b0f99..063877ad 100644 --- a/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml +++ b/tools/src/main/python/dlpx/virtualization/_internal/validation_schemas/plugin_importer.yaml @@ -139,23 +139,35 @@ EXPECTED_DIRECT_ARGS_BY_OP: - virtual_source - repository +EXPECTED_UPGRADE_ARGS: + repository_id_to_impl: + - old_repository + source_config_id_to_impl: + - old_source_config + linked_source_id_to_impl: + - old_linked_source + virtual_source_id_to_impl: + - old_virtual_source + snapshot_id_to_impl: + - old_snapshot + REQUIRED_METHODS_BY_PLUGIN_TYPE: DIRECT: - hasRepositoryDiscovery: discovery.repository(), - hasSourceConfigDiscovery: discovery.source_config(), - hasLinkedPostSnapshot: linked.post_snapshot(), - hasVirtualConfigure: virtual.configure(), - hasVirtualReconfigure: virtual.reconfigure(), - hasVirtualPostSnapshot: virtual.post_snapshot(), + hasRepositoryDiscovery: discovery.repository() + hasSourceConfigDiscovery: discovery.source_config() + 
hasLinkedPostSnapshot: linked.post_snapshot() + hasVirtualConfigure: virtual.configure() + hasVirtualReconfigure: virtual.reconfigure() + hasVirtualPostSnapshot: virtual.post_snapshot() hasVirtualMountSpecification: virtual.mount_specification() STAGED: - hasRepositoryDiscovery: discovery.repository(), - hasSourceConfigDiscovery: discovery.source_config(), - hasLinkedPostSnapshot: linked.post_snapshot(), - hasLinkedMountSpecification: linked.mount_specification(), - hasVirtualConfigure: virtual.configure(), - hasVirtualReconfigure: virtual.reconfigure(), - hasVirtualPostSnapshot: virtual.post_snapshot(), + hasRepositoryDiscovery: discovery.repository() + hasSourceConfigDiscovery: discovery.source_config() + hasLinkedPostSnapshot: linked.post_snapshot() + hasLinkedMountSpecification: linked.mount_specification() + hasVirtualConfigure: virtual.configure() + hasVirtualReconfigure: virtual.reconfigure() + hasVirtualPostSnapshot: virtual.post_snapshot() hasVirtualMountSpecification: virtual.mount_specification() REQUIRED_METHODS_DESCRIPTION: diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py index 96c92098..64939dca 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_build.py @@ -1,14 +1,14 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import json import os import yaml -from dlpx.virtualization._internal import exceptions, util_classes +from dlpx.virtualization._internal import const, exceptions from dlpx.virtualization._internal.commands import build -from dlpx.virtualization._internal.plugin_validator import PluginValidator +from dlpx.virtualization._internal.plugin_importer import PluginImporter import mock import pytest @@ -58,8 +58,8 @@ def test_build_success(mock_relative_path, mock_install_deps, @staticmethod @pytest.mark.parametrize('artifact_filename', ['somefile.json']) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @mock.patch('dlpx.virtualization._internal.codegen.generate_python') @mock.patch( @@ -342,8 +342,8 @@ def test_zip_and_encode_source_files_encode_fail(mock_encode, src_dir): ''.format(src_dir, 'something')) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @mock.patch( 'dlpx.virtualization._internal.plugin_dependency_util.install_deps') @@ -359,8 +359,8 @@ def test_id_validation_positive(mock_relative_path, mock_install_deps, skip_id_validation) @staticmethod - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', + @mock.patch.object(PluginImporter, + '_PluginImporter__internal_import', return_value=({}, None)) @pytest.mark.parametrize('plugin_id', ['mongo']) def test_id_validation_negative(mock_import_plugin, plugin_config_file, @@ -628,13 +628,27 @@ def test_manual_discovery_parameter(plugin_config_content, src_dir, @staticmethod def test_plugin_config_schemas_diff(): - with open(util_classes.PLUGIN_CONFIG_SCHEMA) as f: + with open(const.PLUGIN_CONFIG_SCHEMA) as f: config_schema = json.load(f) - with open(util_classes.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: + with 
open(const.PLUGIN_CONFIG_SCHEMA_NO_ID_VALIDATION) as f: config_schema_no_id = json.load(f) # Only the id's pattern should be different so remove it. config_schema['properties']['id'].pop('pattern') assert config_schema == config_schema_no_id + + @staticmethod + @pytest.mark.parametrize('build_number, expected', [ + pytest.param('0.0.1', '0.0.1'), + pytest.param('0.1.0', '0.1'), + pytest.param('1.0.01.0', '1.0.1') + ]) + def test_build_number_parameter(plugin_config_content, src_dir, + schema_content, expected): + + upload_artifact = build.prepare_upload_artifact( + plugin_config_content, src_dir, schema_content, {}) + + assert expected == upload_artifact['buildNumber'] diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py index c61b9a92..adca36ce 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_codegen.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import errno @@ -7,10 +7,8 @@ import os import subprocess -from dlpx.virtualization._internal import (codegen, exceptions, file_util, - util_classes) - import pytest +from dlpx.virtualization._internal import codegen, const, exceptions, file_util class TestCodegen: @@ -140,7 +138,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): assert popen_helper.package_name == codegen.CODEGEN_PACKAGE assert popen_helper.module_name == codegen.CODEGEN_MODULE expected_output_dir = os.path.join(gen_py.plugin_content_dir, - util_classes.OUTPUT_DIR_NAME) + const.OUTPUT_DIR_NAME) assert popen_helper.output_dir == expected_output_dir # Validate that the "generated" file were copied. 
@@ -159,7 +157,7 @@ def test_codegen_success(codegen_gen_py_inputs, popen_helper): @staticmethod def test_get_build_dir_success(tmpdir): - testdir = os.path.join(tmpdir.strpath, util_classes.OUTPUT_DIR_NAME) + testdir = os.path.join(tmpdir.strpath, const.OUTPUT_DIR_NAME) file_util.make_dir(testdir, True) assert os.path.exists(testdir) assert os.path.isdir(testdir) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py index 860e1d70..5486eaa6 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_initialize.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import ast @@ -7,13 +7,11 @@ import os import jinja2 -from dlpx.virtualization._internal import (exceptions, plugin_util, - plugin_validator, schema_validator, - util_classes) -from dlpx.virtualization._internal.commands import initialize as init - import mock import pytest +from dlpx.virtualization._internal import (const, exceptions, plugin_util, + plugin_validator, schema_validator) +from dlpx.virtualization._internal.commands import initialize as init @pytest.fixture @@ -49,14 +47,14 @@ def format_entry_point_template(entry_point_template): template = jinja2.Environment().from_string(entry_point_template) def format_template(plugin_name, ingestion_strategy, host_type): - if host_type == util_classes.WINDOWS_HOST_TYPE: + if host_type == const.WINDOWS_HOST_TYPE: default_mount_path = "C:\\\\tmp\\\\dlpx_staged_mounts\\\\{}" - elif host_type == util_classes.UNIX_HOST_TYPE: + elif host_type == const.UNIX_HOST_TYPE: default_mount_path = "/tmp/dlpx_staged_mounts/{}" - if ingestion_strategy == util_classes.DIRECT_TYPE: + if ingestion_strategy == const.DIRECT_TYPE: operations = direct_operations_template() 
- elif ingestion_strategy == util_classes.STAGED_TYPE: + elif ingestion_strategy == const.STAGED_TYPE: operations = jinja2.Environment().from_string( staged_operations_template()) operations = operations.render( @@ -74,21 +72,19 @@ def format_template(plugin_name, ingestion_strategy, host_type): class TestInitialize: @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) - @pytest.mark.parametrize( - 'host_type', - [util_classes.UNIX_HOST_TYPE, util_classes.WINDOWS_HOST_TYPE]) + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) + @pytest.mark.parametrize('host_type', + [const.UNIX_HOST_TYPE, const.WINDOWS_HOST_TYPE]) def test_init(tmpdir, ingestion_strategy, host_type, schema_template, plugin_name, format_entry_point_template): # Initialize an empty directory. init.init(tmpdir.strpath, ingestion_strategy, plugin_name, host_type) # Validate the config file is as we expect. - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content @@ -98,6 +94,7 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, assert config['entryPoint'] == init.DEFAULT_ENTRY_POINT assert config['srcDir'] == init.DEFAULT_SRC_DIRECTORY assert config['schemaFile'] == init.DEFAULT_SCHEMA_FILE + assert config['buildNumber'] == init.DEFAULT_BUILD_NUMBER # Validate the schema file is identical to the template. 
schema_file_path = os.path.join(tmpdir.strpath, config['schemaFile']) @@ -119,12 +116,11 @@ def test_init(tmpdir, ingestion_strategy, host_type, schema_template, @staticmethod def test_init_without_plugin_name(tmpdir): - init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, "", - util_classes.UNIX_HOST_TYPE) + init.init(tmpdir.strpath, const.DIRECT_TYPE, "", const.UNIX_HOST_TYPE) - result = plugin_util.read_and_validate_plugin_config_file( + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content @@ -133,54 +129,57 @@ def test_init_without_plugin_name(tmpdir): @staticmethod def test_init_windows_plugin(tmpdir, plugin_name): - init.init(tmpdir.strpath, util_classes.DIRECT_TYPE, plugin_name, - util_classes.WINDOWS_HOST_TYPE) - result = plugin_util.read_and_validate_plugin_config_file( + init.init(tmpdir.strpath, const.DIRECT_TYPE, plugin_name, + const.WINDOWS_HOST_TYPE) + result = plugin_util.validate_plugin_config_file( os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE), - True, False) + True) config = result.plugin_config_content # Validate that the host type is WINDOWS host_types = config['hostTypes'] assert len(host_types) == 1 - assert host_types[0] == util_classes.WINDOWS_HOST_TYPE + assert host_types[0] == const.WINDOWS_HOST_TYPE @staticmethod - @pytest.mark.parametrize( - 'ingestion_strategy', - [util_classes.DIRECT_TYPE, util_classes.STAGED_TYPE]) + @pytest.mark.parametrize('ingestion_strategy', + [const.DIRECT_TYPE, const.STAGED_TYPE]) def test_plugin_from_init_is_valid(tmpdir, ingestion_strategy, plugin_name): init.init(tmpdir.strpath, ingestion_strategy, plugin_name, - util_classes.UNIX_HOST_TYPE) + const.UNIX_HOST_TYPE) plugin_config_file = os.path.join(tmpdir.strpath, init.DEFAULT_PLUGIN_CONFIG_FILE) schema_file = os.path.join(tmpdir.strpath, init.DEFAULT_SCHEMA_FILE) validator = 
plugin_validator.PluginValidator(plugin_config_file, - schema_file, True, True) - validator.validate() + schema_file) + + # Assert config file validation is not done. + assert not validator.result.plugin_config_content + + validator.validate_plugin_config() - assert not validator.result.warnings + # Assert config file is validated. + assert validator.result.plugin_config_content @staticmethod def test_invalid_with_config_file(plugin_config_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(plugin_config_file), - util_classes.DIRECT_TYPE, None, - util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(plugin_config_file), const.DIRECT_TYPE, + None, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_schema_file(schema_file): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(schema_file), util_classes.DIRECT_TYPE, - None, util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(schema_file), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod def test_invalid_with_src_dir(src_dir): with pytest.raises(exceptions.PathExistsError): - init.init(os.path.dirname(src_dir), util_classes.DIRECT_TYPE, None, - util_classes.UNIX_HOST_TYPE) + init.init(os.path.dirname(src_dir), const.DIRECT_TYPE, None, + const.UNIX_HOST_TYPE) @staticmethod @mock.patch('yaml.dump') @@ -189,8 +188,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, tmpdir, plugin_name): mock_yaml_dump.side_effect = RuntimeError() with pytest.raises(exceptions.UserError): - init.init(tmpdir.strpath, util_classes.STAGED_TYPE, plugin_name, - util_classes.UNIX_HOST_TYPE) + init.init(tmpdir.strpath, const.STAGED_TYPE, plugin_name, + const.UNIX_HOST_TYPE) src_dir_path = os.path.join(tmpdir.strpath, init.DEFAULT_SRC_DIRECTORY) config_file_path = os.path.join(tmpdir.strpath, @@ -203,9 +202,8 @@ def test_init_calls_cleanup_on_failure(mock_cleanup, mock_yaml_dump, @staticmethod def 
test_default_schema_definition(schema_template): - validator = schema_validator.SchemaValidator( - None, util_classes.PLUGIN_SCHEMA, - util_classes.ValidationMode.ERROR, schema_template) + validator = schema_validator.SchemaValidator(None, const.PLUGIN_SCHEMA, + schema_template) validator.validate() # Validate the repository schema only has the 'name' property. @@ -240,7 +238,7 @@ def test_default_schema_definition(schema_template): @staticmethod def test_default_entry_point(plugin_id): entry_point_contents = init._get_entry_point_contents( - plugin_id, util_classes.DIRECT_TYPE, util_classes.UNIX_HOST_TYPE) + plugin_id, const.DIRECT_TYPE, const.UNIX_HOST_TYPE) tree = ast.parse(entry_point_contents) for stmt in ast.walk(tree): if isinstance(stmt, ast.Assign): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py index fa5e9681..bb7e9d89 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/commands/test_templates.py @@ -131,10 +131,7 @@ def test_success(module): assert not test_object.string_property test_dict = test_object.to_dict() - assert test_dict == { - 'requiredStringProperty': 'test string', - 'stringProperty': None - } + assert test_dict == {'requiredStringProperty': 'test string'} from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -161,10 +158,7 @@ def test_unicode_success(module): assert not test_object.string_property test_dict = test_object.to_dict() - assert test_dict == { - 'requiredStringProperty': u'test\u2345\u2603', - 'stringProperty': None - } + assert test_dict == {'requiredStringProperty': u'test\u2345\u2603'} from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -343,9 +337,7 @@ def test_success(module): test_dict = 
test_object.to_dict() assert test_dict == { 'requiredNumberProperty': 200.5, - 'numberProperty': None, - 'requiredIntegerProperty': -50, - 'integerProperty': None + 'requiredIntegerProperty': -50 } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @@ -1270,20 +1262,17 @@ def test_successs(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredStringProperty': 'A', - 'stringProperty': None, 'requiredObjectProperty': { 'TWO': 'dos', 'ONE': 'uno' }, - 'objectProperty': None, - 'requiredArrayProperty': ['DO', 'RE', 'MI'], - 'arrayProperty': None + 'requiredArrayProperty': ['DO', 'RE', 'MI'] } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object @staticmethod - def test_successs_setter(module): + def test_success_setter(module): test_object = module.TestDefinition(required_string_property='B', required_object_property={}, required_array_property=[]) @@ -1301,14 +1290,11 @@ def test_successs_setter(module): test_dict = test_object.to_dict() assert test_dict == { 'requiredStringProperty': 'A', - 'stringProperty': None, 'requiredObjectProperty': { 'TWO': 'dos', 'ONE': 'uno' }, - 'objectProperty': None, - 'requiredArrayProperty': ['DO', 'RE', 'MI'], - 'arrayProperty': None + 'requiredArrayProperty': ['DO', 'RE', 'MI'] } from_dict_object = module.TestDefinition.from_dict(test_dict) assert test_object == from_dict_object diff --git a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py index f3138bce..28ccaef1 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/conftest.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/conftest.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import configparser @@ -8,8 +8,7 @@ import os import yaml -from dlpx.virtualization._internal import (cli, click_util, package_util, - util_classes) +from dlpx.virtualization._internal import cli, click_util, const, package_util import pytest @@ -47,18 +46,6 @@ def plugin_config_filename(): return 'plugin_config.yml' -@pytest.fixture -def fake_staged_plugin_config(): - return os.path.join(os.path.dirname(__file__), - 'fake_plugin/staged/plugin_config.yml') - - -@pytest.fixture -def fake_direct_plugin_config(): - return os.path.join(os.path.dirname(__file__), - 'fake_plugin/direct/plugin_config.yml') - - @pytest.fixture def src_dir(tmpdir, src_dirname): """ @@ -182,104 +169,246 @@ def artifact_file_created(): @pytest.fixture -def plugin_config_content(plugin_id, plugin_name, src_dir, schema_file, - language, manual_discovery, plugin_type): +def plugin_config_content(plugin_id, plugin_name, external_version, language, + host_types, plugin_type, entry_point, src_dir, + schema_file, manual_discovery, build_number): """ This fixutre creates the dict expected in the properties yaml file the customer must provide for the build and compile commands. 
""" config = { - 'version': '2.0.0', - 'hostTypes': ['UNIX'], - 'entryPoint': 'python_vfiles:vfiles', 'defaultLocale': 'en-us', 'rootSquashEnabled': True, } - if id: + + if plugin_id: config['id'] = plugin_id if plugin_name: config['name'] = plugin_name + if external_version: + config['externalVersion'] = external_version + + if language: + config['language'] = language + + if host_types: + config['hostTypes'] = host_types + if plugin_type: config['pluginType'] = plugin_type + if entry_point: + config['entryPoint'] = entry_point + if src_dir: config['srcDir'] = src_dir if schema_file: config['schemaFile'] = schema_file - if language: - config['language'] = language - - # Here we do is not None check because we will be passing in + # Here we do an 'is not None' check because we will be passing in # booleans as a parameter in tests. if manual_discovery is not None: config['manualDiscovery'] = manual_discovery + if build_number: + config['buildNumber'] = build_number + return config @pytest.fixture -def plugin_entry_point_name(): +def plugin_id(): + return '16bef554-9470-11e9-b2e3-8c8590d4a42c' + + +@pytest.fixture +def plugin_name(): + return 'python_vfiles' + + +@pytest.fixture +def external_version(): + return '2.0.0' + + +@pytest.fixture +def language(): + return 'PYTHON27' + + +@pytest.fixture +def host_types(): + return ['UNIX'] + + +@pytest.fixture +def plugin_type(): + return const.DIRECT_TYPE + + +@pytest.fixture +def entry_point(entry_point_module, entry_point_object): + return '{}:{}'.format(entry_point_module, entry_point_object) + + +@pytest.fixture +def entry_point_module(): + return 'python_vfiles' + + +@pytest.fixture +def entry_point_object(): return 'vfiles' @pytest.fixture -def plugin_module_content(plugin_entry_point_name): +def manual_discovery(): + return None + + +@pytest.fixture +def build_number(): + return '2.0.0' + + +@pytest.fixture +def artifact_manual_discovery(): + return True + + +@pytest.fixture +def 
plugin_module_content(entry_point_object, discovery_operation, + linked_operation, virtual_operation, + upgrade_operation): class Object(object): pass - discovery = Object() - discovery.repository_impl = True - discovery.source_config_impl = True - - linked = Object() - linked.pre_snapshot_impl = True - linked.post_snapshot_impl = True - linked.start_staging_impl = True - linked.stop_staging_impl = False - linked.status_impl = True - linked.worker_impl = False - linked.mount_specification_impl = True - - virtual = Object() - virtual.configure_impl = True - virtual.unconfigure_impl = False - virtual.reconfigure_impl = True - virtual.start_impl = True - virtual.stop_impl = False - virtual.pre_snapshot_impl = True - virtual.post_snapshot_impl = True - virtual.mount_specification_impl = True - virtual.status_impl = False - virtual.initialize_impl = False - plugin_object = Object() - plugin_object.discovery = discovery - plugin_object.linked = linked - plugin_object.virtual = virtual + plugin_object.discovery = discovery_operation + plugin_object.linked = linked_operation + plugin_object.virtual = virtual_operation + plugin_object.upgrade = upgrade_operation plugin_module = Object() - setattr(plugin_module, plugin_entry_point_name, plugin_object) + setattr(plugin_module, entry_point_object, plugin_object) return plugin_module @pytest.fixture -def plugin_manifest(): +def discovery_operation(): + class DiscoveryOperations(object): + pass + + discovery = DiscoveryOperations() + + def repository_discovery(source_connection): + return None + + def source_config_discovery(source_connection, repository): + return None + + discovery.repository_impl = repository_discovery + discovery.source_config_impl = source_config_discovery + + return discovery + + +@pytest.fixture +def linked_operation(): + class LinkedOperations(object): + pass + + linked = LinkedOperations() + + def pre_snapshot(direct_source, repository, source_config): + pass + + def post_snapshot(direct_source, 
repository, source_config): + return None + + linked.pre_snapshot_impl = pre_snapshot + linked.post_snapshot_impl = post_snapshot + linked.start_staging_impl = None + linked.stop_staging_impl = None + linked.status_impl = None + linked.worker_impl = None + linked.mount_specification_impl = None + + return linked + + +@pytest.fixture +def virtual_operation(): + class VirtualOperations(object): + pass + + virtual = VirtualOperations() + + def configure(virtual_source, repository, snapshot): + return None + + def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + def start(virtual_source, repository, source_config): + pass + + def pre_snapshot(virtual_source, repository, source_config): + pass + + def post_snapshot(virtual_source, repository, source_config): + return None + + def mount_specification(virtual_source, repository): + return None + + virtual.configure_impl = configure + virtual.unconfigure_impl = None + virtual.reconfigure_impl = reconfigure + virtual.start_impl = start + virtual.stop_impl = None + virtual.pre_snapshot_impl = pre_snapshot + virtual.post_snapshot_impl = post_snapshot + virtual.mount_specification_impl = mount_specification + virtual.status_impl = None + virtual.initialize_impl = None + + return virtual + + +@pytest.fixture +def upgrade_operation(): + class UpgradeOperation(object): + pass + + upgrade = UpgradeOperation() + upgrade.migration_id_list = [] + upgrade.repository_id_to_impl = {} + upgrade.source_config_id_to_impl = {} + upgrade.linked_source_id_to_impl = {} + upgrade.virtual_source_id_to_impl = {} + upgrade.snapshot_id_to_impl = {} + + return upgrade + + +@pytest.fixture +def plugin_manifest(upgrade_operation): manifest = { 'type': 'PluginManifest', 'hasRepositoryDiscovery': True, 'hasSourceConfigDiscovery': True, 'hasLinkedPreSnapshot': True, 'hasLinkedPostSnapshot': True, - 'hasLinkedStartStaging': True, + 'hasLinkedStartStaging': False, 'hasLinkedStopStaging': False, - 'hasLinkedStatus': True, + 
'hasLinkedStatus': False, 'hasLinkedWorker': False, - 'hasLinkedMountSpecification': True, + 'hasLinkedMountSpecification': False, 'hasVirtualConfigure': True, 'hasVirtualUnconfigure': False, 'hasVirtualReconfigure': True, @@ -289,41 +418,12 @@ def plugin_manifest(): 'hasVirtualPostSnapshot': True, 'hasVirtualMountSpecification': True, 'hasVirtualStatus': False, - 'hasInitialize': False + 'hasInitialize': False, + 'migrationIdList': upgrade_operation.migration_id_list } return manifest -@pytest.fixture -def plugin_id(): - return '16bef554-9470-11e9-b2e3-8c8590d4a42c' - - -@pytest.fixture -def plugin_name(): - return 'python_vfiles' - - -@pytest.fixture -def language(): - return 'PYTHON27' - - -@pytest.fixture -def manual_discovery(): - return None - - -@pytest.fixture -def artifact_manual_discovery(): - return True - - -@pytest.fixture -def plugin_type(): - return util_classes.DIRECT_TYPE - - @pytest.fixture def schema_content(repository_definition, source_config_definition, virtual_source_definition, linked_source_definition, @@ -452,7 +552,7 @@ def basic_artifact_content(engine_api, virtual_source_definition, 'type': 'Plugin', 'name': '16bef554-9470-11e9-b2e3-8c8590d4a42c', 'prettyName': 'python_vfiles', - 'version': '2.0.0', + 'externalVersion': '2.0.0', 'defaultLocale': 'en-us', 'language': 'PYTHON27', 'hostTypes': ['UNIX'], @@ -460,6 +560,7 @@ def basic_artifact_content(engine_api, virtual_source_definition, 'buildApi': package_util.get_build_api_version(), 'engineApi': engine_api, 'rootSquashEnabled': True, + 'buildNumber': '2', 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'manifest': {} } @@ -498,7 +599,7 @@ def artifact_content(engine_api, virtual_source_definition, 'type': 'Plugin', 'name': '16bef554-9470-11e9-b2e3-8c8590d4a42c', 'prettyName': 'python_vfiles', - 'version': '2.0.0', + 'externalVersion': '2.0.0', 'defaultLocale': 'en-us', 'language': 'PYTHON27', 'hostTypes': ['UNIX'], @@ -506,6 +607,7 @@ def artifact_content(engine_api, 
virtual_source_definition, 'buildApi': package_util.get_build_api_version(), 'sourceCode': 'UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==', 'rootSquashEnabled': True, + 'buildNumber': '2', 'manifest': {} } @@ -535,7 +637,7 @@ def artifact_content(engine_api, virtual_source_definition, @pytest.fixture def engine_api(): - return {'type': 'APIVersion', 'major': 1, 'minor': 10, 'micro': 5} + return {'type': 'APIVersion', 'major': 1, 'minor': 11, 'micro': 2} @pytest.fixture diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py new file mode 100644 index 00000000..cd28b92c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/arbitrary_error.py @@ -0,0 +1,19 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +class ArbitraryError(Exception): + @property + def message(self): + return self.args[0] + + def __init__(self, message): + super(ArbitraryError, self).__init__(message) + + +raise ArbitraryError('Got an arbitrary non-platforms error for testing.') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py new file mode 100644 index 00000000..2688405b --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/dec_not_function.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from __future__ import print_function + +import logging + +from dlpx.virtualization.platform import Plugin + +logger = logging.getLogger() +logger.setLevel(logging.NOTSET) + +plugin = Plugin() + + +@plugin.discovery.repository() +def repository_discovery(source_connection): + return None + + +@plugin.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +# Defining the decorator as not a function +@plugin.linked.pre_snapshot() +class PreSnapshot(object): + pass diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py new file mode 100644 index 00000000..6b99f58c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_bad_format.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('1234.0.0.') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py new file mode 100644 index 00000000..6ea3add1 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_not_string.py @@ -0,0 +1,12 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository(['testing', 'out', 'validation']) +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py new file mode 100644 index 00000000..5f8196d1 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/id_used.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.upgrade.repository('5.4.0.1') +def repo_upgrade(old_repository): + return old_repository + + +@plugin.upgrade.snapshot('5.04.000.01') +def snap_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py similarity index 93% rename from tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py rename to tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py index d5a00513..c0a031bc 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/src/python_vfiles.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/multiple_warnings.py @@ -71,3 +71,8 @@ def stop(repository, source_config, virtual_source): @vfiles.virtual.unconfigure() def unconfigure(repository, source_config, virtual_source): pass + + +@vfiles.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py 
b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py new file mode 100644 index 00000000..59fbcc0c --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/op_already_defined.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin + +plugin = Plugin() + + +@plugin.discovery.repository() +def repository_discovery(source_connection): + return None + + +@plugin.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +# Defining another function with the same decorator +@plugin.discovery.source_config() +def source_config_discovery_two(source_connection, repository): + return None diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml deleted file mode 100644 index 6ddc6cd1..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/plugin_config.yml +++ /dev/null @@ -1,11 +0,0 @@ -id: 16bef554-9470-11e9-b2e3-8c8590d4a42c -name: Unstructured Files using Python -version: 2.0.0 -hostTypes: - - UNIX -entryPoint: python_vfiles:vfiles -srcDir: src/ -schemaFile: ./schema.json -manualDiscovery: true -pluginType: DIRECT -language: PYTHON27 diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json deleted file mode 100644 index ba2ebcc6..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/schema.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "repositoryDefinition": { - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "sourceConfigDefinition": { - "type": "object", - 
"required": ["name", "path"], - "additionalProperties": false, - "properties": { - "name": { "type": "string" }, - "path": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["path"] - }, - "virtualSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : { - "path": { "type": "string" } - } - }, - "linkedSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : {} - }, - "snapshotDefinition": { - "type" : "object", - "additionalProperties" : false, - "properties" : {} - } -} diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py new file mode 100644 index 00000000..010c705a --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/successful.py @@ -0,0 +1,84 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from dlpx.virtualization.platform import Plugin, Status + +direct = Plugin() + + +@direct.discovery.repository() +def repository_discovery(source_connection): + return [] + + +@direct.discovery.source_config() +def source_config_discovery(source_connection, repository): + return [] + + +@direct.linked.pre_snapshot() +def direct_pre_snapshot(direct_source, repository, source_config): + return + + +@direct.linked.post_snapshot() +def direct_post_snapshot(direct_source, repository, source_config): + return None + + +@direct.virtual.configure() +def configure(virtual_source, repository, snapshot): + path = virtual_source.parameters.path + name = "VDB mounted to " + path + return None + + +@direct.virtual.mount_specification() +def mount_specification(repository, virtual_source): + return None + + +@direct.virtual.post_snapshot() +def postSnapshot(repository, source_config, virtual_source): + return None + + +@direct.virtual.pre_snapshot() +def preSnapshot(repository, source_config, 
virtual_source): + pass + + +@direct.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + +@direct.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@direct.virtual.status() +def status(repository, source_config, virtual_source): + return Status.ACTIVE + + +@direct.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@direct.virtual.unconfigure() +def unconfigure(repository, source_config, virtual_source): + pass + + +@direct.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository + + +@direct.upgrade.snapshot('2019.11.30') +def snap_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py new file mode 100644 index 00000000..68ecd5b2 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/direct/upgrade_warnings.py @@ -0,0 +1,100 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. 
+# +# flake8: noqa +from dlpx.virtualization.platform import Plugin, Status + +direct = Plugin() + + +@direct.discovery.repository() +def repository_discovery(source_connection): + return [] + + +@direct.discovery.source_config() +def source_config_discovery(source_connection, repository): + return [] + + +@direct.linked.pre_snapshot() +def direct_pre_snapshot(direct_source, repository, source_config): + return + + +@direct.linked.post_snapshot() +def direct_post_snapshot(direct_source, repository, source_config): + return None + + +@direct.virtual.configure() +def configure(virtual_source, repository, snapshot): + return None + + +@direct.virtual.mount_specification() +def mount_specification(repository, virtual_source): + return None + + +@direct.virtual.post_snapshot() +def postSnapshot(repository, source_config, virtual_source): + return None + + +@direct.virtual.pre_snapshot() +def preSnapshot(repository, source_config, virtual_source): + pass + + +@direct.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + pass + + +@direct.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@direct.virtual.status() +def status(repository, source_config, virtual_source): + return Status.ACTIVE + + +@direct.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@direct.virtual.unconfigure() +def unconfigure(repository, source_config, virtual_source): + pass + + +@direct.upgrade.repository('2019.11.20') +def repo_upgrade(old_repository): + return old_repository + + +@direct.upgrade.source_config('2019.11.22') +def sc_upgrade(old_source_config): + return old_source_config + + +# Added second arg to check if length arg check fails. +@direct.upgrade.linked_source('2019.11.24') +def ls_upgrade(old_linked, old_source): + return old_linked + + +# Renamed old_virtual_source to old_linked_source to test named arg checks. 
+@direct.upgrade.virtual_source('2019.11.26') +def ls_upgrade(old_linked_source): + return old_linked_source + + +# Renamed old_snapshot to bad_input_name to test named arg checks. +@direct.upgrade.snapshot('2019.11.30') +def snap_upgrade(bad_input_name): + return bad_input_name diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py similarity index 89% rename from tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py rename to tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py index acd32437..094c1dde 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/src/python_staged.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/multiple_warnings.py @@ -17,12 +17,12 @@ # Renamed source_connection to connection to test if named arg check detects. @staged.discovery.repository() def repository_discovery(connection): - return None + return [] @staged.discovery.source_config() def source_config_discovery(source_connection, repository): - return None + return [] @staged.linked.mount_specification() @@ -67,6 +67,11 @@ def configure(virtual_source, repository, snapshot): return None +@staged.virtual.reconfigure() +def reconfigure(virtual_source, repository, source_config, snapshot): + return None + + # Removed virtual.mount_specification for test validation. @@ -86,8 +91,11 @@ def start(repository, source_config, virtual_source): # Added snapshot parameter to check if arg check fails. 
- - @staged.virtual.stop() def stop(repository, source_config, virtual_source, snapshot): pass + + +@staged.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml deleted file mode 100644 index 1742b86b..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/plugin_config.yml +++ /dev/null @@ -1,14 +0,0 @@ -# -# Copyright (c) 2019 by Delphix. All rights reserved. -# -id: 16bef554-9470-11e9-b2e3-8c8590d4a42c -name: Staged Toolkit using Python -version: 1.0.0 -hostTypes: - - UNIX -entryPoint: python_staged:staged -srcDir: src/ -schemaFile: ./schema.json -manualDiscovery: true -pluginType: STAGED -language: PYTHON27 diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json deleted file mode 100644 index 7c7d10ea..00000000 --- a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/schema.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "repositoryDefinition": { - "type": "object", - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "sourceConfigDefinition": { - "type": "object", - "required": ["name"], - "additionalProperties": false, - "properties": { - "name": { "type": "string" } - }, - "nameField": "name", - "identityFields": ["name"] - }, - "virtualSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties" : { - "path": { "type": "string" } - }, - "required": ["path"] - }, - "linkedSourceDefinition": { - "type": "object", - "additionalProperties" : false, - "properties": { - "path": { "type": "string" } - } - }, - "snapshotDefinition": { - "type" : "object", - "additionalProperties" : 
false, - "properties" : {} - } -} diff --git a/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py new file mode 100644 index 00000000..31ae1151 --- /dev/null +++ b/tools/src/test/python/dlpx/virtualization/_internal/fake_plugin/staged/successful.py @@ -0,0 +1,101 @@ +# +# Copyright (c) 2019 by Delphix. All rights reserved. +# +# flake8: noqa +from __future__ import print_function + +import logging + +from dlpx.virtualization.platform import Plugin + +logger = logging.getLogger() +logger.setLevel(logging.NOTSET) + +staged = Plugin() + + +@staged.discovery.repository() +def repository_discovery(source_connection): + return None + + +@staged.discovery.source_config() +def source_config_discovery(source_connection, repository): + return None + + +@staged.linked.mount_specification() +def staged_mount_specification(staged_source, repository): + return None + + +@staged.linked.pre_snapshot() +def staged_pre_snapshot(repository, source_config, staged_source, + snapshot_parameters): + pass + + +@staged.linked.post_snapshot() +def staged_post_snapshot(repository, source_config, staged_source, + snapshot_parameters): + return None + + +@staged.linked.start_staging() +def start_staging(repository, source_config, staged_source): + pass + + +@staged.linked.stop_staging() +def stop_staging(repository, source_config, staged_source): + pass + + +@staged.linked.status() +def staged_status(staged_source, repository, source_config): + return None + + +@staged.linked.worker() +def staged_worker(repository, source_config, staged_source): + pass + + +@staged.virtual.configure() +def configure(virtual_source, repository, snapshot): + return None + + +@staged.virtual.mount_specification() +def mount_specification(virtual_source, repository): + return None + + +@staged.virtual.pre_snapshot() +def pre_snapshot(repository, source_config, virtual_source): + pass + + 
+@staged.virtual.post_snapshot() +def post_snapshot(repository, source_config, virtual_source): + return None + + +@staged.virtual.start() +def start(repository, source_config, virtual_source): + pass + + +@staged.virtual.stop() +def stop(repository, source_config, virtual_source): + pass + + +@staged.upgrade.repository('2019.10.30') +def repo_upgrade(old_repository): + return old_repository + + +@staged.upgrade.snapshot('2019.11.30') +def snap_upgrade(old_snapshot): + return old_snapshot diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py index 761e3df4..32b4a9ca 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_cli.py @@ -1,12 +1,12 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. # import os import click.testing as click_testing import yaml -from dlpx.virtualization._internal import cli, exceptions, util_classes +from dlpx.virtualization._internal import cli, const, exceptions import mock import pytest @@ -107,10 +107,8 @@ def test_command_user_error(mock_init, plugin_name): assert result.output == 'codegen_error\n' # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.initialize.init') @@ -124,10 +122,8 @@ def test_command_non_user_error(mock_init, plugin_name): assert 'Internal error, please contact Delphix.\n' in result.output # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + 
mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) class TestInitCli: @@ -141,26 +137,21 @@ def test_default_params(mock_init, plugin_name): assert result.exit_code == 0, 'Output: {}'.format(result.output) # 'DIRECT' and os.getcwd() are the expected defaults - mock_init.assert_called_once_with(os.getcwd(), - util_classes.DIRECT_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod @mock.patch('dlpx.virtualization._internal.commands.initialize.init') def test_non_default_params(mock_init, plugin_name): runner = click_testing.CliRunner() - result = runner.invoke(cli.delphix_sdk, [ - 'init', '-s', util_classes.STAGED_TYPE, '-r', '.', '-n', - plugin_name - ]) + result = runner.invoke( + cli.delphix_sdk, + ['init', '-s', const.STAGED_TYPE, '-r', '.', '-n', plugin_name]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_init.assert_called_once_with(os.getcwd(), - util_classes.STAGED_TYPE, - plugin_name, - util_classes.UNIX_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.STAGED_TYPE, + plugin_name, const.UNIX_HOST_TYPE) @staticmethod def test_invalid_ingestion_strategy(plugin_name): @@ -185,8 +176,8 @@ def test_multiple_host_types(): runner = click_testing.CliRunner() result = runner.invoke(cli.delphix_sdk, [ - 'init', '-t', '{},{}'.format(util_classes.UNIX_HOST_TYPE, - util_classes.WINDOWS_HOST_TYPE) + 'init', '-t', '{},{}'.format(const.UNIX_HOST_TYPE, + const.WINDOWS_HOST_TYPE) ]) assert result.exit_code != 0 @@ -199,12 +190,10 @@ def test_windows_host_type(mock_init, plugin_name): result = runner.invoke( cli.delphix_sdk, - ['init', '-n', plugin_name, '-t', util_classes.WINDOWS_HOST_TYPE]) + ['init', '-n', plugin_name, '-t', const.WINDOWS_HOST_TYPE]) assert result.exit_code == 0, 'Output: {}'.format(result.output) - mock_init.assert_called_once_with(os.getcwd(), - 
util_classes.DIRECT_TYPE, - plugin_name, - util_classes.WINDOWS_HOST_TYPE) + mock_init.assert_called_once_with(os.getcwd(), const.DIRECT_TYPE, + plugin_name, const.WINDOWS_HOST_TYPE) @staticmethod def test_invalid_host_type(): diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py index 1f263475..0c925bec 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_file_util.py @@ -240,5 +240,5 @@ def test_tmpdir_with_raised_exception(): raise RuntimeError('test') except RuntimeError as e: - assert e.message == 'test' + assert str(e) == 'test' assert not os.path.exists(d) diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py index 111b915c..59ffa487 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_package_util.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# from dlpx.virtualization._internal import package_util @@ -10,7 +10,7 @@ class TestPackageUtil: @staticmethod def test_get_version(): - assert package_util.get_version() == '1.1.0-internal-7' + assert package_util.get_version() == '2.0.0-internal-001' @staticmethod def test_get_virtualization_api_version(): @@ -18,7 +18,7 @@ def test_get_virtualization_api_version(): @staticmethod def test_get_engine_api_version(): - assert package_util.get_engine_api_version_from_settings() == '1.10.5' + assert package_util.get_engine_api_version_from_settings() == '1.11.2' @staticmethod def test_get_build_api_version_json(): @@ -35,8 +35,8 @@ def test_get_engine_api_version_json(): engine_api_version = { 'type': 'APIVersion', 'major': 1, - 'minor': 10, - 'micro': 5 + 'minor': 11, + 'micro': 2 } assert package_util.get_engine_api_version() == engine_api_version diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py index b312e497..ccd294d7 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_dependency_util.py @@ -135,9 +135,8 @@ def test_build_wheel_fails_with_no_setup_file(tmp_path): with pytest.raises(RuntimeError) as excinfo: pdu._build_wheel(tmp_path.as_posix()) - assert excinfo.value.message == ( - 'No setup.py file exists in directory ' - '{}'.format(tmp_path.as_posix())) + assert str(excinfo.value) == ('No setup.py file exists in directory ' + '{}'.format(tmp_path.as_posix())) @staticmethod @mock.patch.object(subprocess, 'Popen') diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py index 25a3317e..05980b3f 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py +++ 
b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_importer.py @@ -2,63 +2,91 @@ # Copyright (c) 2019 by Delphix. All rights reserved. # import exceptions -from collections import defaultdict +import os +import uuid +from collections import OrderedDict from dlpx.virtualization._internal.plugin_importer import PluginImporter +from dlpx.virtualization._internal import (file_util, plugin_util, + plugin_validator) import mock import pytest +import yaml + + + +@pytest.fixture +def fake_src_dir(plugin_type): + """ + This fixture gets the path of the fake plugin src files used for testing + """ + return os.path.join(os.path.dirname(__file__), 'fake_plugin', + plugin_type.lower()) + + +def get_plugin_importer(plugin_config_file): + plugin_config_content = None + with open(plugin_config_file, 'rb') as f: + plugin_config_content = yaml.safe_load(f) + + src_dir = file_util.get_src_dir_path(plugin_config_file, + plugin_config_content['srcDir']) + entry_point_module, entry_point_object = plugin_validator.PluginValidator\ + .split_entry_point(plugin_config_content['entryPoint']) + plugin_type = plugin_config_content['pluginType'] + + return PluginImporter(src_dir, entry_point_module, entry_point_object, + plugin_type, True) class TestPluginImporter: @staticmethod @mock.patch('importlib.import_module') def test_get_plugin_manifest(mock_import, src_dir, plugin_type, - plugin_name, plugin_entry_point_name, + entry_point_module, entry_point_object, plugin_module_content, plugin_manifest): mock_import.return_value = plugin_module_content - importer = PluginImporter(src_dir, plugin_name, - plugin_entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() - assert not warnings - assert manifest == plugin_manifest + importer = PluginImporter(src_dir, entry_point_module, + entry_point_object, plugin_type, False) + importer.validate_plugin_module() + + assert importer.result.plugin_manifest == plugin_manifest @staticmethod 
@mock.patch('importlib.import_module') def test_plugin_module_content_none(mock_import, src_dir, plugin_type, - plugin_name, plugin_entry_point_name): + entry_point_module, + entry_point_object): mock_import.return_value = None - manifest = {} - warnings = defaultdict(list) - - with pytest.raises(exceptions.UserError) as err_info: - importer = PluginImporter(src_dir, plugin_name, - plugin_entry_point_name, plugin_type, - False) - manifest, warnings = importer.import_plugin() - - message = str(err_info) - assert warnings.items() > 0 - assert manifest == {} - assert 'Plugin module content is None.' in message + importer = PluginImporter(src_dir, entry_point_module, + entry_point_object, plugin_type, False) + importer.validate_plugin_module() + result = importer.result + + # + # If module_content is None, importer does not perform any validations + # and just does a return. So result should have an empty manifest and + # assert to make sure it is the case. + # + assert result.plugin_manifest == {} @staticmethod @mock.patch('importlib.import_module') def test_plugin_entry_object_none(mock_import, src_dir, plugin_type, plugin_name, plugin_module_content): mock_import.return_value = plugin_module_content - manifest = {} - warnings = defaultdict(list) + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, None, plugin_type, False) - manifest, warnings = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = str(err_info) - assert warnings.items() > 0 - assert manifest == {} + assert result == () assert 'Plugin entry point object is None.' 
in message @staticmethod @@ -68,17 +96,16 @@ def test_plugin_entry_point_nonexistent(mock_import, src_dir, plugin_type, plugin_module_content): entry_point_name = "nonexistent entry point" mock_import.return_value = plugin_module_content - manifest = {} - warnings = defaultdict(list) + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, entry_point_name, plugin_type, False) - manifest, warnings = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert warnings.items() > 0 - assert manifest == {} + assert result == () assert ('\'{}\' is not a symbol in module'.format(entry_point_name) in message) @@ -90,16 +117,139 @@ def test_plugin_object_none(mock_import, src_dir, plugin_type, plugin_name, setattr(plugin_module_content, none_entry_point, None) mock_import.return_value = plugin_module_content - manifest = {} - warnings = defaultdict(list) + result = () with pytest.raises(exceptions.UserError) as err_info: importer = PluginImporter(src_dir, plugin_name, none_entry_point, plugin_type, False) - manifest, warnings = importer.import_plugin() + importer.validate_plugin_module() + result = importer.result message = err_info.value.message - assert warnings.items() > 0 - assert manifest == {} + assert result == () assert ('Plugin object retrieved from the entry point {} is' ' None'.format(none_entry_point)) in message + + @staticmethod + @pytest.mark.parametrize('entry_point,plugin_type', + [('successful:staged', 'STAGED'), + ('successful:direct', 'DIRECT')]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_successful_validation(mock_file_util, plugin_config_file, + fake_src_dir): + mock_file_util.return_value = fake_src_dir + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,plugin_type,expected_errors', + 
[('multiple_warnings:staged', 'STAGED', [ + 'Error: Named argument mismatch in method repository_discovery', + 'Error: Number of arguments do not match in method stop', + 'Error: Named argument mismatch in method stop', + 'Warning: Implementation missing for required method' + ' virtual.mount_specification().', '1 Warning(s). 3 Error(s).' + ]), + ('multiple_warnings:vfiles', 'DIRECT', [ + 'Error: Number of arguments do not match in method status', + 'Error: Named argument mismatch in method status', + 'Warning: Implementation missing for required method' + ' virtual.reconfigure().', '1 Warning(s). 2 Error(s).' + ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_multiple_warnings(mock_file_util, plugin_config_file, + fake_src_dir, expected_errors): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_errors', [('upgrade_warnings:direct', [ + 'Error: Named argument mismatch in method snap_upgrade.', + 'Error: Number of arguments do not match in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + 'Error: Named argument mismatch in method ls_upgrade.', + '0 Warning(s). 4 Error(s).' 
+ ])]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_upgrade_warnings(mock_file_util, plugin_config_file, fake_src_dir, + expected_errors): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + for error in expected_errors: + assert error in message + + @staticmethod + @pytest.mark.parametrize( + 'entry_point,expected_error', + [('op_already_defined:plugin', 'has already been defined'), + ('dec_not_function:plugin', "decorated by 'linked.pre_snapshot()'" + " is not a function"), + ('id_not_string:plugin', "The migration id '['testing', 'out'," + " 'validation']' used in the function" + " 'repo_upgrade' should be a string."), + ('id_bad_format:plugin', "used in the function 'repo_upgrade' does" + " not follow the correct format"), + ('id_used:plugin', "'5.04.000.01' used in the function 'snap_upgrade'" + " has the same canonical form '5.4.0.1' as another migration")]) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_wrapper_failures(mock_file_util, plugin_config_file, fake_src_dir, + expected_error): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.UserError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + assert expected_error in message + assert '0 Warning(s). 1 Error(s).' 
in message + + @staticmethod + @pytest.mark.parametrize('entry_point', ['arbitrary_error:plugin']) + @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') + def test_sdk_error(mock_file_util, plugin_config_file, fake_src_dir): + mock_file_util.return_value = fake_src_dir + + with pytest.raises(exceptions.SDKToolingError) as err_info: + importer = get_plugin_importer(plugin_config_file) + importer.validate_plugin_module() + + message = err_info.value.message + assert ('SDK Error: Got an arbitrary non-platforms error for testing.' + in message) + assert '0 Warning(s). 1 Error(s).' in message + + @staticmethod + @mock.patch('os.path.isabs', return_value=False) + @mock.patch('importlib.import_module') + def test_plugin_info_warn_mode(mock_import, mock_relative_path, + plugin_config_file, src_dir, + plugin_module_content): + plugin_config_content = OrderedDict([ + ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), + ('version', '0.1.0'), ('language', 'PYTHON27'), + ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), + ('manualDiscovery', True), + ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), + ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) + ]) + mock_import.return_value = plugin_module_content + try: + plugin_util.get_plugin_manifest(plugin_config_file, + plugin_config_content, False) + except Exception: + raise AssertionError() diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py index 874bc428..52722bd4 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_plugin_validator.py @@ -1,29 +1,13 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import json -import os -import uuid -from collections import OrderedDict - -from dlpx.virtualization._internal import exceptions, util_classes -from dlpx.virtualization._internal.plugin_validator import PluginValidator -from dlpx.virtualization._internal.util_classes import ValidationMode import mock import pytest - - -@pytest.fixture -def plugin_config_file(tmpdir): - return os.path.join(tmpdir.strpath, 'plugin_config.yml') - - -@pytest.fixture -def src_dir(tmpdir): - tmpdir.mkdir('src') - return os.path.join(tmpdir.strpath, 'src') +from dlpx.virtualization._internal import const, exceptions +from dlpx.virtualization._internal.plugin_validator import PluginValidator class TestPluginValidator: @@ -31,21 +15,12 @@ class TestPluginValidator: @pytest.mark.parametrize( 'schema_content', ['{}\nNOT JSON'.format(json.dumps({'random': 'json'}))]) - def test_plugin_bad_schema(plugin_config_file, schema_file): - plugin_config_content = OrderedDict([ - ('name', 'staged'.encode('utf-8')), - ('prettyName', 'StagedPlugin'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) + def test_plugin_bad_schema(plugin_config_file, plugin_config_content, + schema_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator.from_config_content( - plugin_config_file, plugin_config_content, schema_file, - ValidationMode.ERROR) - validator.validate() + plugin_config_file, plugin_config_content, schema_file) + validator.validate_plugin_config() message = err_info.value.message assert ('Failed to load schemas because {} is not a valid json file.' 
@@ -53,12 +28,12 @@ def test_plugin_bad_schema(plugin_config_file, schema_file): ' (char 19 - 27)'.format(schema_file)) in message @staticmethod + @pytest.mark.parametrize('plugin_config_file', ['/dir/plugin_config.yml']) def test_plugin_bad_config_file(plugin_config_file): with pytest.raises(exceptions.UserError) as err_info: validator = PluginValidator(plugin_config_file, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert message == ("Unable to read plugin config file '{}'" @@ -67,245 +42,132 @@ def test_plugin_bad_config_file(plugin_config_file): @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - def test_plugin_valid_content(mock_import_plugin, mock_relative_path, - src_dir, plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + def test_plugin_valid_content(src_dir, plugin_config_file, + plugin_config_content): validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() - - mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() @staticmethod - def test_plugin_missing_field(plugin_config_file): - plugin_config_content = OrderedDict([ - ('name', 'staged'.encode('utf-8')), ('version', '0.1.0'), - ('language', 'PYTHON27'), ('hostTypes', ['UNIX']), - ('pluginType', 'STAGED'.encode('utf-8')), - 
('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize('src_dir', [None]) + def test_plugin_missing_field(plugin_config_file, plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - @pytest.mark.parametrize('version, expected', [ - pytest.param('xxx', "'xxx' does not match"), - pytest.param('1.0.0', None), - pytest.param('1.0.0_HF', None) - ]) - def test_plugin_version_format(mock_import_plugin, mock_path_is_relative, - src_dir, plugin_config_file, version, - expected): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', version), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize('external_version,expected', + [(1, "1 is not of type 'string'"), + (1.0, "1.0 is not of type 'string'"), + ('my_version', None), ('1.0.0', None), + ('1.0.0_HF', None)]) + def test_plugin_version_format(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() - 
mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - @pytest.mark.parametrize('entry_point, expected', [ - pytest.param('staged_plugin', "'staged_plugin' does not match"), - pytest.param(':staged_plugin', "':staged_plugin' does not match"), - pytest.param('staged:', "'staged:' does not match"), - pytest.param('staged_plugin::staged', - "'staged_plugin::staged' does not match"), - pytest.param(':staged_plugin:staged:', - "':staged_plugin:staged:' does not match"), - pytest.param('staged_plugin:staged', None) - ]) - def test_plugin_entry_point(mock_import_plugin, mock_relative_path, - src_dir, plugin_config_file, entry_point, - expected): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', entry_point.encode('utf-8')), ('srcDir', src_dir), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize( + 'entry_point,expected', + [('staged_plugin', "'staged_plugin' does not match"), + (':staged_plugin', "':staged_plugin' does not match"), + ('staged:', "'staged:' does not match"), + ('staged_plugin::staged', "'staged_plugin::staged' does not match"), + (':staged_plugin:staged:', "':staged_plugin:staged:' does not match"), + ('staged_plugin:staged', None)]) + def test_plugin_entry_point(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - 
validator.validate() - mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod - def test_plugin_additional_properties(src_dir, plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('unknown_key', 'unknown_value'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) + def test_plugin_additional_properties(src_dir, plugin_config_file, + plugin_config_content): + # Adding an unknown key + plugin_config_content['unknown_key'] = 'unknown_value' try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message - assert "Additional properties are not allowed " \ - "('unknown_key' was unexpected)" in message + assert ("Additional properties are not allowed" + " ('unknown_key' was unexpected)" in message) @staticmethod - def test_multiple_validation_errors(plugin_config_file): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['xxx']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize('host_types', [['xxx']]) + @pytest.mark.parametrize('src_dir', [None]) + def 
test_multiple_validation_errors(plugin_config_file, + plugin_config_content): with pytest.raises(exceptions.SchemaValidationError) as err_info: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() message = err_info.value.message assert "'srcDir' is a required property" in message assert "'xxx' is not one of ['UNIX', 'WINDOWS']" in message - @staticmethod - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_staged_plugin(mock_file_util, fake_staged_plugin_config): - src_dir = os.path.dirname(fake_staged_plugin_config) - mock_file_util.return_value = os.path.join(src_dir, 'src/') - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_staged_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() - - message = err_info.value.message - assert validator.result.warnings.items() > 0 - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required method' in message - - @staticmethod - @mock.patch('dlpx.virtualization._internal.file_util.get_src_dir_path') - def test_direct_plugin(mock_file_util, fake_direct_plugin_config): - src_dir = os.path.dirname(fake_direct_plugin_config) - mock_file_util.return_value = os.path.join(src_dir, 'src/') - - with pytest.raises(exceptions.UserError) as err_info: - validator = PluginValidator(fake_direct_plugin_config, - util_classes.PLUGIN_CONFIG_SCHEMA, - ValidationMode.ERROR, True) - validator.validate() - - message = err_info.value.message - assert validator.result.warnings.items() > 0 - assert 'Named argument mismatch in method' in message - assert 'Number of arguments do not match' in message - assert 'Implementation missing for required 
method' in message - @staticmethod @mock.patch('os.path.isabs', return_value=False) - @mock.patch.object(PluginValidator, - '_PluginValidator__import_plugin', - return_value=({}, None)) - @pytest.mark.parametrize('plugin_id , expected', [ - pytest.param('Staged_plugin', "'Staged_plugin' does not match"), - pytest.param('staged_Plugin', "'staged_Plugin' does not match"), - pytest.param('STAGED', "'STAGED' does not match"), - pytest.param('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), - pytest.param('e3b69c61-4c30-44f7-92c0-504c8388b91e', None) - ]) - def test_plugin_id(mock_import_plugin, mock_relative_path, src_dir, - plugin_config_file, plugin_id, expected): - plugin_config_content = OrderedDict([ - ('id', plugin_id.encode('utf-8')), ('name', 'python_vfiles'), - ('version', '1.0.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('srcDir', src_dir), ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - + @pytest.mark.parametrize( + 'plugin_id , expected', + [('Staged_plugin', "'Staged_plugin' does not match"), + ('staged_Plugin', "'staged_Plugin' does not match"), + ('STAGED', "'STAGED' does not match"), + ('E3b69c61-4c30-44f7-92c0-504c8388b91e', None), + ('e3b69c61-4c30-44f7-92c0-504c8388b91e', None)]) + def test_plugin_id(mock_import_plugin, src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, ValidationMode.ERROR) - validator.validate() - mock_import_plugin.assert_called() + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() except exceptions.SchemaValidationError as err_info: message = err_info.message assert expected in message @staticmethod - @pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, ValidationMode.WARNING]) - def 
test_plugin_info_warn_mode(plugin_config_file, validation_mode): - plugin_config_content = OrderedDict([ - ('id', str(uuid.uuid4())), ('name', 'staged'.encode('utf-8')), - ('version', '0.1.0'), ('language', 'PYTHON27'), - ('hostTypes', ['UNIX']), ('pluginType', 'STAGED'.encode('utf-8')), - ('manualDiscovery', True), - ('entryPoint', 'staged_plugin:staged'.encode('utf-8')), - ('schemaFile', 'schema.json'.encode('utf-8')) - ]) - err_info = None + @mock.patch('os.path.isabs', return_value=False) + @pytest.mark.parametrize('build_number, expected', + [('xxx', "'xxx' does not match"), ('1', None), + ('1.x', "'1.x' does not match"), ('1.100', None), + ('0.1.2', None), ('02.5000', None), + (None, "'buildNumber' is a required property"), + ('1.0.0_HF', "'1.0.0_HF' does not match"), + ('0.0.0', "'0.0.0' does not match"), + ('0', "'0' does not match"), + ('0.0.00', "'0.0.00' does not match"), + ('0.1', None)]) + def test_plugin_build_number_format(src_dir, plugin_config_file, + plugin_config_content, expected): try: validator = PluginValidator.from_config_content( plugin_config_file, plugin_config_content, - util_classes.PLUGIN_CONFIG_SCHEMA, validation_mode) - validator.validate() - except Exception as e: - err_info = e - - assert err_info is None + const.PLUGIN_CONFIG_SCHEMA) + validator.validate_plugin_config() + except exceptions.SchemaValidationError as err_info: + message = err_info.message + assert expected in message diff --git a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py index 58cdafd1..2b064b57 100644 --- a/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py +++ b/tools/src/test/python/dlpx/virtualization/_internal/test_schema_validator.py @@ -1,15 +1,13 @@ # -# Copyright (c) 2019 by Delphix. All rights reserved. +# Copyright (c) 2019, 2020 by Delphix. All rights reserved. 
# import json import os -from dlpx.virtualization._internal import exceptions, util_classes -from dlpx.virtualization._internal.schema_validator import SchemaValidator -from dlpx.virtualization._internal.util_classes import ValidationMode - import pytest +from dlpx.virtualization._internal import const, exceptions, plugin_util +from dlpx.virtualization._internal.schema_validator import SchemaValidator class TestSchemaValidator: @@ -19,7 +17,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): f = tmpdir.join(schema_filename) f.write(meta_schema) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, f, ValidationMode.ERROR) + validator = SchemaValidator(schema_file, f) validator.validate() message = err_info.value.message @@ -31,9 +29,7 @@ def test_bad_meta_schema(schema_file, tmpdir, schema_filename): def test_bad_schema_file(schema_file): os.remove(schema_file) with pytest.raises(exceptions.UserError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -43,8 +39,7 @@ def test_bad_schema_file(schema_file): @staticmethod def test_valid_schema(schema_file): - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -69,8 +64,7 @@ def test_missing_root_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -92,9 +86,7 @@ def test_missing_root_type(schema_file): }]) def test_bad_root_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -119,9 +111,7 @@ def test_bad_root_type_num(schema_file): }]) def test_bad_root_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -145,9 +135,7 @@ def test_bad_root_type(schema_file): }]) def test_missing_identity_fields(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -171,9 +159,7 @@ def test_missing_identity_fields(schema_file): }]) def test_missing_name_field(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -200,8 +186,7 @@ def test_missing_sub_type(schema_file): # this test will not raise validation errors even though type # is not specified and will pass. 
# - validator = SchemaValidator(schema_file, util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() @staticmethod @@ -223,9 +208,7 @@ def test_missing_sub_type(schema_file): }]) def test_bad_sub_type(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -250,9 +233,7 @@ def test_bad_sub_type(schema_file): }]) def test_bad_sub_type_num(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -279,9 +260,7 @@ def test_missing_required_field(schema_file): # pytest.skip("required fields validation is not working yet") with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -305,9 +284,7 @@ def test_missing_required_field(schema_file): }]) def test_multiple_validation_errors(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message @@ -315,8 +292,6 @@ def test_multiple_validation_errors(schema_file): assert "'identityFields' is a required property" in message @staticmethod - @pytest.mark.parametrize('validation_mode', - [ValidationMode.INFO, 
ValidationMode.WARNING]) @pytest.mark.parametrize('source_config_definition', [{ 'type': 'object', @@ -333,13 +308,10 @@ def test_multiple_validation_errors(schema_file): 'nameField': 'name', 'identityFields': ['name'] }]) - def test_bad_sub_type_info_warn_mode(schema_file, validation_mode): + def test_bad_sub_type_info_warn_mode(schema_file): err_info = None try: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - validation_mode) - validator.validate() + plugin_util.validate_schema_file(schema_file, False) except Exception as e: err_info = e @@ -378,9 +350,7 @@ def test_bad_sub_type_info_warn_mode(schema_file, validation_mode): }]) def test_bad_type_in_array(schema_file): with pytest.raises(exceptions.SchemaValidationError) as err_info: - validator = SchemaValidator(schema_file, - util_classes.PLUGIN_SCHEMA, - ValidationMode.ERROR) + validator = SchemaValidator(schema_file, const.PLUGIN_SCHEMA) validator.validate() message = err_info.value.message