diff --git a/google/cloud/aiplatform_v1/__init__.py b/google/cloud/aiplatform_v1/__init__.py index df58f80445..b60f8a61af 100644 --- a/google/cloud/aiplatform_v1/__init__.py +++ b/google/cloud/aiplatform_v1/__init__.py @@ -333,6 +333,7 @@ from .types.migration_service import SearchMigratableResourcesResponse from .types.model import Model from .types.model import ModelContainerSpec +from .types.model import ModelSourceInfo from .types.model import Port from .types.model import PredictSchemata from .types.model_deployment_monitoring_job import ( @@ -857,6 +858,7 @@ "ModelMonitoringObjectiveConfig", "ModelMonitoringStatsAnomalies", "ModelServiceClient", + "ModelSourceInfo", "MutateDeployedIndexOperationMetadata", "MutateDeployedIndexRequest", "MutateDeployedIndexResponse", diff --git a/google/cloud/aiplatform_v1/types/__init__.py b/google/cloud/aiplatform_v1/types/__init__.py index be7bf356b8..b8dd87c849 100644 --- a/google/cloud/aiplatform_v1/types/__init__.py +++ b/google/cloud/aiplatform_v1/types/__init__.py @@ -384,6 +384,7 @@ from .model import ( Model, ModelContainerSpec, + ModelSourceInfo, Port, PredictSchemata, ) @@ -885,6 +886,7 @@ "SearchMigratableResourcesResponse", "Model", "ModelContainerSpec", + "ModelSourceInfo", "Port", "PredictSchemata", "ModelDeploymentMonitoringBigQueryTable", diff --git a/google/cloud/aiplatform_v1/types/model.py b/google/cloud/aiplatform_v1/types/model.py index 0dbb2ad510..611a233562 100644 --- a/google/cloud/aiplatform_v1/types/model.py +++ b/google/cloud/aiplatform_v1/types/model.py @@ -30,6 +30,7 @@ "PredictSchemata", "ModelContainerSpec", "Port", + "ModelSourceInfo", }, ) @@ -274,6 +275,11 @@ class Model(proto.Message): Customer-managed encryption key spec for a Model. If set, this Model and all sub-resources of this Model will be secured by this key. + model_source_info (google.cloud.aiplatform_v1.types.ModelSourceInfo): + Output only. Source of a model. 
It can either + be automl training pipeline, custom training + pipeline, BigQuery ML, or existing Vertex AI + Model. """ class DeploymentResourcesType(proto.Enum): @@ -443,6 +449,11 @@ class ExportableContent(proto.Enum): number=24, message=gca_encryption_spec.EncryptionSpec, ) + model_source_info = proto.Field( + proto.MESSAGE, + number=38, + message="ModelSourceInfo", + ) class PredictSchemata(proto.Message): @@ -800,4 +811,26 @@ class Port(proto.Message): ) +class ModelSourceInfo(proto.Message): + r"""Detail description of the source information of the model. + + Attributes: + source_type (google.cloud.aiplatform_v1.types.ModelSourceInfo.ModelSourceType): + Type of the model source. + """ + + class ModelSourceType(proto.Enum): + r"""Source of the model.""" + MODEL_SOURCE_TYPE_UNSPECIFIED = 0 + AUTOML = 1 + CUSTOM = 2 + BQML = 3 + + source_type = proto.Field( + proto.ENUM, + number=1, + enum=ModelSourceType, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/__init__.py b/google/cloud/aiplatform_v1beta1/__init__.py index 6b50c2616b..da3382b749 100644 --- a/google/cloud/aiplatform_v1beta1/__init__.py +++ b/google/cloud/aiplatform_v1beta1/__init__.py @@ -362,6 +362,7 @@ from .types.migration_service import SearchMigratableResourcesResponse from .types.model import Model from .types.model import ModelContainerSpec +from .types.model import ModelSourceInfo from .types.model import Port from .types.model import PredictSchemata from .types.model_deployment_monitoring_job import ( @@ -905,6 +906,7 @@ "ModelMonitoringObjectiveConfig", "ModelMonitoringStatsAnomalies", "ModelServiceClient", + "ModelSourceInfo", "MutateDeployedIndexOperationMetadata", "MutateDeployedIndexRequest", "MutateDeployedIndexResponse", diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py index 9ab57a863e..c00bb559e9 100644 --- 
a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py @@ -218,40 +218,40 @@ def parse_dataset_path(path: str) -> Dict[str, str]: @staticmethod def dataset_path( project: str, - location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}".format( + return "projects/{project}/datasets/{dataset}".format( project=project, - location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", - path, - ) + m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) return m.groupdict() if m else {} @staticmethod def dataset_path( project: str, + location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format( + return "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, + location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod diff --git a/google/cloud/aiplatform_v1beta1/types/__init__.py b/google/cloud/aiplatform_v1beta1/types/__init__.py index 0017b2bebc..06d824de56 100644 --- a/google/cloud/aiplatform_v1beta1/types/__init__.py +++ b/google/cloud/aiplatform_v1beta1/types/__init__.py @@ -407,6 +407,7 @@ from .model import ( Model, ModelContainerSpec, + ModelSourceInfo, Port, PredictSchemata, ) @@ -931,6 +932,7 @@ "SearchMigratableResourcesResponse", "Model", "ModelContainerSpec", + "ModelSourceInfo", "Port", 
"PredictSchemata", "ModelDeploymentMonitoringBigQueryTable", diff --git a/google/cloud/aiplatform_v1beta1/types/model.py b/google/cloud/aiplatform_v1beta1/types/model.py index 28ab5cfda4..1e2a735031 100644 --- a/google/cloud/aiplatform_v1beta1/types/model.py +++ b/google/cloud/aiplatform_v1beta1/types/model.py @@ -30,6 +30,7 @@ "PredictSchemata", "ModelContainerSpec", "Port", + "ModelSourceInfo", }, ) @@ -274,6 +275,11 @@ class Model(proto.Message): Customer-managed encryption key spec for a Model. If set, this Model and all sub-resources of this Model will be secured by this key. + model_source_info (google.cloud.aiplatform_v1beta1.types.ModelSourceInfo): + Output only. Source of a model. It can either + be automl training pipeline, custom training + pipeline, BigQuery ML, or existing Vertex AI + Model. """ class DeploymentResourcesType(proto.Enum): @@ -443,6 +449,11 @@ class ExportableContent(proto.Enum): number=24, message=gca_encryption_spec.EncryptionSpec, ) + model_source_info = proto.Field( + proto.MESSAGE, + number=38, + message="ModelSourceInfo", + ) class PredictSchemata(proto.Message): @@ -800,4 +811,26 @@ class Port(proto.Message): ) +class ModelSourceInfo(proto.Message): + r"""Detail description of the source information of the model. + + Attributes: + source_type (google.cloud.aiplatform_v1beta1.types.ModelSourceInfo.ModelSourceType): + Type of the model source. 
+ """ + + class ModelSourceType(proto.Enum): + r"""Source of the model.""" + MODEL_SOURCE_TYPE_UNSPECIFIED = 0 + AUTOML = 1 + CUSTOM = 2 + BQML = 3 + + source_type = proto.Field( + proto.ENUM, + number=1, + enum=ModelSourceType, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index 8a52fe213b..0185bdabbe 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -2032,22 +2032,19 @@ def test_parse_dataset_path(): def test_dataset_path(): project = "squid" - location = "clam" - dataset = "whelk" - expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( + dataset = "clam" + expected = "projects/{project}/datasets/{dataset}".format( project=project, - location=location, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, location, dataset) + actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "octopus", - "location": "oyster", - "dataset": "nudibranch", + "project": "whelk", + "dataset": "octopus", } path = MigrationServiceClient.dataset_path(**expected) @@ -2057,19 +2054,22 @@ def test_parse_dataset_path(): def test_dataset_path(): - project = "cuttlefish" - dataset = "mussel" - expected = "projects/{project}/datasets/{dataset}".format( + project = "oyster" + location = "nudibranch" + dataset = "cuttlefish" + expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, + location=location, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, dataset) + actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "winkle", + "project": "mussel", + 
"location": "winkle", "dataset": "nautilus", } path = MigrationServiceClient.dataset_path(**expected)