From 1cda4b4833d31c1be44322850c4e0574e3e46e2b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 20 Sep 2022 16:47:33 -0700 Subject: [PATCH] feat: Add support for V1 and V2 classification models for the V1Beta2 API (#1680) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add timestamp_outside_retention_rows_count to ImportFeatureValuesResponse and ImportFeatureValuesOperationMetadata in aiplatform v1beta1 featurestore_service.proto feat: add RemoveContextChildren rpc to aiplatform v1beta1 metadata_service.proto feat: add order_by to ListArtifactsRequest, ListContextsRequest, and ListExecutionsRequest in aiplatform v1beta1 metadata_service.proto feat: add InputArtifact to RuntimeConfig in aiplatform v1beta1 pipeline_job.proto feat: add read_mask to ListPipelineJobsRequest in aiplatform v1beta1 pipeline_service.proto feat: add TransferLearningConfig in aiplatform v1beta1 study.proto PiperOrigin-RevId: 475580307 Source-Link: https://github.com/googleapis/googleapis/commit/dbc83bd75946f2dbbca5fbb2be13d6afbc635b83 Source-Link: https://github.com/googleapis/googleapis-gen/commit/26c120594d6049d40061023cae345da80181077c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjZjMTIwNTk0ZDYwNDlkNDAwNjEwMjNjYWUzNDVkYTgwMTgxMDc3YyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add timestamp_outside_retention_rows_count to ImportFeatureValuesResponse and ImportFeatureValuesOperationMetadata in aiplatform v1 featurestore_service.proto feat: add RemoveContextChildren rpc to aiplatform v1 metadata_service.proto feat: add order_by to ListArtifactsRequest, ListContextsRequest, and ListExecutionsRequest in aiplatform v1 metadata_service.proto PiperOrigin-RevId: 475580702 Source-Link: https://github.com/googleapis/googleapis/commit/af65a1984fc94c2de4080d0e068a6623d9ae7169 Source-Link: https://github.com/googleapis/googleapis-gen/commit/023d431569a53f9b5cd0d47c6af85b8a46cea5e8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDIzZDQzMTU2OWE1M2Y5YjVjZDBkNDdjNmFmODViOGE0NmNlYTVlOCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add support for V1 and V2 classification models for the V1Beta2 API PiperOrigin-RevId: 475604619 Source-Link: https://github.com/googleapis/googleapis/commit/044a15c14b1a1939684ad271c13ac84c5ac6a2c7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/410020af934c7248f7804770d6f8ec4571bfa551 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDEwMDIwYWY5MzRjNzI0OGY3ODA0NzcwZDZmOGVjNDU3MWJmYTU1MSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- aiplatform-v1beta1-py.tar.gz | 0 google/cloud/aiplatform_v1/__init__.py | 4 + .../cloud/aiplatform_v1/gapic_metadata.json | 10 + .../services/metadata_service/async_client.py | 116 ++++++++ .../services/metadata_service/client.py | 116 ++++++++ .../metadata_service/transports/base.py | 17 ++ .../metadata_service/transports/grpc.py | 31 +++ .../transports/grpc_asyncio.py | 31 +++ google/cloud/aiplatform_v1/types/__init__.py | 4 + .../types/batch_prediction_job.py | 2 +- .../cloud/aiplatform_v1/types/custom_job.py | 1 - .../types/featurestore_service.py | 16 ++ .../cloud/aiplatform_v1/types/job_service.py | 25 ++ 
.../aiplatform_v1/types/metadata_service.py | 69 +++++ .../aiplatform_v1/types/pipeline_service.py | 3 + google/cloud/aiplatform_v1beta1/__init__.py | 4 + .../aiplatform_v1beta1/gapic_metadata.json | 10 + .../featurestore_service/async_client.py | 6 +- .../services/featurestore_service/client.py | 6 +- .../services/metadata_service/async_client.py | 116 ++++++++ .../services/metadata_service/client.py | 116 ++++++++ .../metadata_service/transports/base.py | 17 ++ .../metadata_service/transports/grpc.py | 31 +++ .../transports/grpc_asyncio.py | 31 +++ .../services/migration_service/client.py | 18 +- .../aiplatform_v1beta1/types/__init__.py | 4 + .../types/batch_prediction_job.py | 2 +- .../aiplatform_v1beta1/types/custom_job.py | 1 - .../types/featurestore_service.py | 68 +++++ .../aiplatform_v1beta1/types/job_service.py | 25 ++ .../types/metadata_service.py | 69 +++++ .../aiplatform_v1beta1/types/pipeline_job.py | 32 +++ .../types/pipeline_service.py | 10 + .../cloud/aiplatform_v1beta1/types/study.py | 34 +++ ...a_service_remove_context_children_async.py | 52 ++++ ...ta_service_remove_context_children_sync.py | 52 ++++ ...a_service_remove_context_children_async.py | 52 ++++ ...ta_service_remove_context_children_sync.py | 52 ++++ .../snippet_metadata_aiplatform_v1.json | 169 ++++++++++++ .../snippet_metadata_aiplatform_v1beta1.json | 169 ++++++++++++ .../aiplatform_v1/test_metadata_service.py | 252 ++++++++++++++++++ .../test_featurestore_service.py | 1 + .../test_metadata_service.py | 252 ++++++++++++++++++ .../test_migration_service.py | 38 +-- 44 files changed, 2098 insertions(+), 36 deletions(-) create mode 100644 aiplatform-v1beta1-py.tar.gz create mode 100644 samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_async.py create mode 100644 samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_sync.py create mode 100644 samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_async.py create mode 100644 samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_sync.py diff --git a/aiplatform-v1beta1-py.tar.gz b/aiplatform-v1beta1-py.tar.gz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/google/cloud/aiplatform_v1/__init__.py b/google/cloud/aiplatform_v1/__init__.py index 549f3f399c..df58f80445 100644 --- a/google/cloud/aiplatform_v1/__init__.py +++ b/google/cloud/aiplatform_v1/__init__.py @@ -317,6 +317,8 @@ from .types.metadata_service import QueryArtifactLineageSubgraphRequest from .types.metadata_service import QueryContextLineageSubgraphRequest from .types.metadata_service import QueryExecutionInputsAndOutputsRequest +from .types.metadata_service import RemoveContextChildrenRequest +from .types.metadata_service import RemoveContextChildrenResponse from .types.metadata_service import UpdateArtifactRequest from .types.metadata_service import UpdateContextRequest from .types.metadata_service import UpdateExecutionRequest @@ -898,6 +900,8 @@ "ReadTensorboardBlobDataResponse", "ReadTensorboardTimeSeriesDataRequest", "ReadTensorboardTimeSeriesDataResponse", + "RemoveContextChildrenRequest", + "RemoveContextChildrenResponse", "RemoveDatapointsRequest", "RemoveDatapointsResponse", "ResourcesConsumed", diff --git a/google/cloud/aiplatform_v1/gapic_metadata.json b/google/cloud/aiplatform_v1/gapic_metadata.json index ffbf9eb765..865ac4781a 100644 --- a/google/cloud/aiplatform_v1/gapic_metadata.json +++ 
b/google/cloud/aiplatform_v1/gapic_metadata.json @@ -1078,6 +1078,11 @@ "query_execution_inputs_and_outputs" ] }, + "RemoveContextChildren": { + "methods": [ + "remove_context_children" + ] + }, "UpdateArtifact": { "methods": [ "update_artifact" @@ -1238,6 +1243,11 @@ "query_execution_inputs_and_outputs" ] }, + "RemoveContextChildren": { + "methods": [ + "remove_context_children" + ] + }, "UpdateArtifact": { "methods": [ "update_artifact" diff --git a/google/cloud/aiplatform_v1/services/metadata_service/async_client.py b/google/cloud/aiplatform_v1/services/metadata_service/async_client.py index 79ead6d7f1..5d47b5b02f 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/async_client.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/async_client.py @@ -2392,6 +2392,122 @@ async def sample_add_context_children(): # Done; return the response. return response + async def remove_context_children( + self, + request: Union[metadata_service.RemoveContextChildrenRequest, dict] = None, + *, + context: str = None, + child_contexts: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_service.RemoveContextChildrenResponse: + r"""Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import aiplatform_v1 + + async def sample_remove_context_children(): + # Create a client + client = aiplatform_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = await client.remove_context_children(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1.types.RemoveContextChildrenRequest, dict]): + The request object. Request message for + [MetadataService.DeleteContextChildrenRequest][]. + context (:class:`str`): + Required. The resource name of the parent Context. + + Format: + ``projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`` + + This corresponds to the ``context`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + child_contexts (:class:`Sequence[str]`): + The resource names of the child + Contexts. + + This corresponds to the ``child_contexts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1.types.RemoveContextChildrenResponse: + Response message for + [MetadataService.RemoveContextChildren][google.cloud.aiplatform.v1.MetadataService.RemoveContextChildren]. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([context, child_contexts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metadata_service.RemoveContextChildrenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if context is not None: + request.context = context + if child_contexts: + request.child_contexts.extend(child_contexts) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.remove_context_children, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("context", request.context),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def query_context_lineage_subgraph( self, request: Union[ diff --git a/google/cloud/aiplatform_v1/services/metadata_service/client.py b/google/cloud/aiplatform_v1/services/metadata_service/client.py index ff75a8311e..07a31e6775 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/client.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/client.py @@ -2705,6 +2705,122 @@ def sample_add_context_children(): # Done; return the response. return response + def remove_context_children( + self, + request: Union[metadata_service.RemoveContextChildrenRequest, dict] = None, + *, + context: str = None, + child_contexts: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_service.RemoveContextChildrenResponse: + r"""Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import aiplatform_v1 + + def sample_remove_context_children(): + # Create a client + client = aiplatform_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = client.remove_context_children(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1.types.RemoveContextChildrenRequest, dict]): + The request object. Request message for + [MetadataService.DeleteContextChildrenRequest][]. + context (str): + Required. The resource name of the parent Context. 
+ + Format: + ``projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`` + + This corresponds to the ``context`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + child_contexts (Sequence[str]): + The resource names of the child + Contexts. + + This corresponds to the ``child_contexts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1.types.RemoveContextChildrenResponse: + Response message for + [MetadataService.RemoveContextChildren][google.cloud.aiplatform.v1.MetadataService.RemoveContextChildren]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([context, child_contexts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metadata_service.RemoveContextChildrenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metadata_service.RemoveContextChildrenRequest): + request = metadata_service.RemoveContextChildrenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if context is not None: + request.context = context + if child_contexts is not None: + request.child_contexts = child_contexts + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_context_children] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("context", request.context),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def query_context_lineage_subgraph( self, request: Union[ diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py index b3453f9df2..460dc89049 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/base.py @@ -233,6 +233,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.remove_context_children: gapic_v1.method.wrap_method( + self.remove_context_children, + default_timeout=None, + client_info=client_info, + ), self.query_context_lineage_subgraph: gapic_v1.method.wrap_method( self.query_context_lineage_subgraph, default_timeout=None, @@ -491,6 +496,18 @@ def add_context_children( ]: raise NotImplementedError() + @property + def remove_context_children( + self, + ) -> Callable[ + [metadata_service.RemoveContextChildrenRequest], + Union[ + metadata_service.RemoveContextChildrenResponse, + Awaitable[metadata_service.RemoveContextChildrenResponse], + ], + ]: + raise NotImplementedError() + @property def query_context_lineage_subgraph( self, diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py index 16a87c42c1..0bd02c2850 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc.py @@ -758,6 +758,37 @@ def add_context_children( ) return self._stubs["add_context_children"] + @property + def remove_context_children( + self, + ) -> Callable[ + [metadata_service.RemoveContextChildrenRequest], + metadata_service.RemoveContextChildrenResponse, + ]: + r"""Return a callable for the remove context children method over gRPC. + + Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + Returns: + Callable[[~.RemoveContextChildrenRequest], + ~.RemoveContextChildrenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "remove_context_children" not in self._stubs: + self._stubs["remove_context_children"] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/RemoveContextChildren", + request_serializer=metadata_service.RemoveContextChildrenRequest.serialize, + response_deserializer=metadata_service.RemoveContextChildrenResponse.deserialize, + ) + return self._stubs["remove_context_children"] + @property def query_context_lineage_subgraph( self, diff --git a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py index c7a4f722b3..c088dca078 100644 --- a/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1/services/metadata_service/transports/grpc_asyncio.py @@ -784,6 +784,37 @@ def add_context_children( ) return self._stubs["add_context_children"] + @property + def remove_context_children( + self, + ) -> Callable[ + [metadata_service.RemoveContextChildrenRequest], + Awaitable[metadata_service.RemoveContextChildrenResponse], + ]: + r"""Return a callable for the remove context children method over gRPC. + + Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + Returns: + Callable[[~.RemoveContextChildrenRequest], + Awaitable[~.RemoveContextChildrenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "remove_context_children" not in self._stubs: + self._stubs["remove_context_children"] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1.MetadataService/RemoveContextChildren", + request_serializer=metadata_service.RemoveContextChildrenRequest.serialize, + response_deserializer=metadata_service.RemoveContextChildrenResponse.deserialize, + ) + return self._stubs["remove_context_children"] + @property def query_context_lineage_subgraph( self, diff --git a/google/cloud/aiplatform_v1/types/__init__.py b/google/cloud/aiplatform_v1/types/__init__.py index 07fe8eeca8..be7bf356b8 100644 --- a/google/cloud/aiplatform_v1/types/__init__.py +++ b/google/cloud/aiplatform_v1/types/__init__.py @@ -360,6 +360,8 @@ QueryArtifactLineageSubgraphRequest, QueryContextLineageSubgraphRequest, QueryExecutionInputsAndOutputsRequest, + RemoveContextChildrenRequest, + RemoveContextChildrenResponse, UpdateArtifactRequest, UpdateContextRequest, UpdateExecutionRequest, @@ -867,6 +869,8 @@ "QueryArtifactLineageSubgraphRequest", "QueryContextLineageSubgraphRequest", "QueryExecutionInputsAndOutputsRequest", + "RemoveContextChildrenRequest", + "RemoveContextChildrenResponse", "UpdateArtifactRequest", "UpdateContextRequest", "UpdateExecutionRequest", diff --git a/google/cloud/aiplatform_v1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1/types/batch_prediction_job.py index a98d8feb11..8d472beafb 100644 --- a/google/cloud/aiplatform_v1/types/batch_prediction_job.py +++ b/google/cloud/aiplatform_v1/types/batch_prediction_job.py @@ -57,7 +57,7 @@ class BatchPredictionJob(proto.Message): Required. The user-defined name of this BatchPredictionJob. 
model (str): - The name of the Model resoure that produces the predictions + The name of the Model resource that produces the predictions via this job, must share the same ancestor Location. Starting this job has no impact on any existing deployments of the Model and their resources. Exactly one of model and diff --git a/google/cloud/aiplatform_v1/types/custom_job.py b/google/cloud/aiplatform_v1/types/custom_job.py index 2b33bbf119..ea4c102339 100644 --- a/google/cloud/aiplatform_v1/types/custom_job.py +++ b/google/cloud/aiplatform_v1/types/custom_job.py @@ -165,7 +165,6 @@ class CustomJob(proto.Message): class CustomJobSpec(proto.Message): r"""Represents the spec of a CustomJob. - Next Id: 15 Attributes: worker_pool_specs (Sequence[google.cloud.aiplatform_v1.types.WorkerPoolSpec]): diff --git a/google/cloud/aiplatform_v1/types/featurestore_service.py b/google/cloud/aiplatform_v1/types/featurestore_service.py index 0fb47545cb..f505c7fd34 100644 --- a/google/cloud/aiplatform_v1/types/featurestore_service.py +++ b/google/cloud/aiplatform_v1/types/featurestore_service.py @@ -461,6 +461,10 @@ class ImportFeatureValuesResponse(proto.Message): - Having a null entityId. - Having a null timestamp. - Not being parsable (applicable for CSV sources). + timestamp_outside_retention_rows_count (int): + The number rows that weren't ingested due to + having feature timestamps outside the retention + boundary. """ imported_entity_count = proto.Field( @@ -475,6 +479,10 @@ class ImportFeatureValuesResponse(proto.Message): proto.INT64, number=6, ) + timestamp_outside_retention_rows_count = proto.Field( + proto.INT64, + number=4, + ) class BatchReadFeatureValuesRequest(proto.Message): @@ -1564,6 +1572,10 @@ class ImportFeatureValuesOperationMetadata(proto.Message): - Having a null entityId. - Having a null timestamp. - Not being parsable (applicable for CSV sources). + timestamp_outside_retention_rows_count (int): + The number rows that weren't ingested due to + having timestamps outside the retention + boundary. """ generic_metadata = proto.Field( @@ -1583,6 +1595,10 @@ class ImportFeatureValuesOperationMetadata(proto.Message): proto.INT64, number=6, ) + timestamp_outside_retention_rows_count = proto.Field( + proto.INT64, + number=7, + ) class ExportFeatureValuesOperationMetadata(proto.Message): diff --git a/google/cloud/aiplatform_v1/types/job_service.py b/google/cloud/aiplatform_v1/types/job_service.py index 7d2e5140d0..c70166d522 100644 --- a/google/cloud/aiplatform_v1/types/job_service.py +++ b/google/cloud/aiplatform_v1/types/job_service.py @@ -133,6 +133,9 @@ class ListCustomJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -140,6 +143,8 @@ class ListCustomJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -294,6 +299,9 @@ class ListDataLabelingJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. 
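# Minimal usage sketch of the ``labels`` filter syntax documented above, applied to
# the existing list_custom_jobs call; the project and location are placeholders and
# the "team" label key is hypothetical.
from google.cloud import aiplatform_v1

def list_labeled_custom_jobs_example():
    client = aiplatform_v1.JobServiceClient()
    request = aiplatform_v1.ListCustomJobsRequest(
        parent="projects/my-project/locations/us-central1",
        # ``labels.key:*`` keeps only jobs that carry the given label key.
        filter="labels.team:*",
    )
    for job in client.list_custom_jobs(request=request):
        print(job.display_name, job.state)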
+ - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -301,6 +309,8 @@ class ListDataLabelingJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -466,6 +476,9 @@ class ListHyperparameterTuningJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -473,6 +486,8 @@ class ListHyperparameterTuningJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -635,6 +650,9 @@ class ListBatchPredictionJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -642,6 +660,8 @@ class ListBatchPredictionJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -931,6 +951,9 @@ class ListModelDeploymentMonitoringJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -938,6 +961,8 @@ class ListModelDeploymentMonitoringJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): diff --git a/google/cloud/aiplatform_v1/types/metadata_service.py b/google/cloud/aiplatform_v1/types/metadata_service.py index 30a48e00f3..37d42b7aed 100644 --- a/google/cloud/aiplatform_v1/types/metadata_service.py +++ b/google/cloud/aiplatform_v1/types/metadata_service.py @@ -57,6 +57,8 @@ "AddContextArtifactsAndExecutionsResponse", "AddContextChildrenRequest", "AddContextChildrenResponse", + "RemoveContextChildrenRequest", + "RemoveContextChildrenResponse", "QueryContextLineageSubgraphRequest", "CreateExecutionRequest", "GetExecutionRequest", @@ -361,6 +363,14 @@ class ListArtifactsRequest(proto.Message): For example: ``display_name = "test" AND metadata.field1.bool_value = true``. + order_by (str): + How the list of messages is ordered. Specify the values to + order by and an ordering operation. The default sorting + order is ascending. To specify descending order for a field, + users append a " desc" suffix; for example: "foo desc, bar". 
+ Subfields are specified with a ``.`` character, such as + foo.bar. see https://google.aip.dev/132#ordering for more + details. """ parent = proto.Field( @@ -379,6 +389,10 @@ class ListArtifactsRequest(proto.Message): proto.STRING, number=4, ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListArtifactsResponse(proto.Message): @@ -657,6 +671,14 @@ class ListContextsRequest(proto.Message): For example: ``display_name = "test" AND metadata.field1.bool_value = true``. + order_by (str): + How the list of messages is ordered. Specify the values to + order by and an ordering operation. The default sorting + order is ascending. To specify descending order for a field, + users append a " desc" suffix; for example: "foo desc, bar". + Subfields are specified with a ``.`` character, such as + foo.bar. see https://google.aip.dev/132#ordering for more + details. """ parent = proto.Field( @@ -675,6 +697,10 @@ class ListContextsRequest(proto.Message): proto.STRING, number=4, ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListContextsResponse(proto.Message): @@ -923,6 +949,37 @@ class AddContextChildrenResponse(proto.Message): """ +class RemoveContextChildrenRequest(proto.Message): + r"""Request message for + [MetadataService.DeleteContextChildrenRequest][]. + + Attributes: + context (str): + Required. The resource name of the parent Context. + + Format: + ``projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`` + child_contexts (Sequence[str]): + The resource names of the child Contexts. + """ + + context = proto.Field( + proto.STRING, + number=1, + ) + child_contexts = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class RemoveContextChildrenResponse(proto.Message): + r"""Response message for + [MetadataService.RemoveContextChildren][google.cloud.aiplatform.v1.MetadataService.RemoveContextChildren]. + + """ + + class QueryContextLineageSubgraphRequest(proto.Message): r"""Request message for [MetadataService.QueryContextLineageSubgraph][google.cloud.aiplatform.v1.MetadataService.QueryContextLineageSubgraph]. @@ -1048,6 +1105,14 @@ class ListExecutionsRequest(proto.Message): Each of the above supported filters can be combined together using logical operators (``AND`` & ``OR``). For example: ``display_name = "test" AND metadata.field1.bool_value = true``. + order_by (str): + How the list of messages is ordered. Specify the values to + order by and an ordering operation. The default sorting + order is ascending. To specify descending order for a field, + users append a " desc" suffix; for example: "foo desc, bar". + Subfields are specified with a ``.`` character, such as + foo.bar. see https://google.aip.dev/132#ordering for more + details. """ parent = proto.Field( @@ -1066,6 +1131,10 @@ class ListExecutionsRequest(proto.Message): proto.STRING, number=4, ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListExecutionsResponse(proto.Message): diff --git a/google/cloud/aiplatform_v1/types/pipeline_service.py b/google/cloud/aiplatform_v1/types/pipeline_service.py index 376c4fb520..b18d477480 100644 --- a/google/cloud/aiplatform_v1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1/types/pipeline_service.py @@ -101,6 +101,9 @@ class ListTrainingPipelinesRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. 
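# Minimal usage sketch of the new ``order_by`` field on ListArtifactsRequest; the
# parent metadata store is a placeholder and ``create_time`` is assumed to be a
# sortable Artifact field.
from google.cloud import aiplatform_v1

def list_recent_artifacts_example():
    client = aiplatform_v1.MetadataServiceClient()
    request = aiplatform_v1.ListArtifactsRequest(
        parent="projects/my-project/locations/us-central1/metadataStores/default",
        # A " desc" suffix requests descending order, per the docstring above.
        order_by="create_time desc",
    )
    for artifact in client.list_artifacts(request=request):
        print(artifact.name)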
+ - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: diff --git a/google/cloud/aiplatform_v1beta1/__init__.py b/google/cloud/aiplatform_v1beta1/__init__.py index 2cd5443ea9..6b50c2616b 100644 --- a/google/cloud/aiplatform_v1beta1/__init__.py +++ b/google/cloud/aiplatform_v1beta1/__init__.py @@ -346,6 +346,8 @@ from .types.metadata_service import QueryArtifactLineageSubgraphRequest from .types.metadata_service import QueryContextLineageSubgraphRequest from .types.metadata_service import QueryExecutionInputsAndOutputsRequest +from .types.metadata_service import RemoveContextChildrenRequest +from .types.metadata_service import RemoveContextChildrenResponse from .types.metadata_service import UpdateArtifactRequest from .types.metadata_service import UpdateContextRequest from .types.metadata_service import UpdateExecutionRequest @@ -949,6 +951,8 @@ "ReadTensorboardBlobDataResponse", "ReadTensorboardTimeSeriesDataRequest", "ReadTensorboardTimeSeriesDataResponse", + "RemoveContextChildrenRequest", + "RemoveContextChildrenResponse", "RemoveDatapointsRequest", "RemoveDatapointsResponse", "ResourcesConsumed", diff --git a/google/cloud/aiplatform_v1beta1/gapic_metadata.json b/google/cloud/aiplatform_v1beta1/gapic_metadata.json index 4c7581b4f8..eec04fcb19 100644 --- a/google/cloud/aiplatform_v1beta1/gapic_metadata.json +++ b/google/cloud/aiplatform_v1beta1/gapic_metadata.json @@ -1162,6 +1162,11 @@ "query_execution_inputs_and_outputs" ] }, + "RemoveContextChildren": { + "methods": [ + "remove_context_children" + ] + }, "UpdateArtifact": { "methods": [ "update_artifact" @@ -1322,6 +1327,11 @@ "query_execution_inputs_and_outputs" ] }, + "RemoveContextChildren": { + "methods": [ + "remove_context_children" + ] + }, "UpdateArtifact": { "methods": [ "update_artifact" diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py index 6516af0ce1..df5d78ec8e 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/async_client.py @@ -2788,8 +2788,10 @@ async def sample_delete_feature_values(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.DeleteFeatureValuesResponse` Response message for - [FeaturestoreService.DeleteFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeatureValues]. + The result type for the operation will be + :class:`google.cloud.aiplatform_v1beta1.types.DeleteFeatureValuesResponse` + Response message for + [FeaturestoreService.DeleteFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeatureValues]. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py b/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py index 93355f0edc..f9de3ea53f 100644 --- a/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/featurestore_service/client.py @@ -3056,8 +3056,10 @@ def sample_delete_feature_values(): google.api_core.operation.Operation: An object representing a long-running operation. 
- The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.DeleteFeatureValuesResponse` Response message for - [FeaturestoreService.DeleteFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeatureValues]. + The result type for the operation will be + :class:`google.cloud.aiplatform_v1beta1.types.DeleteFeatureValuesResponse` + Response message for + [FeaturestoreService.DeleteFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeatureValues]. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py index 9a98e4380a..6a7f0d25e2 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py @@ -2392,6 +2392,122 @@ async def sample_add_context_children(): # Done; return the response. return response + async def remove_context_children( + self, + request: Union[metadata_service.RemoveContextChildrenRequest, dict] = None, + *, + context: str = None, + child_contexts: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_service.RemoveContextChildrenResponse: + r"""Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import aiplatform_v1beta1 + + async def sample_remove_context_children(): + # Create a client + client = aiplatform_v1beta1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = await client.remove_context_children(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenRequest, dict]): + The request object. Request message for + [MetadataService.DeleteContextChildrenRequest][]. + context (:class:`str`): + Required. The resource name of the parent Context. + + Format: + ``projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`` + + This corresponds to the ``context`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + child_contexts (:class:`Sequence[str]`): + The resource names of the child + Contexts. + + This corresponds to the ``child_contexts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenResponse: + Response message for + [MetadataService.RemoveContextChildren][google.cloud.aiplatform.v1beta1.MetadataService.RemoveContextChildren]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([context, child_contexts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metadata_service.RemoveContextChildrenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if context is not None: + request.context = context + if child_contexts: + request.child_contexts.extend(child_contexts) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.remove_context_children, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("context", request.context),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def query_context_lineage_subgraph( self, request: Union[ diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py index ca72ed8a3a..e4aed060c6 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/client.py @@ -2705,6 +2705,122 @@ def sample_add_context_children(): # Done; return the response. return response + def remove_context_children( + self, + request: Union[metadata_service.RemoveContextChildrenRequest, dict] = None, + *, + context: str = None, + child_contexts: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_service.RemoveContextChildrenResponse: + r"""Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import aiplatform_v1beta1 + + def sample_remove_context_children(): + # Create a client + client = aiplatform_v1beta1.MetadataServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = client.remove_context_children(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenRequest, dict]): + The request object. 
Request message for + [MetadataService.DeleteContextChildrenRequest][]. + context (str): + Required. The resource name of the parent Context. + + Format: + ``projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`` + + This corresponds to the ``context`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + child_contexts (Sequence[str]): + The resource names of the child + Contexts. + + This corresponds to the ``child_contexts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenResponse: + Response message for + [MetadataService.RemoveContextChildren][google.cloud.aiplatform.v1beta1.MetadataService.RemoveContextChildren]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([context, child_contexts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metadata_service.RemoveContextChildrenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metadata_service.RemoveContextChildrenRequest): + request = metadata_service.RemoveContextChildrenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if context is not None: + request.context = context + if child_contexts is not None: + request.child_contexts = child_contexts + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.remove_context_children] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("context", request.context),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def query_context_lineage_subgraph( self, request: Union[ diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py index 69ea3c50e8..2a27942e44 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/base.py @@ -233,6 +233,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=5.0, client_info=client_info, ), + self.remove_context_children: gapic_v1.method.wrap_method( + self.remove_context_children, + default_timeout=None, + client_info=client_info, + ), self.query_context_lineage_subgraph: gapic_v1.method.wrap_method( self.query_context_lineage_subgraph, default_timeout=5.0, @@ -491,6 +496,18 @@ def add_context_children( ]: raise NotImplementedError() + @property + def remove_context_children( + self, + ) -> Callable[ + [metadata_service.RemoveContextChildrenRequest], + Union[ + metadata_service.RemoveContextChildrenResponse, + Awaitable[metadata_service.RemoveContextChildrenResponse], + ], + ]: + raise NotImplementedError() + @property def query_context_lineage_subgraph( self, diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py index f4912c7638..cfc117883d 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py @@ -758,6 +758,37 @@ def add_context_children( ) return self._stubs["add_context_children"] + @property + def remove_context_children( + self, + ) -> Callable[ + [metadata_service.RemoveContextChildrenRequest], + metadata_service.RemoveContextChildrenResponse, + ]: + r"""Return a callable for the remove context children method over gRPC. + + Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + Returns: + Callable[[~.RemoveContextChildrenRequest], + ~.RemoveContextChildrenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "remove_context_children" not in self._stubs: + self._stubs["remove_context_children"] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/RemoveContextChildren", + request_serializer=metadata_service.RemoveContextChildrenRequest.serialize, + response_deserializer=metadata_service.RemoveContextChildrenResponse.deserialize, + ) + return self._stubs["remove_context_children"] + @property def query_context_lineage_subgraph( self, diff --git a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py index 48d0ba2e52..776ee1d5bf 100644 --- a/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py @@ -784,6 +784,37 @@ def add_context_children( ) return self._stubs["add_context_children"] + @property + def remove_context_children( + self, + ) -> Callable[ + [metadata_service.RemoveContextChildrenRequest], + Awaitable[metadata_service.RemoveContextChildrenResponse], + ]: + r"""Return a callable for the remove context children method over gRPC. + + Remove a set of children contexts from a parent + Context. If any of the child Contexts were NOT added to + the parent Context, they are simply skipped. + + Returns: + Callable[[~.RemoveContextChildrenRequest], + Awaitable[~.RemoveContextChildrenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "remove_context_children" not in self._stubs: + self._stubs["remove_context_children"] = self.grpc_channel.unary_unary( + "/google.cloud.aiplatform.v1beta1.MetadataService/RemoveContextChildren", + request_serializer=metadata_service.RemoveContextChildrenRequest.serialize, + response_deserializer=metadata_service.RemoveContextChildrenResponse.deserialize, + ) + return self._stubs["remove_context_children"] + @property def query_context_lineage_subgraph( self, diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py index 6461c44027..9ab57a863e 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py @@ -196,18 +196,23 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]: @staticmethod def dataset_path( project: str, + location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format( + return "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, + location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/datasets/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod @@ -235,23 +240,18 @@ def parse_dataset_path(path: str) -> Dict[str, str]: @staticmethod def dataset_path( project: str, - location: str, dataset: str, ) -> str: """Returns a fully-qualified dataset string.""" - return 
"projects/{project}/locations/{location}/datasets/{dataset}".format( + return "projects/{project}/datasets/{dataset}".format( project=project, - location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: """Parses a dataset path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/datasets/(?P.+?)$", - path, - ) + m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod diff --git a/google/cloud/aiplatform_v1beta1/types/__init__.py b/google/cloud/aiplatform_v1beta1/types/__init__.py index 17b0985b59..0017b2bebc 100644 --- a/google/cloud/aiplatform_v1beta1/types/__init__.py +++ b/google/cloud/aiplatform_v1beta1/types/__init__.py @@ -383,6 +383,8 @@ QueryArtifactLineageSubgraphRequest, QueryContextLineageSubgraphRequest, QueryExecutionInputsAndOutputsRequest, + RemoveContextChildrenRequest, + RemoveContextChildrenResponse, UpdateArtifactRequest, UpdateContextRequest, UpdateExecutionRequest, @@ -913,6 +915,8 @@ "QueryArtifactLineageSubgraphRequest", "QueryContextLineageSubgraphRequest", "QueryExecutionInputsAndOutputsRequest", + "RemoveContextChildrenRequest", + "RemoveContextChildrenResponse", "UpdateArtifactRequest", "UpdateContextRequest", "UpdateExecutionRequest", diff --git a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py index bc046037b0..998a558ac8 100644 --- a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py +++ b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py @@ -61,7 +61,7 @@ class BatchPredictionJob(proto.Message): Required. The user-defined name of this BatchPredictionJob. model (str): - The name of the Model resoure that produces the predictions + The name of the Model resource that produces the predictions via this job, must share the same ancestor Location. Starting this job has no impact on any existing deployments of the Model and their resources. Exactly one of model and diff --git a/google/cloud/aiplatform_v1beta1/types/custom_job.py b/google/cloud/aiplatform_v1beta1/types/custom_job.py index 4cbb27b616..11204de285 100644 --- a/google/cloud/aiplatform_v1beta1/types/custom_job.py +++ b/google/cloud/aiplatform_v1beta1/types/custom_job.py @@ -165,7 +165,6 @@ class CustomJob(proto.Message): class CustomJobSpec(proto.Message): r"""Represents the spec of a CustomJob. - Next Id: 15 Attributes: worker_pool_specs (Sequence[google.cloud.aiplatform_v1beta1.types.WorkerPoolSpec]): diff --git a/google/cloud/aiplatform_v1beta1/types/featurestore_service.py b/google/cloud/aiplatform_v1beta1/types/featurestore_service.py index 76872d8ec7..ddeea2ccd1 100644 --- a/google/cloud/aiplatform_v1beta1/types/featurestore_service.py +++ b/google/cloud/aiplatform_v1beta1/types/featurestore_service.py @@ -25,6 +25,7 @@ from google.cloud.aiplatform_v1beta1.types import operation from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import interval_pb2 # type: ignore __protobuf__ = proto.module( @@ -467,6 +468,10 @@ class ImportFeatureValuesResponse(proto.Message): - Having a null entityId. - Having a null timestamp. - Not being parsable (applicable for CSV sources). + timestamp_outside_retention_rows_count (int): + The number rows that weren't ingested due to + having feature timestamps outside the retention + boundary. 
""" imported_entity_count = proto.Field( @@ -481,6 +486,10 @@ class ImportFeatureValuesResponse(proto.Message): proto.INT64, number=6, ) + timestamp_outside_retention_rows_count = proto.Field( + proto.INT64, + number=4, + ) class BatchReadFeatureValuesRequest(proto.Message): @@ -1570,6 +1579,10 @@ class ImportFeatureValuesOperationMetadata(proto.Message): - Having a null entityId. - Having a null timestamp. - Not being parsable (applicable for CSV sources). + timestamp_outside_retention_rows_count (int): + The number rows that weren't ingested due to + having timestamps outside the retention + boundary. """ generic_metadata = proto.Field( @@ -1589,6 +1602,10 @@ class ImportFeatureValuesOperationMetadata(proto.Message): proto.INT64, number=6, ) + timestamp_outside_retention_rows_count = proto.Field( + proto.INT64, + number=7, + ) class ExportFeatureValuesOperationMetadata(proto.Message): @@ -1688,6 +1705,10 @@ class DeleteFeatureValuesRequest(proto.Message): r"""Request message for [FeaturestoreService.DeleteFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.DeleteFeatureValues]. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -1696,6 +1717,11 @@ class DeleteFeatureValuesRequest(proto.Message): Select feature values to be deleted by specifying entities. + This field is a member of `oneof`_ ``DeleteOption``. + select_time_range_and_feature (google.cloud.aiplatform_v1beta1.types.DeleteFeatureValuesRequest.SelectTimeRangeAndFeature): + Select feature values to be deleted by + specifying time range and features. + This field is a member of `oneof`_ ``DeleteOption``. entity_type (str): Required. The resource name of the EntityType grouping the @@ -1722,12 +1748,54 @@ class SelectEntity(proto.Message): message="EntityIdSelector", ) + class SelectTimeRangeAndFeature(proto.Message): + r"""Message to select time range and feature. + Values of the selected feature generated within an inclusive + time range will be deleted. + + Attributes: + time_range (google.type.interval_pb2.Interval): + Required. Select feature generated within a + half-inclusive time range. The time range is + lower inclusive and upper exclusive. + feature_selector (google.cloud.aiplatform_v1beta1.types.FeatureSelector): + Required. Selectors choosing which feature + values to be deleted from the EntityType. + skip_online_storage_delete (bool): + If set, data will not be deleted from online + storage. When time range is older than the data + in online storage, setting this to be true will + make the deletion have no impact on online + serving. 
+ """ + + time_range = proto.Field( + proto.MESSAGE, + number=1, + message=interval_pb2.Interval, + ) + feature_selector = proto.Field( + proto.MESSAGE, + number=2, + message=gca_feature_selector.FeatureSelector, + ) + skip_online_storage_delete = proto.Field( + proto.BOOL, + number=3, + ) + select_entity = proto.Field( proto.MESSAGE, number=2, oneof="DeleteOption", message=SelectEntity, ) + select_time_range_and_feature = proto.Field( + proto.MESSAGE, + number=3, + oneof="DeleteOption", + message=SelectTimeRangeAndFeature, + ) entity_type = proto.Field( proto.STRING, number=1, diff --git a/google/cloud/aiplatform_v1beta1/types/job_service.py b/google/cloud/aiplatform_v1beta1/types/job_service.py index fd099b9ac5..01651d66ff 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_service.py +++ b/google/cloud/aiplatform_v1beta1/types/job_service.py @@ -135,6 +135,9 @@ class ListCustomJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -142,6 +145,8 @@ class ListCustomJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -296,6 +301,9 @@ class ListDataLabelingJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -303,6 +311,8 @@ class ListDataLabelingJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -468,6 +478,9 @@ class ListHyperparameterTuningJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -475,6 +488,8 @@ class ListHyperparameterTuningJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -637,6 +652,9 @@ class ListBatchPredictionJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. 
+ - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -644,6 +662,8 @@ class ListBatchPredictionJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): @@ -933,6 +953,9 @@ class ListModelDeploymentMonitoringJobsRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -940,6 +963,8 @@ class ListModelDeploymentMonitoringJobsRequest(proto.Message): - ``state!="JOB_STATE_FAILED" OR display_name="my_job"`` - ``NOT display_name="my_job"`` - ``create_time>"2021-05-18T00:00:00Z"`` + - ``labels.keyA=valueA`` + - ``labels.keyB:*`` page_size (int): The standard list page size. page_token (str): diff --git a/google/cloud/aiplatform_v1beta1/types/metadata_service.py b/google/cloud/aiplatform_v1beta1/types/metadata_service.py index db995197b6..828e180ddc 100644 --- a/google/cloud/aiplatform_v1beta1/types/metadata_service.py +++ b/google/cloud/aiplatform_v1beta1/types/metadata_service.py @@ -57,6 +57,8 @@ "AddContextArtifactsAndExecutionsResponse", "AddContextChildrenRequest", "AddContextChildrenResponse", + "RemoveContextChildrenRequest", + "RemoveContextChildrenResponse", "QueryContextLineageSubgraphRequest", "CreateExecutionRequest", "GetExecutionRequest", @@ -361,6 +363,14 @@ class ListArtifactsRequest(proto.Message): For example: ``display_name = "test" AND metadata.field1.bool_value = true``. + order_by (str): + How the list of messages is ordered. Specify the values to + order by and an ordering operation. The default sorting + order is ascending. To specify descending order for a field, + users append a " desc" suffix; for example: "foo desc, bar". + Subfields are specified with a ``.`` character, such as + foo.bar. see https://google.aip.dev/132#ordering for more + details. """ parent = proto.Field( @@ -379,6 +389,10 @@ class ListArtifactsRequest(proto.Message): proto.STRING, number=4, ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListArtifactsResponse(proto.Message): @@ -658,6 +672,14 @@ class ListContextsRequest(proto.Message): For example: ``display_name = "test" AND metadata.field1.bool_value = true``. + order_by (str): + How the list of messages is ordered. Specify the values to + order by and an ordering operation. The default sorting + order is ascending. To specify descending order for a field, + users append a " desc" suffix; for example: "foo desc, bar". + Subfields are specified with a ``.`` character, such as + foo.bar. see https://google.aip.dev/132#ordering for more + details. """ parent = proto.Field( @@ -676,6 +698,10 @@ class ListContextsRequest(proto.Message): proto.STRING, number=4, ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListContextsResponse(proto.Message): @@ -925,6 +951,37 @@ class AddContextChildrenResponse(proto.Message): """ +class RemoveContextChildrenRequest(proto.Message): + r"""Request message for + [MetadataService.DeleteContextChildrenRequest][]. + + Attributes: + context (str): + Required. 
The resource name of the parent Context. + + Format: + ``projects/{project}/locations/{location}/metadataStores/{metadatastore}/contexts/{context}`` + child_contexts (Sequence[str]): + The resource names of the child Contexts. + """ + + context = proto.Field( + proto.STRING, + number=1, + ) + child_contexts = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class RemoveContextChildrenResponse(proto.Message): + r"""Response message for + [MetadataService.RemoveContextChildren][google.cloud.aiplatform.v1beta1.MetadataService.RemoveContextChildren]. + + """ + + class QueryContextLineageSubgraphRequest(proto.Message): r"""Request message for [MetadataService.QueryContextLineageSubgraph][google.cloud.aiplatform.v1beta1.MetadataService.QueryContextLineageSubgraph]. @@ -1050,6 +1107,14 @@ class ListExecutionsRequest(proto.Message): Each of the above supported filters can be combined together using logical operators (``AND`` & ``OR``). For example: ``display_name = "test" AND metadata.field1.bool_value = true``. + order_by (str): + How the list of messages is ordered. Specify the values to + order by and an ordering operation. The default sorting + order is ascending. To specify descending order for a field, + users append a " desc" suffix; for example: "foo desc, bar". + Subfields are specified with a ``.`` character, such as + foo.bar. see https://google.aip.dev/132#ordering for more + details. """ parent = proto.Field( @@ -1068,6 +1133,10 @@ class ListExecutionsRequest(proto.Message): proto.STRING, number=4, ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListExecutionsResponse(proto.Message): diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_job.py b/google/cloud/aiplatform_v1beta1/types/pipeline_job.py index 8b0e7d37a1..42fd5821f6 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_job.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_job.py @@ -163,8 +163,34 @@ class RuntimeConfig(proto.Message): set to PIPELINE_FAILURE_POLICY_FAIL_FAST, it will stop scheduling any new tasks when a task has failed. Any scheduled tasks will continue to completion. + input_artifacts (Mapping[str, google.cloud.aiplatform_v1beta1.types.PipelineJob.RuntimeConfig.InputArtifact]): + The runtime artifacts of the PipelineJob. The + key will be the input artifact name and the + value would be one of the InputArtifact. """ + class InputArtifact(proto.Message): + r"""The type of an input artifact. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + artifact_id (str): + Artifact resource id from MLMD. Which is the last portion of + an artifact resource + name(projects/{project}/locations/{location}/metadataStores/default/artifacts/{artifact_id}). + The artifact must stay within the same project, location and + default metadatastore as the pipeline. + + This field is a member of `oneof`_ ``kind``. 
+ """ + + artifact_id = proto.Field( + proto.STRING, + number=1, + oneof="kind", + ) + parameters = proto.MapField( proto.STRING, proto.MESSAGE, @@ -186,6 +212,12 @@ class RuntimeConfig(proto.Message): number=4, enum=pipeline_failure_policy.PipelineFailurePolicy, ) + input_artifacts = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=5, + message="PipelineJob.RuntimeConfig.InputArtifact", + ) name = proto.Field( proto.STRING, diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py index 9bddf47e5a..e123869a5f 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py @@ -103,6 +103,9 @@ class ListTrainingPipelinesRequest(proto.Message): - ``create_time`` supports ``=``, ``!=``,\ ``<``, ``<=``,\ ``>``, ``>=`` comparisons. ``create_time`` must be in RFC 3339 format. + - ``labels`` supports general map functions that is: + ``labels.key=value`` - key:value equality \`labels.key:\* + - key existence Some examples of using the filter are: @@ -335,6 +338,8 @@ class ListPipelineJobsRequest(proto.Message): - ``update_time`` - ``end_time`` - ``start_time`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask specifying which fields to read. """ parent = proto.Field( @@ -357,6 +362,11 @@ class ListPipelineJobsRequest(proto.Message): proto.STRING, number=6, ) + read_mask = proto.Field( + proto.MESSAGE, + number=7, + message=field_mask_pb2.FieldMask, + ) class ListPipelineJobsResponse(proto.Message): diff --git a/google/cloud/aiplatform_v1beta1/types/study.py b/google/cloud/aiplatform_v1beta1/types/study.py index 81b87dd94b..67350886ce 100644 --- a/google/cloud/aiplatform_v1beta1/types/study.py +++ b/google/cloud/aiplatform_v1beta1/types/study.py @@ -296,6 +296,10 @@ class StudySpec(proto.Message): measurement_selection_type (google.cloud.aiplatform_v1beta1.types.StudySpec.MeasurementSelectionType): Describe which measurement selection type will be used + transfer_learning_config (google.cloud.aiplatform_v1beta1.types.StudySpec.TransferLearningConfig): + The configuration info/options for transfer + learning. Currently supported for Vertex AI + Vizier service, not HyperParameterTuningJob """ class Algorithm(proto.Enum): @@ -862,6 +866,31 @@ class ConvexStopConfig(proto.Message): number=5, ) + class TransferLearningConfig(proto.Message): + r"""This contains flag for manually disabling transfer learning + for a study. The names of prior studies being used for transfer + learning (if any) are also listed here. + + Attributes: + disable_transfer_learning (bool): + Flag to to manually prevent vizier from using + transfer learning on a new study. Otherwise, + vizier will automatically determine whether or + not to use transfer learning. + prior_study_names (Sequence[str]): + Output only. 
Names of previously completed + studies + """ + + disable_transfer_learning = proto.Field( + proto.BOOL, + number=1, + ) + prior_study_names = proto.RepeatedField( + proto.STRING, + number=2, + ) + decay_curve_stopping_spec = proto.Field( proto.MESSAGE, number=4, @@ -911,6 +940,11 @@ class ConvexStopConfig(proto.Message): number=7, enum=MeasurementSelectionType, ) + transfer_learning_config = proto.Field( + proto.MESSAGE, + number=10, + message=TransferLearningConfig, + ) class Measurement(proto.Message): diff --git a/samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_async.py b/samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_async.py new file mode 100644 index 0000000000..efd878bc6f --- /dev/null +++ b/samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveContextChildren +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1_generated_MetadataService_RemoveContextChildren_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import aiplatform_v1 + + +async def sample_remove_context_children(): + # Create a client + client = aiplatform_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = await client.remove_context_children(request=request) + + # Handle the response + print(response) + +# [END aiplatform_v1_generated_MetadataService_RemoveContextChildren_async] diff --git a/samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_sync.py b/samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_sync.py new file mode 100644 index 0000000000..60d9cea88a --- /dev/null +++ b/samples/generated_samples/aiplatform_v1_generated_metadata_service_remove_context_children_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveContextChildren +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1_generated_MetadataService_RemoveContextChildren_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import aiplatform_v1 + + +def sample_remove_context_children(): + # Create a client + client = aiplatform_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = client.remove_context_children(request=request) + + # Handle the response + print(response) + +# [END aiplatform_v1_generated_MetadataService_RemoveContextChildren_sync] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_async.py b/samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_async.py new file mode 100644 index 0000000000..064b4512f2 --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveContextChildren +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_MetadataService_RemoveContextChildren_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import aiplatform_v1beta1 + + +async def sample_remove_context_children(): + # Create a client + client = aiplatform_v1beta1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = await client.remove_context_children(request=request) + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_MetadataService_RemoveContextChildren_async] diff --git a/samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_sync.py b/samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_sync.py new file mode 100644 index 0000000000..cb072aad6f --- /dev/null +++ b/samples/generated_samples/aiplatform_v1beta1_generated_metadata_service_remove_context_children_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RemoveContextChildren +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-aiplatform + + +# [START aiplatform_v1beta1_generated_MetadataService_RemoveContextChildren_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import aiplatform_v1beta1 + + +def sample_remove_context_children(): + # Create a client + client = aiplatform_v1beta1.MetadataServiceClient() + + # Initialize request argument(s) + request = aiplatform_v1beta1.RemoveContextChildrenRequest( + context="context_value", + ) + + # Make the request + response = client.remove_context_children(request=request) + + # Handle the response + print(response) + +# [END aiplatform_v1beta1_generated_MetadataService_RemoveContextChildren_sync] diff --git a/samples/generated_samples/snippet_metadata_aiplatform_v1.json b/samples/generated_samples/snippet_metadata_aiplatform_v1.json index a94d338d74..96b1c6b980 100644 --- a/samples/generated_samples/snippet_metadata_aiplatform_v1.json +++ b/samples/generated_samples/snippet_metadata_aiplatform_v1.json @@ -18245,6 +18245,175 @@ ], "title": "aiplatform_v1_generated_metadata_service_query_execution_inputs_and_outputs_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.aiplatform_v1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.aiplatform_v1.MetadataServiceAsyncClient.remove_context_children", + "method": { + "fullName": "google.cloud.aiplatform.v1.MetadataService.RemoveContextChildren", + "service": { + "fullName": "google.cloud.aiplatform.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "RemoveContextChildren" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1.types.RemoveContextChildrenRequest" + }, + { + "name": "context", + "type": "str" + }, + { + "name": "child_contexts", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1.types.RemoveContextChildrenResponse", + "shortName": "remove_context_children" + }, + "description": "Sample for RemoveContextChildren", + "file": "aiplatform_v1_generated_metadata_service_remove_context_children_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1_generated_MetadataService_RemoveContextChildren_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1_generated_metadata_service_remove_context_children_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.aiplatform_v1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.aiplatform_v1.MetadataServiceClient.remove_context_children", + "method": { + "fullName": "google.cloud.aiplatform.v1.MetadataService.RemoveContextChildren", + "service": { + "fullName": "google.cloud.aiplatform.v1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "RemoveContextChildren" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.aiplatform_v1.types.RemoveContextChildrenRequest" + }, + { + "name": "context", + "type": "str" + }, + { + "name": "child_contexts", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1.types.RemoveContextChildrenResponse", + "shortName": "remove_context_children" + }, + "description": "Sample for RemoveContextChildren", + "file": "aiplatform_v1_generated_metadata_service_remove_context_children_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1_generated_MetadataService_RemoveContextChildren_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1_generated_metadata_service_remove_context_children_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json b/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json index d6423130dc..c1dcf0a70e 100644 --- a/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json +++ b/samples/generated_samples/snippet_metadata_aiplatform_v1beta1.json @@ -19396,6 +19396,175 @@ ], "title": "aiplatform_v1beta1_generated_metadata_service_query_execution_inputs_and_outputs_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.MetadataServiceAsyncClient", + "shortName": "MetadataServiceAsyncClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.MetadataServiceAsyncClient.remove_context_children", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.MetadataService.RemoveContextChildren", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "RemoveContextChildren" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenRequest" + }, + { + "name": "context", + "type": "str" + }, + { + "name": "child_contexts", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenResponse", + "shortName": "remove_context_children" + }, + "description": "Sample for RemoveContextChildren", + "file": "aiplatform_v1beta1_generated_metadata_service_remove_context_children_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_MetadataService_RemoveContextChildren_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_metadata_service_remove_context_children_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.aiplatform_v1beta1.MetadataServiceClient", + "shortName": "MetadataServiceClient" + }, + "fullName": "google.cloud.aiplatform_v1beta1.MetadataServiceClient.remove_context_children", + "method": { + "fullName": "google.cloud.aiplatform.v1beta1.MetadataService.RemoveContextChildren", + "service": { + "fullName": "google.cloud.aiplatform.v1beta1.MetadataService", + "shortName": "MetadataService" + }, + "shortName": "RemoveContextChildren" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenRequest" + }, + { + "name": "context", + "type": "str" + }, + { + "name": "child_contexts", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.aiplatform_v1beta1.types.RemoveContextChildrenResponse", + "shortName": "remove_context_children" + }, + "description": "Sample for RemoveContextChildren", + "file": "aiplatform_v1beta1_generated_metadata_service_remove_context_children_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "aiplatform_v1beta1_generated_MetadataService_RemoveContextChildren_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "aiplatform_v1beta1_generated_metadata_service_remove_context_children_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/tests/unit/gapic/aiplatform_v1/test_metadata_service.py b/tests/unit/gapic/aiplatform_v1/test_metadata_service.py index 522769b7c0..c82d476bb5 100644 --- a/tests/unit/gapic/aiplatform_v1/test_metadata_service.py +++ b/tests/unit/gapic/aiplatform_v1/test_metadata_service.py @@ -5770,6 +5770,257 @@ async def test_add_context_children_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + metadata_service.RemoveContextChildrenRequest, + dict, + ], +) +def test_remove_context_children(request_type, transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_service.RemoveContextChildrenResponse() + response = client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_service.RemoveContextChildrenRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metadata_service.RemoveContextChildrenResponse) + + +def test_remove_context_children_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + client.remove_context_children() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_service.RemoveContextChildrenRequest() + + +@pytest.mark.asyncio +async def test_remove_context_children_async( + transport: str = "grpc_asyncio", + request_type=metadata_service.RemoveContextChildrenRequest, +): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metadata_service.RemoveContextChildrenResponse() + ) + response = await client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_service.RemoveContextChildrenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_service.RemoveContextChildrenResponse) + + +@pytest.mark.asyncio +async def test_remove_context_children_async_from_dict(): + await test_remove_context_children_async(request_type=dict) + + +def test_remove_context_children_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_service.RemoveContextChildrenRequest() + + request.context = "context_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + call.return_value = metadata_service.RemoveContextChildrenResponse() + client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "context=context_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_remove_context_children_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_service.RemoveContextChildrenRequest() + + request.context = "context_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metadata_service.RemoveContextChildrenResponse() + ) + await client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "context=context_value", + ) in kw["metadata"] + + +def test_remove_context_children_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_service.RemoveContextChildrenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.remove_context_children( + context="context_value", + child_contexts=["child_contexts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].context + mock_val = "context_value" + assert arg == mock_val + arg = args[0].child_contexts + mock_val = ["child_contexts_value"] + assert arg == mock_val + + +def test_remove_context_children_flattened_error(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_context_children( + metadata_service.RemoveContextChildrenRequest(), + context="context_value", + child_contexts=["child_contexts_value"], + ) + + +@pytest.mark.asyncio +async def test_remove_context_children_flattened_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_service.RemoveContextChildrenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metadata_service.RemoveContextChildrenResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.remove_context_children( + context="context_value", + child_contexts=["child_contexts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].context + mock_val = "context_value" + assert arg == mock_val + arg = args[0].child_contexts + mock_val = ["child_contexts_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_remove_context_children_flattened_error_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.remove_context_children( + metadata_service.RemoveContextChildrenRequest(), + context="context_value", + child_contexts=["child_contexts_value"], + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9570,6 +9821,7 @@ def test_metadata_service_base_transport(): "purge_contexts", "add_context_artifacts_and_executions", "add_context_children", + "remove_context_children", "query_context_lineage_subgraph", "create_execution", "get_execution", diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py index 89e143b2b0..6eb4d25a5c 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_featurestore_service.py @@ -71,6 +71,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import interval_pb2 # type: ignore import google.auth diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py index e2cf4eeb84..c0a789e917 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_metadata_service.py @@ -5772,6 +5772,257 @@ async def test_add_context_children_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + metadata_service.RemoveContextChildrenRequest, + dict, + ], +) +def test_remove_context_children(request_type, transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_service.RemoveContextChildrenResponse() + response = client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_service.RemoveContextChildrenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_service.RemoveContextChildrenResponse) + + +def test_remove_context_children_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + client.remove_context_children() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_service.RemoveContextChildrenRequest() + + +@pytest.mark.asyncio +async def test_remove_context_children_async( + transport: str = "grpc_asyncio", + request_type=metadata_service.RemoveContextChildrenRequest, +): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metadata_service.RemoveContextChildrenResponse() + ) + response = await client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_service.RemoveContextChildrenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metadata_service.RemoveContextChildrenResponse) + + +@pytest.mark.asyncio +async def test_remove_context_children_async_from_dict(): + await test_remove_context_children_async(request_type=dict) + + +def test_remove_context_children_field_headers(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_service.RemoveContextChildrenRequest() + + request.context = "context_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + call.return_value = metadata_service.RemoveContextChildrenResponse() + client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "context=context_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_remove_context_children_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_service.RemoveContextChildrenRequest() + + request.context = "context_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metadata_service.RemoveContextChildrenResponse() + ) + await client.remove_context_children(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "context=context_value", + ) in kw["metadata"] + + +def test_remove_context_children_flattened(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_service.RemoveContextChildrenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.remove_context_children( + context="context_value", + child_contexts=["child_contexts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].context + mock_val = "context_value" + assert arg == mock_val + arg = args[0].child_contexts + mock_val = ["child_contexts_value"] + assert arg == mock_val + + +def test_remove_context_children_flattened_error(): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.remove_context_children( + metadata_service.RemoveContextChildrenRequest(), + context="context_value", + child_contexts=["child_contexts_value"], + ) + + +@pytest.mark.asyncio +async def test_remove_context_children_flattened_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.remove_context_children), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metadata_service.RemoveContextChildrenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metadata_service.RemoveContextChildrenResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.remove_context_children( + context="context_value", + child_contexts=["child_contexts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].context + mock_val = "context_value" + assert arg == mock_val + arg = args[0].child_contexts + mock_val = ["child_contexts_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_remove_context_children_flattened_error_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.remove_context_children( + metadata_service.RemoveContextChildrenRequest(), + context="context_value", + child_contexts=["child_contexts_value"], + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9572,6 +9823,7 @@ def test_metadata_service_base_transport(): "purge_contexts", "add_context_artifacts_and_executions", "add_context_children", + "remove_context_children", "query_context_lineage_subgraph", "create_execution", "get_execution", diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index 7c36d84b95..8a52fe213b 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -2006,19 +2006,22 @@ def test_parse_annotated_dataset_path(): def test_dataset_path(): project = "cuttlefish" - dataset = "mussel" - expected = "projects/{project}/datasets/{dataset}".format( + location = "mussel" + dataset = "winkle" + expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, + location=location, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, dataset) + actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "winkle", - "dataset": "nautilus", + "project": "nautilus", + "location": "scallop", + "dataset": "abalone", } path = MigrationServiceClient.dataset_path(**expected) @@ -2028,9 +2031,9 @@ def test_parse_dataset_path(): def test_dataset_path(): - project = "scallop" - location = "abalone" - dataset = "squid" + project = "squid" + location = "clam" + dataset = "whelk" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, location=location, @@ -2042,9 +2045,9 @@ def test_dataset_path(): def test_parse_dataset_path(): expected = { - "project": "clam", - "location": "whelk", - "dataset": "octopus", + "project": "octopus", + "location": "oyster", + "dataset": "nudibranch", } path = MigrationServiceClient.dataset_path(**expected) @@ -2054,22 +2057,19 @@ def test_parse_dataset_path(): def test_dataset_path(): - project = "oyster" - location = "nudibranch" - dataset = "cuttlefish" - expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( + project = "cuttlefish" + dataset = "mussel" + expected = "projects/{project}/datasets/{dataset}".format( project=project, - location=location, dataset=dataset, ) - actual = MigrationServiceClient.dataset_path(project, location, dataset) + actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual def test_parse_dataset_path(): expected = { - "project": "mussel", - "location": "winkle", + "project": "winkle", "dataset": "nautilus", } path = MigrationServiceClient.dataset_path(**expected)
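
The following usage sketch is illustrative only and is not part of the generated patch above. It shows how a few of the surfaces introduced by this change (the RemoveContextChildren RPC, the order_by field on ListArtifactsRequest, and the read_mask field on ListPipelineJobsRequest) might be called from application code. The project, location, and resource IDs are placeholders, and, as the generated samples note, a regional endpoint typically has to be configured via client_options.

# Illustrative sketch (not generated code); all resource names are placeholders.
from google.api_core.client_options import ClientOptions
from google.cloud import aiplatform_v1beta1
from google.protobuf import field_mask_pb2

# Use the regional endpoint that matches the resources' location.
options = ClientOptions(api_endpoint="us-central1-aiplatform.googleapis.com")

metadata_client = aiplatform_v1beta1.MetadataServiceClient(client_options=options)

# New in this change: detach child contexts from a parent context.
metadata_client.remove_context_children(
    context="projects/my-project/locations/us-central1/metadataStores/default/contexts/parent-context",
    child_contexts=[
        "projects/my-project/locations/us-central1/metadataStores/default/contexts/child-context",
    ],
)

# New in this change: order_by on ListArtifactsRequest (newest first).
artifacts = metadata_client.list_artifacts(
    request=aiplatform_v1beta1.ListArtifactsRequest(
        parent="projects/my-project/locations/us-central1/metadataStores/default",
        order_by="create_time desc",
    )
)
for artifact in artifacts:
    print(artifact.name)

# New in this change: read_mask on ListPipelineJobsRequest limits returned fields.
pipeline_client = aiplatform_v1beta1.PipelineServiceClient(client_options=options)
jobs = pipeline_client.list_pipeline_jobs(
    request=aiplatform_v1beta1.ListPipelineJobsRequest(
        parent="projects/my-project/locations/us-central1",
        read_mask=field_mask_pb2.FieldMask(paths=["name", "display_name", "state"]),
    )
)
for job in jobs:
    print(job.display_name, job.state)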