From 83b2b9a03a568dd3034f721352080d96824f850a Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 20 Oct 2020 14:59:01 -0600 Subject: [PATCH] chore: update dev with changes to master (#33) * Export aiplatform_v1beta1 as aiplatform.gapic_v1beta1 (#17) Freeing up the google.cloud.aiplatform namespace for MB SDK * GAPIC namespace (#20) * name space google.cloud.aiplatform.gapic * Init Disclaimer (#28) * clean up docstrings that are not rendered properly in reference doc (#24) Co-authored-by: Vinny Senthil * fix: re-add py sessions to noxfile (#22) * fix: re-add py sessions to noxfile Co-authored-by: Vinny Senthil Co-authored-by: Yu-Han Liu Co-authored-by: Vinny Senthil --- README.rst | 14 +- .../services/job_service/client.py | 877 +++++------------- .../services/pipeline_service/client.py | 386 ++------ .../services/prediction_service/client.py | 313 +------ .../prediction_service/transports/grpc.py | 173 +--- .../aiplatform_v1beta1/types/explanation.py | 303 +----- .../types/explanation_metadata.py | 293 +----- .../cloud/aiplatform_v1beta1/types/model.py | 287 +----- synth.metadata | 4 +- synth.py | 95 +- 10 files changed, 553 insertions(+), 2192 deletions(-) diff --git a/README.rst b/README.rst index 209b577ead..3db9fc5022 100644 --- a/README.rst +++ b/README.rst @@ -4,7 +4,13 @@ Python Client for Cloud AI Platform |beta| |pypi| |versions| -`Cloud AI Platform`_: Google Cloud AI Platform is an integrated suite of machine learning tools and services for building and using ML models with AutoML or custom code. It offers both novices and experts the best workbench for the entire machine learning development lifecycle. +:Warning: This library is a pre-release product and is subject to breaking changes. + +`Cloud AI Platform`_: Cloud AI Platform is a suite of machine learning tools that enables + developers to train high-quality models specific to their business needs. + It offers both novices and experts the best workbench for machine learning + development by leveraging Google's state-of-the-art transfer learning and + Neural Architecture Search technology. - `Client Library Documentation`_ - `Product Documentation`_ @@ -15,9 +21,9 @@ Python Client for Cloud AI Platform :target: https://pypi.org/project/google-cloud-aiplatform/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-aiplatform.svg :target: https://pypi.org/project/google-cloud-aiplatform/ -.. _Cloud AI Platform: https://cloud.google.com/ai-platform-unified/docs +.. _Cloud AI Platform: https://cloud.google.com/ai-platform/docs .. _Client Library Documentation: https://googleapis.dev/python/aiplatform/latest -.. _Product Documentation: https://cloud.google.com/ai-platform-unified/docs +.. _Product Documentation: https://cloud.google.com/ai-platform/docs Quick Start ----------- @@ -79,5 +85,5 @@ Next Steps - View this `README`_ to see the full list of Cloud APIs that we cover. -.. _Cloud AI Platform API Product documentation: https://cloud.google.com/ai-platform-unified/docs +.. _Cloud AI Platform API Product documentation: https://cloud.google.com/ai-platform/docs .. 
_README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/client.py b/google/cloud/aiplatform_v1beta1/services/job_service/client.py index a1eb7c38ce..b56a9a7871 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/client.py @@ -16,24 +16,17 @@ # from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation as ga_operation # type: ignore -from google.api_core import operation_async # type: ignore +from google.api_core import operation as ga_operation from google.cloud.aiplatform_v1beta1.services.job_service import pagers from google.cloud.aiplatform_v1beta1.types import batch_prediction_job from google.cloud.aiplatform_v1beta1.types import ( @@ -46,7 +39,6 @@ from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, ) -from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1beta1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, @@ -63,9 +55,8 @@ from google.rpc import status_pb2 as status # type: ignore from google.type import money_pb2 as money # type: ignore -from .transports.base import JobServiceTransport, DEFAULT_CLIENT_INFO +from .transports.base import JobServiceTransport from .transports.grpc import JobServiceGrpcTransport -from .transports.grpc_asyncio import JobServiceGrpcAsyncIOTransport class JobServiceClientMeta(type): @@ -78,7 +69,6 @@ class JobServiceClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[JobServiceTransport]] _transport_registry["grpc"] = JobServiceGrpcTransport - _transport_registry["grpc_asyncio"] = JobServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[JobServiceTransport]: """Return an appropriate transport class. @@ -102,38 +92,8 @@ def get_transport_class(cls, label: str = None,) -> Type[JobServiceTransport]: class JobServiceClient(metaclass=JobServiceClientMeta): """A service for creating and managing AI Platform's jobs.""" - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
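# A minimal usage sketch of the metaclass transport registry shown above, assuming the
# google-cloud-aiplatform package from this diff is importable; the snippet is illustrative
# and not part of the patch itself.
from google.cloud.aiplatform_v1beta1.services.job_service.client import (
    JobServiceClient,
)
from google.cloud.aiplatform_v1beta1.services.job_service.transports.grpc import (
    JobServiceGrpcTransport,
)

# An explicit label looks up the matching entry in _transport_registry.
assert JobServiceClient.get_transport_class("grpc") is JobServiceGrpcTransport

# With no label, the first registered transport (gRPC) is returned, which is
# what __init__ relies on when the ``transport`` argument is left as None.
assert JobServiceClient.get_transport_class() is JobServiceGrpcTransport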
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "aiplatform.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT + DEFAULT_OPTIONS = ClientOptions.ClientOptions( + api_endpoint="aiplatform.googleapis.com" ) @classmethod @@ -156,14 +116,12 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file - @property - def transport(self) -> JobServiceTransport: - """Return the transport used by the client instance. - - Returns: - JobServiceTransport: The transport used by the client instance. - """ - return self._transport + @staticmethod + def custom_job_path(project: str, location: str, custom_job: str,) -> str: + """Return a fully-qualified custom_job string.""" + return "projects/{project}/locations/{location}/customJobs/{custom_job}".format( + project=project, location=location, custom_job=custom_job, + ) @staticmethod def batch_prediction_job_path( @@ -176,65 +134,6 @@ def batch_prediction_job_path( batch_prediction_job=batch_prediction_job, ) - @staticmethod - def parse_batch_prediction_job_path(path: str) -> Dict[str, str]: - """Parse a batch_prediction_job path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/batchPredictionJobs/(?P<batch_prediction_job>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def custom_job_path(project: str, location: str, custom_job: str,) -> str: - """Return a fully-qualified custom_job string.""" - return "projects/{project}/locations/{location}/customJobs/{custom_job}".format( - project=project, location=location, custom_job=custom_job, - ) - - @staticmethod - def parse_custom_job_path(path: str) -> Dict[str, str]: - """Parse a custom_job path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/customJobs/(?P<custom_job>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def data_labeling_job_path( - project: str, location: str, data_labeling_job: str, - ) -> str: - """Return a fully-qualified data_labeling_job string.""" - return "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format( - project=project, location=location, data_labeling_job=data_labeling_job, - ) - - @staticmethod - def parse_data_labeling_job_path(path: str) -> Dict[str, str]: - """Parse a data_labeling_job path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataLabelingJobs/(?P<data_labeling_job>.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def dataset_path(project: str, location: str, dataset: str,) -> str: - """Return a fully-qualified dataset string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, - ) - - @staticmethod - def parse_dataset_path(path: str) -> Dict[str, str]: - """Parse a dataset path into its component segments.""" - m = re.match( - r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", - path, - ) - return m.groupdict() if m else {} - 
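# A minimal sketch of the resource-path helpers above, assuming the package is importable;
# the project/location/ID values are illustrative. The *_path builders are kept by this
# patch, while the regex-based parse_* inverses are among the methods being removed.
import re

from google.cloud.aiplatform_v1beta1.services.job_service.client import (
    JobServiceClient,
)

name = JobServiceClient.custom_job_path(
    project="my-project", location="us-central1", custom_job="1234"
)
# name == "projects/my-project/locations/us-central1/customJobs/1234"

# Equivalent of the removed parse_custom_job_path helper.
m = re.match(
    r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
    r"/customJobs/(?P<custom_job>.+?)$",
    name,
)
assert m and m.groupdict()["custom_job"] == "1234"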
@staticmethod def hyperparameter_tuning_job_path( project: str, location: str, hyperparameter_tuning_job: str, @@ -247,96 +146,20 @@ def hyperparameter_tuning_job_path( ) @staticmethod - def parse_hyperparameter_tuning_job_path(path: str) -> Dict[str, str]: - """Parse a hyperparameter_tuning_job path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/hyperparameterTuningJobs/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def model_path(project: str, location: str, model: str,) -> str: - """Return a fully-qualified model string.""" - return "projects/{project}/locations/{location}/models/{model}".format( - project=project, location=location, model=model, - ) - - @staticmethod - def parse_model_path(path: str) -> Dict[str, str]: - """Parse a model path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" - return "projects/{project}".format(project=project,) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, location=location, + def data_labeling_job_path( + project: str, location: str, data_labeling_job: str, + ) -> str: + """Return a fully-qualified data_labeling_job string.""" + return "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format( + project=project, location=location, data_labeling_job=data_labeling_job, ) - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, - transport: 
Union[str, JobServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + credentials: credentials.Credentials = None, + transport: Union[str, JobServiceTransport] = None, + client_options: ClientOptions.ClientOptions = DEFAULT_OPTIONS, ) -> None: """Instantiate the job service client. @@ -349,102 +172,26 @@ def __init__( transport (Union[str, ~.JobServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. + client_options (ClientOptions): Custom options for the client. """ if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) - ) - - ssl_credentials = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - is_mtls = True - else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" - ) + client_options = ClientOptions.from_dict(client_options) # Save or instantiate the transport. 
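# A minimal construction sketch matching the simplified __init__ above, assuming
# Application Default Credentials are available; the regional endpoint value is
# illustrative. The mTLS environment-variable handling removed in this hunk does
# not apply on this code path.
from google.api_core.client_options import ClientOptions
from google.cloud.aiplatform_v1beta1.services.job_service.client import (
    JobServiceClient,
)

client = JobServiceClient(
    client_options=ClientOptions(
        api_endpoint="us-central1-aiplatform.googleapis.com"
    )
)

# A plain dict is also accepted; the constructor converts it via ClientOptions.from_dict().
client = JobServiceClient(
    client_options={"api_endpoint": "us-central1-aiplatform.googleapis.com"}
)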
# Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, JobServiceTransport): - # transport is a JobServiceTransport instance. - if credentials or client_options.credentials_file: + if credentials: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." - ) self._transport = transport else: Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, - quota_project_id=client_options.quota_project_id, - client_info=client_info, + host=client_options.api_endpoint or "aiplatform.googleapis.com", ) def create_custom_job( @@ -498,36 +245,28 @@ def create_custom_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, custom_job]) - if request is not None and has_flattened_params: + if request is not None and any([parent, custom_job]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CreateCustomJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.CreateCustomJobRequest): - request = job_service.CreateCustomJobRequest(request) + request = job_service.CreateCustomJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent - if custom_job is not None: - request.custom_job = custom_job + if parent is not None: + request.parent = parent + if custom_job is not None: + request.custom_job = custom_job # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_custom_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + rpc = gapic_v1.method.wrap_method( + self._transport.create_custom_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -579,29 +318,27 @@ def get_custom_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.GetCustomJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
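# A minimal sketch of the two calling conventions guarded by the mutual-exclusivity check
# above, assuming an authenticated client; resource names and the CustomJob payload are
# illustrative (required job_spec fields are omitted, so a real call would be rejected by
# the service).
from google.cloud.aiplatform_v1beta1.services.job_service.client import (
    JobServiceClient,
)
from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job
from google.cloud.aiplatform_v1beta1.types import job_service

client = JobServiceClient()
parent = "projects/my-project/locations/us-central1"
job = gca_custom_job.CustomJob(display_name="example-job")

# 1) Flattened arguments; the client copies them onto a new request object.
client.create_custom_job(parent=parent, custom_job=job)

# 2) A fully formed request object.
request = job_service.CreateCustomJobRequest(parent=parent, custom_job=job)
client.create_custom_job(request=request)

# 3) Mixing both raises ValueError, per the check above:
# client.create_custom_job(request=request, parent=parent)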
- if not isinstance(request, job_service.GetCustomJobRequest): - request = job_service.GetCustomJobRequest(request) + request = job_service.GetCustomJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_custom_job] + rpc = gapic_v1.method.wrap_method( + self._transport.get_custom_job, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -656,29 +393,27 @@ def list_custom_jobs( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: + if request is not None and any([parent]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.ListCustomJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.ListCustomJobsRequest): - request = job_service.ListCustomJobsRequest(request) + request = job_service.ListCustomJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_custom_jobs] + rpc = gapic_v1.method.wrap_method( + self._transport.list_custom_jobs, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -692,7 +427,7 @@ def list_custom_jobs( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListCustomJobsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, request=request, response=response, ) # Done; return the response. @@ -751,34 +486,26 @@ def delete_custom_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.DeleteCustomJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
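# A minimal sketch of consuming the ListCustomJobsPager returned above, assuming an
# authenticated client; the parent value is illustrative. The pager transparently issues
# follow-up list calls as iteration crosses page boundaries.
from google.cloud.aiplatform_v1beta1.services.job_service.client import (
    JobServiceClient,
)

client = JobServiceClient()
pager = client.list_custom_jobs(
    parent="projects/my-project/locations/us-central1"
)

for job in pager:  # yields CustomJob messages across all pages
    print(job.name, job.state)

for page in pager.pages:  # page-level access to ListCustomJobsResponse messages
    print(len(page.custom_jobs))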
- if not isinstance(request, job_service.DeleteCustomJobRequest): - request = job_service.DeleteCustomJobRequest(request) + request = job_service.DeleteCustomJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_custom_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.delete_custom_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -838,34 +565,26 @@ def cancel_custom_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CancelCustomJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.CancelCustomJobRequest): - request = job_service.CancelCustomJobRequest(request) + request = job_service.CancelCustomJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_custom_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_custom_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -918,36 +637,28 @@ def create_data_labeling_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_labeling_job]) - if request is not None and has_flattened_params: + if request is not None and any([parent, data_labeling_job]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CreateDataLabelingJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
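# A minimal sketch of the long-running operation returned by delete_custom_job above
# (a google.api_core.operation.Operation wrapping Empty), assuming an authenticated
# client; the job ID is illustrative.
from google.cloud.aiplatform_v1beta1.services.job_service.client import (
    JobServiceClient,
)

client = JobServiceClient()
name = client.custom_job_path("my-project", "us-central1", "1234")

lro = client.delete_custom_job(name=name)
lro.result(timeout=300)  # blocks until the deletion completes or raises

# cancel_custom_job, by contrast, returns None immediately; the job then
# transitions to a cancelled state asynchronously on the server side.
client.cancel_custom_job(name=name)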
- if not isinstance(request, job_service.CreateDataLabelingJobRequest): - request = job_service.CreateDataLabelingJobRequest(request) + request = job_service.CreateDataLabelingJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent - if data_labeling_job is not None: - request.data_labeling_job = data_labeling_job + if parent is not None: + request.parent = parent + if data_labeling_job is not None: + request.data_labeling_job = data_labeling_job # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_labeling_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + rpc = gapic_v1.method.wrap_method( + self._transport.create_data_labeling_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -995,29 +706,27 @@ def get_data_labeling_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.GetDataLabelingJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.GetDataLabelingJobRequest): - request = job_service.GetDataLabelingJobRequest(request) + request = job_service.GetDataLabelingJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_labeling_job] + rpc = gapic_v1.method.wrap_method( + self._transport.get_data_labeling_job, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1071,29 +780,27 @@ def list_data_labeling_jobs( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: + if request is not None and any([parent]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.ListDataLabelingJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
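# A minimal sketch of what the routing-header lines removed above produced: an
# "x-goog-request-params" metadata entry derived from a request field so the backend can
# route by resource. The parent value is illustrative, and the exact encoding of the
# header value is an api-core detail.
from google.api_core import gapic_v1
from google.cloud.aiplatform_v1beta1.types import job_service

request = job_service.ListDataLabelingJobsRequest(
    parent="projects/my-project/locations/us-central1"
)
routing_metadata = gapic_v1.routing_header.to_grpc_metadata(
    (("parent", request.parent),)
)
# routing_metadata carries ("x-goog-request-params", "parent=projects/..."),
# which the removed code appended to the per-call ``metadata`` tuple.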
- if not isinstance(request, job_service.ListDataLabelingJobsRequest): - request = job_service.ListDataLabelingJobsRequest(request) + request = job_service.ListDataLabelingJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_labeling_jobs] + rpc = gapic_v1.method.wrap_method( + self._transport.list_data_labeling_jobs, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1107,7 +814,7 @@ def list_data_labeling_jobs( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListDataLabelingJobsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, request=request, response=response, ) # Done; return the response. @@ -1167,34 +874,26 @@ def delete_data_labeling_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.DeleteDataLabelingJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.DeleteDataLabelingJobRequest): - request = job_service.DeleteDataLabelingJobRequest(request) + request = job_service.DeleteDataLabelingJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_labeling_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.delete_data_labeling_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -1244,34 +943,26 @@ def cancel_data_labeling_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CancelDataLabelingJobRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.CancelDataLabelingJobRequest): - request = job_service.CancelDataLabelingJobRequest(request) + request = job_service.CancelDataLabelingJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_data_labeling_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_data_labeling_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -1326,38 +1017,28 @@ def create_hyperparameter_tuning_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, hyperparameter_tuning_job]) - if request is not None and has_flattened_params: + if request is not None and any([parent, hyperparameter_tuning_job]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CreateHyperparameterTuningJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.CreateHyperparameterTuningJobRequest): - request = job_service.CreateHyperparameterTuningJobRequest(request) + request = job_service.CreateHyperparameterTuningJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent - if hyperparameter_tuning_job is not None: - request.hyperparameter_tuning_job = hyperparameter_tuning_job + if parent is not None: + request.parent = parent + if hyperparameter_tuning_job is not None: + request.hyperparameter_tuning_job = hyperparameter_tuning_job # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.create_hyperparameter_tuning_job - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + rpc = gapic_v1.method.wrap_method( + self._transport.create_hyperparameter_tuning_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -1407,31 +1088,27 @@ def get_hyperparameter_tuning_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
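# A minimal sketch of the gapic_v1.method.wrap_method pattern this diff switches to:
# a bare transport callable is wrapped with default retry/timeout behaviour plus
# user-agent metadata. The stand-in callable below is an assumption used for
# illustration; a real transport method issues the gRPC call.
from google.api_core import gapic_v1

def fake_transport_method(request, timeout=None, metadata=()):
    # Echo the request instead of calling the service.
    return request

rpc = gapic_v1.method.wrap_method(
    fake_transport_method,
    default_timeout=None,
    client_info=gapic_v1.client_info.ClientInfo(),
)

# Per-call overrides (retry=..., timeout=..., metadata=...) are still accepted,
# which is how the public client methods above pass them through.
response = rpc("request-object")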
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.GetHyperparameterTuningJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.GetHyperparameterTuningJobRequest): - request = job_service.GetHyperparameterTuningJobRequest(request) + request = job_service.GetHyperparameterTuningJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.get_hyperparameter_tuning_job - ] + rpc = gapic_v1.method.wrap_method( + self._transport.get_hyperparameter_tuning_job, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1486,31 +1163,27 @@ def list_hyperparameter_tuning_jobs( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: + if request is not None and any([parent]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.ListHyperparameterTuningJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.ListHyperparameterTuningJobsRequest): - request = job_service.ListHyperparameterTuningJobsRequest(request) + request = job_service.ListHyperparameterTuningJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.list_hyperparameter_tuning_jobs - ] + rpc = gapic_v1.method.wrap_method( + self._transport.list_hyperparameter_tuning_jobs, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1524,7 +1197,7 @@ def list_hyperparameter_tuning_jobs( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListHyperparameterTuningJobsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, request=request, response=response, ) # Done; return the response. @@ -1584,36 +1257,26 @@ def delete_hyperparameter_tuning_job( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.DeleteHyperparameterTuningJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.DeleteHyperparameterTuningJobRequest): - request = job_service.DeleteHyperparameterTuningJobRequest(request) + request = job_service.DeleteHyperparameterTuningJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.delete_hyperparameter_tuning_job - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.delete_hyperparameter_tuning_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -1676,36 +1339,26 @@ def cancel_hyperparameter_tuning_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CancelHyperparameterTuningJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.CancelHyperparameterTuningJobRequest): - request = job_service.CancelHyperparameterTuningJobRequest(request) + request = job_service.CancelHyperparameterTuningJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.cancel_hyperparameter_tuning_job - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_hyperparameter_tuning_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -1764,38 +1417,28 @@ def create_batch_prediction_job( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, batch_prediction_job]) - if request is not None and has_flattened_params: + if request is not None and any([parent, batch_prediction_job]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CreateBatchPredictionJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.CreateBatchPredictionJobRequest): - request = job_service.CreateBatchPredictionJobRequest(request) + request = job_service.CreateBatchPredictionJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent - if batch_prediction_job is not None: - request.batch_prediction_job = batch_prediction_job + if parent is not None: + request.parent = parent + if batch_prediction_job is not None: + request.batch_prediction_job = batch_prediction_job # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.create_batch_prediction_job - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + rpc = gapic_v1.method.wrap_method( + self._transport.create_batch_prediction_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -1848,29 +1491,27 @@ def get_batch_prediction_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.GetBatchPredictionJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.GetBatchPredictionJobRequest): - request = job_service.GetBatchPredictionJobRequest(request) + request = job_service.GetBatchPredictionJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_batch_prediction_job] + rpc = gapic_v1.method.wrap_method( + self._transport.get_batch_prediction_job, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1925,31 +1566,27 @@ def list_batch_prediction_jobs( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: + if request is not None and any([parent]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.ListBatchPredictionJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.ListBatchPredictionJobsRequest): - request = job_service.ListBatchPredictionJobsRequest(request) + request = job_service.ListBatchPredictionJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.list_batch_prediction_jobs - ] + rpc = gapic_v1.method.wrap_method( + self._transport.list_batch_prediction_jobs, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -1963,7 +1600,7 @@ def list_batch_prediction_jobs( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListBatchPredictionJobsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, request=request, response=response, ) # Done; return the response. @@ -2024,36 +1661,26 @@ def delete_batch_prediction_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.DeleteBatchPredictionJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.DeleteBatchPredictionJobRequest): - request = job_service.DeleteBatchPredictionJobRequest(request) + request = job_service.DeleteBatchPredictionJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.delete_batch_prediction_job - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.delete_batch_prediction_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. 
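# A minimal sketch of what the module-level _client_info passed to the wrap_method calls
# above resolves to (its definition appears after the last method below): the installed
# google-cloud-aiplatform version is stamped into the x-goog-api-client user agent, and a
# bare ClientInfo is used when the distribution is not installed, e.g. a source checkout.
import pkg_resources
from google.api_core import gapic_v1

try:
    gapic_version = pkg_resources.get_distribution(
        "google-cloud-aiplatform"
    ).version
except pkg_resources.DistributionNotFound:
    gapic_version = None

info = gapic_v1.client_info.ClientInfo(gapic_version=gapic_version)
print(info.to_user_agent())  # a string like "gl-python/3.8.6 ... gapic/0.3.1"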
@@ -2114,36 +1741,26 @@ def cancel_batch_prediction_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a job_service.CancelBatchPredictionJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, job_service.CancelBatchPredictionJobRequest): - request = job_service.CancelBatchPredictionJobRequest(request) + request = job_service.CancelBatchPredictionJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.cancel_batch_prediction_job - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_batch_prediction_job, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -2153,13 +1770,13 @@ def cancel_batch_prediction_job( try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-aiplatform", ).version, ) except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + _client_info = gapic_v1.client_info.ClientInfo() __all__ = ("JobServiceClient",) diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py index e3e7d6aeda..2530414b9a 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py @@ -16,24 +16,17 @@ # from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation as ga_operation # type: ignore -from google.api_core import operation_async # type: ignore +from google.api_core import operation as ga_operation from 
google.cloud.aiplatform_v1beta1.services.pipeline_service import pagers from google.cloud.aiplatform_v1beta1.types import model from google.cloud.aiplatform_v1beta1.types import operation as gca_operation @@ -48,9 +41,8 @@ from google.protobuf import timestamp_pb2 as timestamp # type: ignore from google.rpc import status_pb2 as status # type: ignore -from .transports.base import PipelineServiceTransport, DEFAULT_CLIENT_INFO +from .transports.base import PipelineServiceTransport from .transports.grpc import PipelineServiceGrpcTransport -from .transports.grpc_asyncio import PipelineServiceGrpcAsyncIOTransport class PipelineServiceClientMeta(type): @@ -65,7 +57,6 @@ class PipelineServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[PipelineServiceTransport]] _transport_registry["grpc"] = PipelineServiceGrpcTransport - _transport_registry["grpc_asyncio"] = PipelineServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[PipelineServiceTransport]: """Return an appropriate transport class. @@ -89,38 +80,8 @@ def get_transport_class(cls, label: str = None,) -> Type[PipelineServiceTranspor class PipelineServiceClient(metaclass=PipelineServiceClientMeta): """A service for creating and managing AI Platform's pipelines.""" - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "aiplatform.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT + DEFAULT_OPTIONS = ClientOptions.ClientOptions( + api_endpoint="aiplatform.googleapis.com" ) @classmethod @@ -143,31 +104,6 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file - @property - def transport(self) -> PipelineServiceTransport: - """Return the transport used by the client instance. - - Returns: - PipelineServiceTransport: The transport used by the client instance. 
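# A standalone sketch of the _get_default_mtls_endpoint helper removed above (same logic,
# with the regex group names written out), showing what the removed DEFAULT_MTLS_ENDPOINT
# constant evaluated to; not part of the patch itself.
import re

def get_default_mtls_endpoint(api_endpoint):
    if not api_endpoint:
        return api_endpoint
    m = re.match(
        r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?"
        r"(?P<googledomain>\.googleapis\.com)?",
        api_endpoint,
    )
    name, mtls, sandbox, googledomain = m.groups()
    if mtls or not googledomain:
        return api_endpoint
    if sandbox:
        return api_endpoint.replace(
            "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
        )
    return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

assert (
    get_default_mtls_endpoint("aiplatform.googleapis.com")
    == "aiplatform.mtls.googleapis.com"
)
assert (
    get_default_mtls_endpoint("aiplatform.mtls.googleapis.com")
    == "aiplatform.mtls.googleapis.com"
)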
- """ - return self._transport - - @staticmethod - def endpoint_path(project: str, location: str, endpoint: str,) -> str: - """Return a fully-qualified endpoint string.""" - return "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, location=location, endpoint=endpoint, - ) - - @staticmethod - def parse_endpoint_path(path: str) -> Dict[str, str]: - """Parse a endpoint path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/endpoints/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - @staticmethod def model_path(project: str, location: str, model: str,) -> str: """Return a fully-qualified model string.""" @@ -175,15 +111,6 @@ def model_path(project: str, location: str, model: str,) -> str: project=project, location=location, model=model, ) - @staticmethod - def parse_model_path(path: str) -> Dict[str, str]: - """Parse a model path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - @staticmethod def training_pipeline_path( project: str, location: str, training_pipeline: str, @@ -193,81 +120,12 @@ def training_pipeline_path( project=project, location=location, training_pipeline=training_pipeline, ) - @staticmethod - def parse_training_pipeline_path(path: str) -> Dict[str, str]: - """Parse a training_pipeline path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/trainingPipelines/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" - return "projects/{project}".format(project=project,) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, location=location, - ) - - @staticmethod - def 
parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, - transport: Union[str, PipelineServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + credentials: credentials.Credentials = None, + transport: Union[str, PipelineServiceTransport] = None, + client_options: ClientOptions.ClientOptions = DEFAULT_OPTIONS, ) -> None: """Instantiate the pipeline service client. @@ -280,102 +138,26 @@ def __init__( transport (Union[str, ~.PipelineServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. + client_options (ClientOptions): Custom options for the client. """ if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) - ) - - ssl_credentials = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - is_mtls = True - else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" - ) + client_options = ClientOptions.from_dict(client_options) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, PipelineServiceTransport): - # transport is a PipelineServiceTransport instance. - if credentials or client_options.credentials_file: + if credentials: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." - ) self._transport = transport else: Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, - quota_project_id=client_options.quota_project_id, - client_info=client_info, + host=client_options.api_endpoint or "aiplatform.googleapis.com", ) def create_training_pipeline( @@ -428,36 +210,28 @@ def create_training_pipeline( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, training_pipeline]) - if request is not None and has_flattened_params: + if request is not None and any([parent, training_pipeline]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pipeline_service.CreateTrainingPipelineRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pipeline_service.CreateTrainingPipelineRequest): - request = pipeline_service.CreateTrainingPipelineRequest(request) + request = pipeline_service.CreateTrainingPipelineRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent - if training_pipeline is not None: - request.training_pipeline = training_pipeline + if parent is not None: + request.parent = parent + if training_pipeline is not None: + request.training_pipeline = training_pipeline # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_training_pipeline] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + rpc = gapic_v1.method.wrap_method( + self._transport.create_training_pipeline, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -509,29 +283,27 @@ def get_training_pipeline( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pipeline_service.GetTrainingPipelineRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pipeline_service.GetTrainingPipelineRequest): - request = pipeline_service.GetTrainingPipelineRequest(request) + request = pipeline_service.GetTrainingPipelineRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_training_pipeline] + rpc = gapic_v1.method.wrap_method( + self._transport.get_training_pipeline, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -586,29 +358,27 @@ def list_training_pipelines( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: + if request is not None and any([parent]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pipeline_service.ListTrainingPipelinesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pipeline_service.ListTrainingPipelinesRequest): - request = pipeline_service.ListTrainingPipelinesRequest(request) + request = pipeline_service.ListTrainingPipelinesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_training_pipelines] + rpc = gapic_v1.method.wrap_method( + self._transport.list_training_pipelines, + default_timeout=None, + client_info=_client_info, + ) # Certain fields should be provided within the metadata header; # add these here. @@ -622,7 +392,7 @@ def list_training_pipelines( # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. 
response = pagers.ListTrainingPipelinesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, request=request, response=response, ) # Done; return the response. @@ -682,34 +452,26 @@ def delete_training_pipeline( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pipeline_service.DeleteTrainingPipelineRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pipeline_service.DeleteTrainingPipelineRequest): - request = pipeline_service.DeleteTrainingPipelineRequest(request) + request = pipeline_service.DeleteTrainingPipelineRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_training_pipeline] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.delete_training_pipeline, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -771,34 +533,26 @@ def cancel_training_pipeline( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: + if request is not None and any([name]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pipeline_service.CancelTrainingPipelineRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pipeline_service.CancelTrainingPipelineRequest): - request = pipeline_service.CancelTrainingPipelineRequest(request) + request = pipeline_service.CancelTrainingPipelineRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_training_pipeline] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_training_pipeline, + default_timeout=None, + client_info=_client_info, ) # Send the request. @@ -808,13 +562,13 @@ def cancel_training_pipeline( try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-aiplatform", ).version, ) except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + _client_info = gapic_v1.client_info.ClientInfo() __all__ = ("PipelineServiceClient",) diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py index 9a5976d697..dbdf226471 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py @@ -16,29 +16,22 @@ # from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import prediction_service from google.protobuf import struct_pb2 as struct # type: ignore -from .transports.base import PredictionServiceTransport, DEFAULT_CLIENT_INFO +from .transports.base import PredictionServiceTransport from .transports.grpc import PredictionServiceGrpcTransport -from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport class PredictionServiceClientMeta(type): @@ -53,7 +46,6 @@ class PredictionServiceClientMeta(type): OrderedDict() ) # type: Dict[str, Type[PredictionServiceTransport]] _transport_registry["grpc"] = PredictionServiceGrpcTransport - _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport def get_transport_class( cls, label: str = None, @@ -79,38 +71,8 @@ def get_transport_class( class PredictionServiceClient(metaclass=PredictionServiceClientMeta): """A service for online predictions and explanations.""" - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "aiplatform.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT + DEFAULT_OPTIONS = ClientOptions.ClientOptions( + api_endpoint="aiplatform.googleapis.com" ) @classmethod @@ -133,97 +95,12 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file - @property - def transport(self) -> PredictionServiceTransport: - """Return the transport used by the client instance. - - Returns: - PredictionServiceTransport: The transport used by the client instance. - """ - return self._transport - - @staticmethod - def endpoint_path(project: str, location: str, endpoint: str,) -> str: - """Return a fully-qualified endpoint string.""" - return "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, location=location, endpoint=endpoint, - ) - - @staticmethod - def parse_endpoint_path(path: str) -> Dict[str, str]: - """Parse a endpoint path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/endpoints/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" - return "projects/{project}".format(project=project,) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component 
segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, - transport: Union[str, PredictionServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + credentials: credentials.Credentials = None, + transport: Union[str, PredictionServiceTransport] = None, + client_options: ClientOptions.ClientOptions = DEFAULT_OPTIONS, ) -> None: """Instantiate the prediction service client. @@ -236,102 +113,26 @@ def __init__( transport (Union[str, ~.PredictionServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. + client_options (ClientOptions): Custom options for the client. """ if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) - ) - - ssl_credentials = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - is_mtls = True - else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" - ) + client_options = ClientOptions.from_dict(client_options) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, PredictionServiceTransport): - # transport is a PredictionServiceTransport instance. - if credentials or client_options.credentials_file: + if credentials: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." - ) self._transport = transport else: Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, - quota_project_id=client_options.quota_project_id, - client_info=client_info, + host=client_options.api_endpoint or "aiplatform.googleapis.com", ) def predict( @@ -399,39 +200,28 @@ def predict( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([endpoint, instances, parameters]) - if request is not None and has_flattened_params: + if request is not None and any([endpoint, instances, parameters]): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a prediction_service.PredictRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, prediction_service.PredictRequest): - request = prediction_service.PredictRequest(request) + request = prediction_service.PredictRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if endpoint is not None: - request.endpoint = endpoint - if parameters is not None: - request.parameters = parameters - - if instances: - request.instances.extend(instances) + if endpoint is not None: + request.endpoint = endpoint + if instances is not None: + request.instances.extend(instances) + if parameters is not None: + request.parameters = parameters # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.predict] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("endpoint", request.endpoint),)), + rpc = gapic_v1.method.wrap_method( + self._transport.predict, default_timeout=None, client_info=_client_info, ) # Send the request. @@ -454,13 +244,11 @@ def explain( ) -> prediction_service.ExplainResponse: r"""Perform an online explanation. - If - ``deployed_model_id`` - is specified, the corresponding DeployModel must have + If [ExplainRequest.deployed_model_id] is specified, the + corresponding DeployModel must have ``explanation_spec`` - populated. If - ``deployed_model_id`` - is not specified, all DeployedModels must have + populated. If [ExplainRequest.deployed_model_id] is not + specified, all DeployedModels must have ``explanation_spec`` populated. Only deployed AutoML tabular Models have explanation_spec. @@ -524,41 +312,32 @@ def explain( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([endpoint, instances, parameters, deployed_model_id]) - if request is not None and has_flattened_params: + if request is not None and any( + [endpoint, instances, parameters, deployed_model_id] + ): raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a prediction_service.ExplainRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, prediction_service.ExplainRequest): - request = prediction_service.ExplainRequest(request) + request = prediction_service.ExplainRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if endpoint is not None: - request.endpoint = endpoint - if parameters is not None: - request.parameters = parameters - if deployed_model_id is not None: - request.deployed_model_id = deployed_model_id - - if instances: - request.instances.extend(instances) + if endpoint is not None: + request.endpoint = endpoint + if instances is not None: + request.instances.extend(instances) + if parameters is not None: + request.parameters = parameters + if deployed_model_id is not None: + request.deployed_model_id = deployed_model_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.explain] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("endpoint", request.endpoint),)), + rpc = gapic_v1.method.wrap_method( + self._transport.explain, default_timeout=None, client_info=_client_info, ) # Send the request. 
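A minimal usage sketch (not part of this diff): the hunks above swap the wrapped-method dispatch for a direct gapic_v1.method.wrap_method call, but the flattened-versus-request-object contract of predict()/explain() is unchanged. The sketch assumes application-default credentials and an already deployed endpoint; the project, location, endpoint ID, and feature names are placeholders.

    from google.cloud.aiplatform_v1beta1.services.prediction_service import (
        PredictionServiceClient,
    )
    from google.cloud.aiplatform_v1beta1.types import prediction_service
    from google.protobuf import struct_pb2

    client = PredictionServiceClient()
    endpoint = "projects/my-project/locations/us-central1/endpoints/1234567890"

    # Flattened arguments: the client copies these onto a PredictRequest.
    instance = struct_pb2.Value()
    instance.struct_value.update({"feature_a": 1.0, "feature_b": "x"})
    response = client.predict(endpoint=endpoint, instances=[instance])

    # Equivalent request-object form; passing the request together with any
    # flattened field raises ValueError, as the check above enforces.
    request = prediction_service.PredictRequest(endpoint=endpoint)
    request.instances.append(instance)
    response = client.predict(request=request)
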
@@ -569,13 +348,13 @@ def explain( try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-aiplatform", ).version, ) except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + _client_info = gapic_v1.client_info.ClientInfo() __all__ = ("PredictionServiceClient",) diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py index 1a102e1a61..55824a233c 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py @@ -15,20 +15,16 @@ # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore from google.auth import credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.aiplatform_v1beta1.types import prediction_service -from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO +from .base import PredictionServiceTransport class PredictionServiceGrpcTransport(PredictionServiceTransport): @@ -44,21 +40,12 @@ class PredictionServiceGrpcTransport(PredictionServiceTransport): top of HTTP/2); the ``grpcio`` package must be installed. """ - _stubs: Dict[str, Callable] - def __init__( self, *, host: str = "aiplatform.googleapis.com", credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + channel: grpc.Channel = None ) -> None: """Instantiate the transport. @@ -70,123 +57,28 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. """ - self._ssl_channel_credentials = ssl_channel_credentials - + # Sanity check: Ensure that channel and credentials are not both + # provided. if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - + # Run the base constructor. + super().__init__(host=host, credentials=credentials) self._stubs = {} # type: Dict[str, Callable] - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # If a channel was explicitly provided, set it. + if channel: + self._grpc_channel = channel @classmethod def create_channel( cls, host: str = "aiplatform.googleapis.com", credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, + **kwargs ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -196,37 +88,30 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - **kwargs, + host, credentials=credentials, scopes=cls.AUTH_SCOPES, **kwargs ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. return self._grpc_channel @property @@ -267,13 +152,11 @@ def explain( Perform an online explanation. - If - ``deployed_model_id`` - is specified, the corresponding DeployModel must have + If [ExplainRequest.deployed_model_id] is specified, the + corresponding DeployModel must have ``explanation_spec`` - populated. If - ``deployed_model_id`` - is not specified, all DeployedModels must have + populated. If [ExplainRequest.deployed_model_id] is not + specified, all DeployedModels must have ``explanation_spec`` populated. Only deployed AutoML tabular Models have explanation_spec. diff --git a/google/cloud/aiplatform_v1beta1/types/explanation.py b/google/cloud/aiplatform_v1beta1/types/explanation.py index 7a495fff1e..5e20ef2699 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation.py @@ -31,20 +31,17 @@ "ExplanationSpec", "ExplanationParameters", "SampledShapleyAttribution", - "IntegratedGradientsAttribution", - "XraiAttribution", - "SmoothGradConfig", - "FeatureNoiseSigma", }, ) class Explanation(proto.Message): - r"""Explanation of a prediction (provided in - ``PredictResponse.predictions``) - produced by the Model on a given + r"""Explanation of a ``prediction`` produced + by the Model on a given ``instance``. + Currently, only AutoML tabular Models support explanation. + Attributes: attributions (Sequence[~.explanation.Attribution]): Output only. Feature attributions grouped by predicted @@ -59,16 +56,6 @@ class Explanation(proto.Message): ``Attribution.output_index`` can be used to identify which output this attribution is explaining. - - If users set - ``ExplanationParameters.top_k``, - the attributions are sorted by - ``instance_output_value`` - in descending order. If - ``ExplanationParameters.output_indices`` - is specified, the attributions are stored by - ``Attribution.output_index`` - in the same order as they appear in the output_indices. 
""" attributions = proto.RepeatedField(proto.MESSAGE, number=1, message="Attribution",) @@ -77,6 +64,8 @@ class Explanation(proto.Message): class ModelExplanation(proto.Message): r"""Aggregated explanation metrics for a Model over a set of instances. + Currently, only AutoML tabular Models support aggregated + explanation. Attributes: mean_attributions (Sequence[~.explanation.Attribution]): @@ -125,8 +114,9 @@ class Attribution(proto.Message): The field name of the output is determined by the key in ``ExplanationMetadata.outputs``. - If the Model's predicted output has multiple dimensions - (rank > 1), this is the value in the output located by + If the Model predicted output is a tensor value (for + example, an ndarray), this is the value in the output + located by ``output_index``. If there are multiple baselines, their output values are @@ -137,15 +127,16 @@ class Attribution(proto.Message): name of the output is determined by the key in ``ExplanationMetadata.outputs``. - If the Model predicted output has multiple dimensions, this - is the value in the output located by + If the Model predicted output is a tensor value (for + example, an ndarray), this is the value in the output + located by ``output_index``. feature_attributions (~.struct.Value): Output only. Attributions of each explained feature. Features are extracted from the [prediction instances][google.cloud.aiplatform.v1beta1.ExplainRequest.instances] - according to [explanation metadata for - inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. + according to [explanation input + metadata][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. The value is a struct, whose keys are the name of the feature. The values are how much the feature in the @@ -183,11 +174,11 @@ class Attribution(proto.Message): output. If the prediction output is a scalar value, output_index is - not populated. If the prediction output has multiple - dimensions, the length of the output_index list is the same - as the number of dimensions of the output. The i-th element - in output_index is the element index of the i-th dimension - of the output vector. Indices start from 0. + not populated. If the prediction output is a tensor value + (for example, an ndarray), the length of output_index is the + same as the number of dimensions of the output. The i-th + element in output_index is the element index of the i-th + dimension of the output vector. Indexes start from 0. output_display_name (str): Output only. The display name of the output identified by ``output_index``, @@ -204,48 +195,24 @@ class Attribution(proto.Message): caused by approximation used in the explanation method. Lower value means more precise attributions. - - For [Sampled Shapley - attribution][ExplanationParameters.sampled_shapley_attribution], - increasing - ``path_count`` - may reduce the error. - - For [Integrated Gradients - attribution][ExplanationParameters.integrated_gradients_attribution], - increasing - ``step_count`` - may reduce the error. - - For [XRAI - attribution][ExplanationParameters.xrai_attribution], - increasing - ``step_count`` - may reduce the error. - - Refer to AI Explanations Whitepaper for more details: - - https://storage.googleapis.com/cloud-ai-whitepapers/AI%20Explainability%20Whitepaper.pdf - output_name (str): - Output only. Name of the explain output. Specified as the - key in - ``ExplanationMetadata.outputs``. + For Sampled Shapley + ``attribution``, + increasing + ``path_count`` + might reduce the error. 
""" baseline_output_value = proto.Field(proto.DOUBLE, number=1) - instance_output_value = proto.Field(proto.DOUBLE, number=2) - feature_attributions = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) - output_index = proto.RepeatedField(proto.INT32, number=4) - output_display_name = proto.Field(proto.STRING, number=5) - approximation_error = proto.Field(proto.DOUBLE, number=6) - output_name = proto.Field(proto.STRING, number=7) - class ExplanationSpec(proto.Message): r"""Specification of Model explanation. + Currently, only AutoML tabular Models support explanation. Attributes: parameters (~.explanation.ExplanationParameters): @@ -257,7 +224,6 @@ class ExplanationSpec(proto.Message): """ parameters = proto.Field(proto.MESSAGE, number=1, message="ExplanationParameters",) - metadata = proto.Field( proto.MESSAGE, number=2, message=explanation_metadata.ExplanationMetadata, ) @@ -272,69 +238,13 @@ class ExplanationParameters(proto.Message): Shapley values for features that contribute to the label being predicted. A sampling strategy is used to approximate the value rather than - considering all subsets of features. Refer to - this paper for model details: - https://arxiv.org/abs/1306.4265. - integrated_gradients_attribution (~.explanation.IntegratedGradientsAttribution): - An attribution method that computes Aumann- - hapley values taking advantage of the model's - fully differentiable structure. Refer to this - paper for more details: - https://arxiv.org/abs/1703.01365 - xrai_attribution (~.explanation.XraiAttribution): - An attribution method that redistributes - Integrated Gradients attribution to segmented - regions, taking advantage of the model's fully - differentiable structure. Refer to this paper - for more details: - https://arxiv.org/abs/1906.02825 - XRAI currently performs better on natural - images, like a picture of a house or an animal. - If the images are taken in artificial - environments, like a lab or manufacturing line, - or from diagnostic equipment, like x-rays or - quality-control cameras, use Integrated - Gradients instead. - top_k (int): - If populated, returns attributions for top K - indices of outputs (defaults to 1). Only applies - to Models that predicts more than one outputs - (e,g, multi-class Models). When set to -1, - returns explanations for all outputs. - output_indices (~.struct.ListValue): - If populated, only returns attributions that have - ``output_index`` contained in - output_indices. It must be an ndarray of integers, with the - same shape of the output it's explaining. - - If not populated, returns attributions for - ``top_k`` - indices of outputs. If neither top_k nor output_indeices is - populated, returns the argmax index of the outputs. - - Only applicable to Models that predict multiple outputs - (e,g, multi-class Models that predict multiple classes). + considering all subsets of features. 
""" sampled_shapley_attribution = proto.Field( - proto.MESSAGE, number=1, oneof="method", message="SampledShapleyAttribution", + proto.MESSAGE, number=1, message="SampledShapleyAttribution", ) - integrated_gradients_attribution = proto.Field( - proto.MESSAGE, - number=2, - oneof="method", - message="IntegratedGradientsAttribution", - ) - - xrai_attribution = proto.Field( - proto.MESSAGE, number=3, oneof="method", message="XraiAttribution", - ) - - top_k = proto.Field(proto.INT32, number=4) - - output_indices = proto.Field(proto.MESSAGE, number=5, message=struct.ListValue,) - class SampledShapleyAttribution(proto.Message): r"""An attribution method that approximates Shapley values for @@ -353,163 +263,4 @@ class SampledShapleyAttribution(proto.Message): path_count = proto.Field(proto.INT32, number=1) -class IntegratedGradientsAttribution(proto.Message): - r"""An attribution method that computes the Aumann-Shapley value - taking advantage of the model's fully differentiable structure. - Refer to this paper for more details: - https://arxiv.org/abs/1703.01365 - - Attributes: - step_count (int): - Required. The number of steps for approximating the path - integral. A good value to start is 50 and gradually increase - until the sum to diff property is within the desired error - range. - - Valid range of its value is [1, 100], inclusively. - smooth_grad_config (~.explanation.SmoothGradConfig): - Config for SmoothGrad approximation of - gradients. - When enabled, the gradients are approximated by - averaging the gradients from noisy samples in - the vicinity of the inputs. Adding noise can - help improve the computed gradients. Refer to - this paper for more details: - https://arxiv.org/pdf/1706.03825.pdf - """ - - step_count = proto.Field(proto.INT32, number=1) - - smooth_grad_config = proto.Field( - proto.MESSAGE, number=2, message="SmoothGradConfig", - ) - - -class XraiAttribution(proto.Message): - r"""An explanation method that redistributes Integrated Gradients - attributions to segmented regions, taking advantage of the model's - fully differentiable structure. Refer to this paper for more - details: https://arxiv.org/abs/1906.02825 - - Only supports image Models (``modality`` is - IMAGE). - - Attributes: - step_count (int): - Required. The number of steps for approximating the path - integral. A good value to start is 50 and gradually increase - until the sum to diff property is met within the desired - error range. - - Valid range of its value is [1, 100], inclusively. - smooth_grad_config (~.explanation.SmoothGradConfig): - Config for SmoothGrad approximation of - gradients. - When enabled, the gradients are approximated by - averaging the gradients from noisy samples in - the vicinity of the inputs. Adding noise can - help improve the computed gradients. Refer to - this paper for more details: - https://arxiv.org/pdf/1706.03825.pdf - """ - - step_count = proto.Field(proto.INT32, number=1) - - smooth_grad_config = proto.Field( - proto.MESSAGE, number=2, message="SmoothGradConfig", - ) - - -class SmoothGradConfig(proto.Message): - r"""Config for SmoothGrad approximation of gradients. - When enabled, the gradients are approximated by averaging the - gradients from noisy samples in the vicinity of the inputs. - Adding noise can help improve the computed gradients. Refer to - this paper for more details: - https://arxiv.org/pdf/1706.03825.pdf - - Attributes: - noise_sigma (float): - This is a single float value and will be used to add noise - to all the features. 
Use this field when all features are - normalized to have the same distribution: scale to range [0, - 1], [-1, 1] or z-scoring, where features are normalized to - have 0-mean and 1-variance. Refer to this doc for more - details about normalization: - - https://developers.google.com/machine-learning/data-prep/transform/normalization. - - For best results the recommended value is about 10% - 20% of - the standard deviation of the input feature. Refer to - section 3.2 of the SmoothGrad paper: - https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. - - If the distribution is different per feature, set - ``feature_noise_sigma`` - instead for each feature. - feature_noise_sigma (~.explanation.FeatureNoiseSigma): - This is similar to - ``noise_sigma``, - but provides additional flexibility. A separate noise sigma - can be provided for each feature, which is useful if their - distributions are different. No noise is added to features - that are not set. If this field is unset, - ``noise_sigma`` - will be used for all features. - noisy_sample_count (int): - The number of gradient samples to use for approximation. The - higher this number, the more accurate the gradient is, but - the runtime complexity increases by this factor as well. - Valid range of its value is [1, 50]. Defaults to 3. - """ - - noise_sigma = proto.Field(proto.FLOAT, number=1, oneof="GradientNoiseSigma") - - feature_noise_sigma = proto.Field( - proto.MESSAGE, - number=2, - oneof="GradientNoiseSigma", - message="FeatureNoiseSigma", - ) - - noisy_sample_count = proto.Field(proto.INT32, number=3) - - -class FeatureNoiseSigma(proto.Message): - r"""Noise sigma by features. Noise sigma represents the standard - deviation of the gaussian kernel that will be used to add noise - to interpolated inputs prior to computing gradients. - - Attributes: - noise_sigma (Sequence[~.explanation.FeatureNoiseSigma.NoiseSigmaForFeature]): - Noise sigma per feature. No noise is added to - features that are not set. - """ - - class NoiseSigmaForFeature(proto.Message): - r"""Noise sigma for a single feature. - - Attributes: - name (str): - The name of the input feature for which noise sigma is - provided. The features are defined in [explanation metadata - inputs][google.cloud.aiplatform.v1beta1.ExplanationMetadata.inputs]. - sigma (float): - This represents the standard deviation of the Gaussian - kernel that will be used to add noise to the feature prior - to computing gradients. Similar to - ``noise_sigma`` - but represents the noise added to the current feature. - Defaults to 0.1. - """ - - name = proto.Field(proto.STRING, number=1) - - sigma = proto.Field(proto.FLOAT, number=2) - - noise_sigma = proto.RepeatedField( - proto.MESSAGE, number=1, message=NoiseSigmaForFeature, - ) - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py index 7261c064f8..1b9f005857 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py @@ -40,24 +40,12 @@ class ExplanationMetadata(proto.Message): which has the name specified as the key in ``ExplanationMetadata.inputs``. The baseline of the empty feature is chosen by AI Platform. - - For AI Platform provided Tensorflow images, the key can be - any friendly name of the feature . 
Once specified, [ - featureAttributions][Attribution.feature_attributions] will - be keyed by this key (if not grouped with another feature). - - For custom images, the key must match with the key in - ``instance``. outputs (Sequence[~.explanation_metadata.ExplanationMetadata.OutputsEntry]): Required. Map from output names to output metadata. - For AI Platform provided Tensorflow images, keys - can be any string user defines. - - For custom images, keys are the name of the - output field in the prediction to be explained. - - Currently only one key is allowed. + Keys are the name of the output field in the + prediction to be explained. Currently only one + key is allowed. feature_attributions_schema_uri (str): Points to a YAML file stored on Google Cloud Storage describing the format of the [feature @@ -74,11 +62,6 @@ class ExplanationMetadata(proto.Message): class InputMetadata(proto.Message): r"""Metadata of the input of a feature. - Fields other than - ``InputMetadata.input_baselines`` - are applicable only for Models that are using AI Platform-provided - images for Tensorflow. - Attributes: input_baselines (Sequence[~.struct.Value]): Baseline inputs for this feature. @@ -88,271 +71,20 @@ class InputMetadata(proto.Message): specified, AI Platform returns the average attributions across them in [Attributions.baseline_attribution][]. - For AI Platform provided Tensorflow images (both 1.x and - 2.x), the shape of each baseline must match the shape of the - input tensor. If a scalar is provided, we broadcast to the - same shape as the input tensor. - - For custom images, the element of the baselines must be in - the same format as the feature's input in the + The element of the baselines must be in the same format as + the feature's input in the ``instance``[]. The schema of any single instance may be specified via Endpoint's DeployedModels' [Model's][google.cloud.aiplatform.v1beta1.DeployedModel.model] [PredictSchemata's][google.cloud.aiplatform.v1beta1.Model.predict_schemata] ``instance_schema_uri``. - input_tensor_name (str): - Name of the input tensor for this feature. - Required and is only applicable to AI Platform - provided images for Tensorflow. - encoding (~.explanation_metadata.ExplanationMetadata.InputMetadata.Encoding): - Defines how the feature is encoded into the - input tensor. Defaults to IDENTITY. - modality (str): - Modality of the feature. Valid values are: - numeric, image. Defaults to numeric. - feature_value_domain (~.explanation_metadata.ExplanationMetadata.InputMetadata.FeatureValueDomain): - The domain details of the input feature - value. Like min/max, original mean or standard - deviation if normalized. - indices_tensor_name (str): - Specifies the index of the values of the input tensor. - Required when the input tensor is a sparse representation. - Refer to Tensorflow documentation for more details: - https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor. - dense_shape_tensor_name (str): - Specifies the shape of the values of the input if the input - is a sparse representation. Refer to Tensorflow - documentation for more details: - https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor. - index_feature_mapping (Sequence[str]): - A list of feature names for each index in the input tensor. - Required when the input - ``InputMetadata.encoding`` - is BAG_OF_FEATURES, BAG_OF_FEATURES_SPARSE, INDICATOR. - encoded_tensor_name (str): - Encoded tensor is a transformation of the input tensor. 
Must - be provided if choosing [Integrated Gradients - attribution][ExplanationParameters.integrated_gradients_attribution] - or [XRAI - attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution] - and the input tensor is not differentiable. - - An encoded tensor is generated if the input tensor is - encoded by a lookup table. - encoded_baselines (Sequence[~.struct.Value]): - A list of baselines for the encoded tensor. - The shape of each baseline should match the - shape of the encoded tensor. If a scalar is - provided, AI Platform broadcast to the same - shape as the encoded tensor. - visualization (~.explanation_metadata.ExplanationMetadata.InputMetadata.Visualization): - Visualization configurations for image - explanation. - group_name (str): - Name of the group that the input belongs to. Features with - the same group name will be treated as one feature when - computing attributions. Features grouped together can have - different shapes in value. If provided, there will be one - single attribution generated in [ - featureAttributions][Attribution.feature_attributions], - keyed by the group name. """ - class Encoding(proto.Enum): - r"""Defines how the feature is encoded to [encoded_tensor][]. Defaults - to IDENTITY. - """ - ENCODING_UNSPECIFIED = 0 - IDENTITY = 1 - BAG_OF_FEATURES = 2 - BAG_OF_FEATURES_SPARSE = 3 - INDICATOR = 4 - COMBINED_EMBEDDING = 5 - CONCAT_EMBEDDING = 6 - - class FeatureValueDomain(proto.Message): - r"""Domain details of the input feature value. Provides numeric - information about the feature, such as its range (min, max). If the - feature has been pre-processed, for example with z-scoring, then it - provides information about how to recover the original feature. For - example, if the input feature is an image and it has been - pre-processed to obtain 0-mean and stddev = 1 values, then - original_mean, and original_stddev refer to the mean and stddev of - the original feature (e.g. image tensor) from which input feature - (with mean = 0 and stddev = 1) was obtained. - - Attributes: - min_value (float): - The minimum permissible value for this - feature. - max_value (float): - The maximum permissible value for this - feature. - original_mean (float): - If this input feature has been normalized to a mean value of - 0, the original_mean specifies the mean value of the domain - prior to normalization. - original_stddev (float): - If this input feature has been normalized to a standard - deviation of 1.0, the original_stddev specifies the standard - deviation of the domain prior to normalization. - """ - - min_value = proto.Field(proto.FLOAT, number=1) - - max_value = proto.Field(proto.FLOAT, number=2) - - original_mean = proto.Field(proto.FLOAT, number=3) - - original_stddev = proto.Field(proto.FLOAT, number=4) - - class Visualization(proto.Message): - r"""Visualization configurations for image explanation. - - Attributes: - type_ (~.explanation_metadata.ExplanationMetadata.InputMetadata.Visualization.Type): - Type of the image visualization. Only applicable to - [Integrated Gradients attribution] - [ExplanationParameters.integrated_gradients_attribution]. - OUTLINES shows regions of attribution, while PIXELS shows - per-pixel attribution. Defaults to OUTLINES. - polarity (~.explanation_metadata.ExplanationMetadata.InputMetadata.Visualization.Polarity): - Whether to only highlight pixels with - positive contributions, negative or both. - Defaults to POSITIVE. 
- color_map (~.explanation_metadata.ExplanationMetadata.InputMetadata.Visualization.ColorMap): - The color scheme used for the highlighted areas. - - Defaults to PINK_GREEN for [Integrated Gradients - attribution][ExplanationParameters.integrated_gradients_attribution], - which shows positive attributions in green and negative in - pink. - - Defaults to VIRIDIS for [XRAI - attribution][google.cloud.aiplatform.v1beta1.ExplanationParameters.xrai_attribution], - which highlights the most influential regions in yellow and - the least influential in blue. - clip_percent_upperbound (float): - Excludes attributions above the specified percentile from - the highlighted areas. Using the clip_percent_upperbound and - clip_percent_lowerbound together can be useful for filtering - out noise and making it easier to see areas of strong - attribution. Defaults to 99.9. - clip_percent_lowerbound (float): - Excludes attributions below the specified - percentile, from the highlighted areas. Defaults - to 35. - overlay_type (~.explanation_metadata.ExplanationMetadata.InputMetadata.Visualization.OverlayType): - How the original image is displayed in the - visualization. Adjusting the overlay can help - increase visual clarity if the original image - makes it difficult to view the visualization. - Defaults to NONE. - """ - - class Type(proto.Enum): - r"""Type of the image visualization. Only applicable to [Integrated - Gradients attribution] - [ExplanationParameters.integrated_gradients_attribution]. - """ - TYPE_UNSPECIFIED = 0 - PIXELS = 1 - OUTLINES = 2 - - class Polarity(proto.Enum): - r"""Whether to only highlight pixels with positive contributions, - negative or both. Defaults to POSITIVE. - """ - POLARITY_UNSPECIFIED = 0 - POSITIVE = 1 - NEGATIVE = 2 - BOTH = 3 - - class ColorMap(proto.Enum): - r"""The color scheme used for highlighting areas.""" - COLOR_MAP_UNSPECIFIED = 0 - PINK_GREEN = 1 - VIRIDIS = 2 - RED = 3 - GREEN = 4 - RED_GREEN = 6 - PINK_WHITE_GREEN = 5 - - class OverlayType(proto.Enum): - r"""How the original image is displayed in the visualization.""" - OVERLAY_TYPE_UNSPECIFIED = 0 - NONE = 1 - ORIGINAL = 2 - GRAYSCALE = 3 - MASK_BLACK = 4 - - type_ = proto.Field( - proto.ENUM, - number=1, - enum="ExplanationMetadata.InputMetadata.Visualization.Type", - ) - - polarity = proto.Field( - proto.ENUM, - number=2, - enum="ExplanationMetadata.InputMetadata.Visualization.Polarity", - ) - - color_map = proto.Field( - proto.ENUM, - number=3, - enum="ExplanationMetadata.InputMetadata.Visualization.ColorMap", - ) - - clip_percent_upperbound = proto.Field(proto.FLOAT, number=4) - - clip_percent_lowerbound = proto.Field(proto.FLOAT, number=5) - - overlay_type = proto.Field( - proto.ENUM, - number=6, - enum="ExplanationMetadata.InputMetadata.Visualization.OverlayType", - ) - input_baselines = proto.RepeatedField( proto.MESSAGE, number=1, message=struct.Value, ) - input_tensor_name = proto.Field(proto.STRING, number=2) - - encoding = proto.Field( - proto.ENUM, number=3, enum="ExplanationMetadata.InputMetadata.Encoding", - ) - - modality = proto.Field(proto.STRING, number=4) - - feature_value_domain = proto.Field( - proto.MESSAGE, - number=5, - message="ExplanationMetadata.InputMetadata.FeatureValueDomain", - ) - - indices_tensor_name = proto.Field(proto.STRING, number=6) - - dense_shape_tensor_name = proto.Field(proto.STRING, number=7) - - index_feature_mapping = proto.RepeatedField(proto.STRING, number=8) - - encoded_tensor_name = proto.Field(proto.STRING, number=9) - - encoded_baselines = 
proto.RepeatedField( - proto.MESSAGE, number=10, message=struct.Value, - ) - - visualization = proto.Field( - proto.MESSAGE, - number=11, - message="ExplanationMetadata.InputMetadata.Visualization", - ) - - group_name = proto.Field(proto.STRING, number=12) - class OutputMetadata(proto.Message): r"""Metadata of the prediction output to be explained. @@ -384,30 +116,19 @@ class OutputMetadata(proto.Message): of the outputs, so that it can be located by ``Attribution.output_index`` for a specific output. - output_tensor_name (str): - Name of the output tensor. Required and is - only applicable to AI Platform provided images - for Tensorflow. """ index_display_name_mapping = proto.Field( - proto.MESSAGE, number=1, oneof="display_name_mapping", message=struct.Value, - ) - - display_name_mapping_key = proto.Field( - proto.STRING, number=2, oneof="display_name_mapping" + proto.MESSAGE, number=1, message=struct.Value, ) - - output_tensor_name = proto.Field(proto.STRING, number=3) + display_name_mapping_key = proto.Field(proto.STRING, number=2) inputs = proto.MapField( proto.STRING, proto.MESSAGE, number=1, message=InputMetadata, ) - outputs = proto.MapField( proto.STRING, proto.MESSAGE, number=2, message=OutputMetadata, ) - feature_attributions_schema_uri = proto.Field(proto.STRING, number=3) diff --git a/google/cloud/aiplatform_v1beta1/types/model.py b/google/cloud/aiplatform_v1beta1/types/model.py index 21e8c41034..39cb44206e 100644 --- a/google/cloud/aiplatform_v1beta1/types/model.py +++ b/google/cloud/aiplatform_v1beta1/types/model.py @@ -70,7 +70,7 @@ class Model(proto.Message): supported_export_formats (Sequence[~.model.Model.ExportFormat]): Output only. The formats in which this Model may be exported. If empty, this Model is not - available for export. + avaiable for export. training_pipeline (str): Output only. 
The resource name of the TrainingPipeline that uploaded this Model, if @@ -272,55 +272,36 @@ class ExportableContent(proto.Enum): IMAGE = 2 id = proto.Field(proto.STRING, number=1) - exportable_contents = proto.RepeatedField( proto.ENUM, number=2, enum="Model.ExportFormat.ExportableContent", ) name = proto.Field(proto.STRING, number=1) - display_name = proto.Field(proto.STRING, number=2) - description = proto.Field(proto.STRING, number=3) - predict_schemata = proto.Field(proto.MESSAGE, number=4, message="PredictSchemata",) - metadata_schema_uri = proto.Field(proto.STRING, number=5) - metadata = proto.Field(proto.MESSAGE, number=6, message=struct.Value,) - supported_export_formats = proto.RepeatedField( proto.MESSAGE, number=20, message=ExportFormat, ) - training_pipeline = proto.Field(proto.STRING, number=7) - container_spec = proto.Field(proto.MESSAGE, number=9, message="ModelContainerSpec",) - artifact_uri = proto.Field(proto.STRING, number=26) - supported_deployment_resources_types = proto.RepeatedField( proto.ENUM, number=10, enum=DeploymentResourcesType, ) - supported_input_storage_formats = proto.RepeatedField(proto.STRING, number=11) - supported_output_storage_formats = proto.RepeatedField(proto.STRING, number=12) - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) - deployed_models = proto.RepeatedField( proto.MESSAGE, number=15, message=deployed_model_ref.DeployedModelRef, ) - explanation_spec = proto.Field( proto.MESSAGE, number=23, message=explanation.ExplanationSpec, ) - etag = proto.Field(proto.STRING, number=16) - labels = proto.MapField(proto.STRING, proto.STRING, number=17) @@ -382,253 +363,75 @@ class PredictSchemata(proto.Message): """ instance_schema_uri = proto.Field(proto.STRING, number=1) - parameters_schema_uri = proto.Field(proto.STRING, number=2) - prediction_schema_uri = proto.Field(proto.STRING, number=3) class ModelContainerSpec(proto.Message): - r"""Specification of a container for serving predictions. This message - is a subset of the Kubernetes Container v1 core - `specification `__. + r"""Specification of the container to be deployed for this Model. The + ModelContainerSpec is based on the Kubernetes Container + `specification `__. Attributes: image_uri (str): - Required. Immutable. URI of the Docker image to be used as - the custom container for serving predictions. This URI must - identify an image in Artifact Registry or Container - Registry. Learn more about the container publishing - requirements, including permissions requirements for the AI - Platform Service Agent, - `here `__. - - The container image is ingested upon + Required. Immutable. The URI of the Model serving container + file in the Container Registry. The container image is + ingested upon ``ModelService.UploadModel``, stored internally, and this original path is afterwards not used. - - To learn about the requirements for the Docker image itself, - see `Custom container - requirements `__. command (Sequence[str]): - Immutable. Specifies the command that runs when the - container starts. This overrides the container's - `ENTRYPOINT `__. - Specify this field as an array of executable and arguments, - similar to a Docker ``ENTRYPOINT``'s "exec" form, not its - "shell" form. - - If you do not specify this field, then the container's - ``ENTRYPOINT`` runs, in conjunction with the - ``args`` - field or the container's - ```CMD`` `__, - if either exists. 
If this field is not specified and the - container does not have an ``ENTRYPOINT``, then refer to the - Docker documentation about how ``CMD`` and ``ENTRYPOINT`` - `interact `__. - - If you specify this field, then you can also specify the - ``args`` field to provide additional arguments for this - command. However, if you specify this field, then the - container's ``CMD`` is ignored. See the `Kubernetes - documentation `__ about how - the ``command`` and ``args`` fields interact with a - container's ``ENTRYPOINT`` and ``CMD``. - - In this field, you can reference environment variables `set - by AI - Platform `__ - and environment variables set in the - ``env`` - field. You cannot reference environment variables set in the - Docker image. In order for environment variables to be - expanded, reference them by using the following syntax: - $(VARIABLE_NAME) Note that this differs from Bash variable - expansion, which does not use parentheses. If a variable - cannot be resolved, the reference in the input string is - used unchanged. To avoid variable expansion, you can escape - this syntax with ``$$``; for example: $$(VARIABLE_NAME) This - field corresponds to the ``command`` field of the Kubernetes - Containers `v1 core - API `__. + Immutable. The command with which the container is run. Not + executed within a shell. The Docker image's ENTRYPOINT is + used if this is not provided. Variable references + $(VAR_NAME) are expanded using the container's environment. + If a variable cannot be resolved, the reference in the input + string will be unchanged. The $(VAR_NAME) syntax can be + escaped with a double $$, ie: $$(VAR_NAME). Escaped + references will never be expanded, regardless of whether the + variable exists or not. More info: + https://tinyurl.com/y42hmlxe args (Sequence[str]): - Immutable. Specifies arguments for the command that runs - when the container starts. This overrides the container's - ```CMD`` `__. - Specify this field as an array of executable and arguments, - similar to a Docker ``CMD``'s "default parameters" form. - - If you don't specify this field but do specify the - ``command`` - field, then the command from the ``command`` field runs - without any additional arguments. See the `Kubernetes - documentation `__ about how - the ``command`` and ``args`` fields interact with a - container's ``ENTRYPOINT`` and ``CMD``. - - If you don't specify this field and don't specify the - ``command`` field, then the container's - ```ENTRYPOINT`` `__ - and ``CMD`` determine what runs based on their default - behavior. See the Docker documentation about how ``CMD`` and - ``ENTRYPOINT`` `interact `__. - - In this field, you can reference environment variables `set - by AI - Platform `__ - and environment variables set in the - ``env`` - field. You cannot reference environment variables set in the - Docker image. In order for environment variables to be - expanded, reference them by using the following syntax: - $(VARIABLE_NAME) Note that this differs from Bash variable - expansion, which does not use parentheses. If a variable - cannot be resolved, the reference in the input string is - used unchanged. To avoid variable expansion, you can escape - this syntax with ``$$``; for example: $$(VARIABLE_NAME) This - field corresponds to the ``args`` field of the Kubernetes - Containers `v1 core - API `__. + Immutable. The arguments to the command. The Docker image's + CMD is used if this is not provided. Variable references + $(VAR_NAME) are expanded using the container's environment. 
+ If a variable cannot be resolved, the reference in the input + string will be unchanged. The $(VAR_NAME) syntax can be + escaped with a double $$, ie: $$(VAR_NAME). Escaped + references will never be expanded, regardless of whether the + variable exists or not. More info: + https://tinyurl.com/y42hmlxe env (Sequence[~.env_var.EnvVar]): - Immutable. List of environment variables to set in the - container. After the container starts running, code running - in the container can read these environment variables. - - Additionally, the - ``command`` - and - ``args`` - fields can reference these variables. Later entries in this - list can also reference earlier entries. For example, the - following example sets the variable ``VAR_2`` to have the - value ``foo bar``: - - .. code:: json - - [ - { - "name": "VAR_1", - "value": "foo" - }, - { - "name": "VAR_2", - "value": "$(VAR_1) bar" - } - ] - - If you switch the order of the variables in the example, - then the expansion does not occur. - - This field corresponds to the ``env`` field of the - Kubernetes Containers `v1 core - API `__. + Immutable. The environment variables that are + to be present in the container. ports (Sequence[~.model.Port]): - Immutable. List of ports to expose from the container. AI - Platform sends any prediction requests that it receives to - the first port on this list. AI Platform also sends - `liveness and health - checks `__ to - this port. - - If you do not specify this field, it defaults to following - value: - - .. code:: json - - [ - { - "containerPort": 8080 - } - ] - - AI Platform does not use ports other than the first one - listed. This field corresponds to the ``ports`` field of the - Kubernetes Containers `v1 core - API `__. + Immutable. Declaration of ports that are + exposed by the container. This field is + primarily informational, it gives AI Platform + information about the network connections the + container uses. Listing or not a port here has + no impact on whether the port is actually + exposed, any port listening on the default + "0.0.0.0" address inside a container will be + accessible from the network. predict_route (str): - Immutable. HTTP path on the container to send prediction - requests to. AI Platform forwards requests sent using - ``projects.locations.endpoints.predict`` - to this path on the container's IP address and port. AI - Platform then returns the container's response in the API - response. - - For example, if you set this field to ``/foo``, then when AI - Platform receives a prediction request, it forwards the - request body in a POST request to the following URL on the - container: localhost:PORT/foo PORT refers to the first value - of this ``ModelContainerSpec``'s - ``ports`` - field. - - If you don't specify this field, it defaults to the - following value when you [deploy this Model to an - Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: - /v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict - The placeholders in this value are replaced as follows: - - - ENDPOINT: The last segment (following ``endpoints/``)of - the Endpoint.name][] field of the Endpoint where this - Model has been deployed. (AI Platform makes this value - available to your container code as the - ```AIP_ENDPOINT_ID`` `__ - environment variable.) - - - DEPLOYED_MODEL: - ``DeployedModel.id`` - of the ``DeployedModel``. (AI Platform makes this value - available to your container code as the - ```AIP_DEPLOYED_MODEL_ID`` environment - variable `__.) + Immutable. 
An HTTP path to send prediction + requests to the container, and which must be + supported by it. If not specified a default HTTP + path will be used by AI Platform. health_route (str): - Immutable. HTTP path on the container to send health checkss - to. AI Platform intermittently sends GET requests to this - path on the container's IP address and port to check that - the container is healthy. Read more about `health - checks `__. - - For example, if you set this field to ``/bar``, then AI - Platform intermittently sends a GET request to the following - URL on the container: localhost:PORT/bar PORT refers to the - first value of this ``ModelContainerSpec``'s - ``ports`` - field. - - If you don't specify this field, it defaults to the - following value when you [deploy this Model to an - Endpoint][google.cloud.aiplatform.v1beta1.EndpointService.DeployModel]: - /v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict - The placeholders in this value are replaced as follows: - - - ENDPOINT: The last segment (following ``endpoints/``)of - the Endpoint.name][] field of the Endpoint where this - Model has been deployed. (AI Platform makes this value - available to your container code as the - ```AIP_ENDPOINT_ID`` `__ - environment variable.) - - - DEPLOYED_MODEL: - ``DeployedModel.id`` - of the ``DeployedModel``. (AI Platform makes this value - available to your container code as the - ```AIP_DEPLOYED_MODEL_ID`` `__ - environment variable.) + Immutable. An HTTP path to send health check + requests to the container, and which must be + supported by it. If not specified a standard + HTTP path will be used by AI Platform. """ image_uri = proto.Field(proto.STRING, number=1) - command = proto.RepeatedField(proto.STRING, number=2) - args = proto.RepeatedField(proto.STRING, number=3) - env = proto.RepeatedField(proto.MESSAGE, number=4, message=env_var.EnvVar,) - ports = proto.RepeatedField(proto.MESSAGE, number=5, message="Port",) - predict_route = proto.Field(proto.STRING, number=6) - health_route = proto.Field(proto.STRING, number=7) diff --git a/synth.metadata b/synth.metadata index 9399d8c2e3..2f581d2593 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,14 +4,14 @@ "git": { "name": ".", "remote": "https://github.com/dizcology/python-aiplatform.git", - "sha": "81da030c0af8902fd54c8e7b5e92255a532d0efb" + "sha": "7e83ff65457e88aa155e68ddd959933a68da46af" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "ba9918cd22874245b55734f57470c719b577e591" + "sha": "487eba79f8260e34205d8ceb1ebcc65685085e19" } } ], diff --git a/synth.py b/synth.py index 7b92c5fd3b..31c3e11493 100644 --- a/synth.py +++ b/synth.py @@ -35,20 +35,18 @@ # version="v1beta1", # bazel_target="//google/cloud/aiplatform/v1beta1:aiplatform-v1beta1-py", # ) -library = gapic.py_library( - 'aiplatform', - 'v1beta1', - generator_version='0.20' -) +library = gapic.py_library("aiplatform", "v1beta1", generator_version="0.20") s.move( - library, - excludes=[ - "setup.py", - "README.rst", - "docs/index.rst", + library, + excludes=[ + ".kokoro", + "setup.py", + "README.rst", + "docs/index.rst", "google/cloud/aiplatform/__init__.py", - ] + "tests/unit/aiplatform_v1beta1/test_prediction_service.py", + ], ) # ---------------------------------------------------------------------------- @@ -56,32 +54,81 @@ # ---------------------------------------------------------------------------- # https://github.com/googleapis/gapic-generator-python/issues/336 -s.replace( - '**/client.py', - ' 
operation.from_gapic', - ' ga_operation.from_gapic' -) +s.replace("**/client.py", " operation.from_gapic", " ga_operation.from_gapic") s.replace( - '**/client.py', - 'client_options: ClientOptions = ', - 'client_options: ClientOptions.ClientOptions = ' + "**/client.py", + "client_options: ClientOptions = ", + "client_options: ClientOptions.ClientOptions = ", ) # https://github.com/googleapis/gapic-generator-python/issues/413 s.replace( - 'google/cloud/aiplatform_v1beta1/services/prediction_service/client.py', - 'request.instances = instances', - 'request.instances.extend(instances)' + "google/cloud/aiplatform_v1beta1/services/prediction_service/client.py", + "request.instances = instances", + "request.instances.extend(instances)", ) +# post processing to fix the generated reference doc +from synthtool import transforms as st +import re + +# https://github.com/googleapis/gapic-generator-python/issues/479 +paths = st._filter_files(st._expand_paths("google/cloud/**/*.py", ".")) + +pattern = r"(:\w+:``[^`]+``)" +expr = re.compile(pattern, flags=re.MULTILINE) +replaces = [] +for path in paths: + with path.open("r+") as fh: + content = fh.read() + matches = re.findall(expr, content) + if matches: + for match in matches: + before = match + after = match.replace("``", "`") + replaces.append((path, before, after)) + +for path, before, after in replaces: + s.replace([path], before, after) + + +# https://github.com/googleapis/gapic-generator-python/issues/483 +paths = st._filter_files(st._expand_paths("google/cloud/**/*.py", ".")) +pattern = r"(?P\[(?P[\w.]+)\]\[(?P[\w.]+)\])" +expr = re.compile(pattern, flags=re.MULTILINE) +replaces = [] +for path in paths: + with path.open("r+") as fh: + content = fh.read() + for match in expr.finditer(content): + before = match.groupdict()["full"].replace("[", "\[").replace("]", "\]") + after = match.groupdict()["first"] + after = f"``{after}``" + replaces.append((path, before, after)) + +for path, before, after in replaces: + s.replace([path], before, after) + + +s.replace("google/cloud/**/*.py", "\]\(\n\n\s*", "](") + +s.replace("google/cloud/**/*.py", "\s*//\n\s*", "") + +s.replace("google/cloud/**/*.py", "https:[\n]*\s*//", "https://") + +s.replace("google/cloud/**/*.py", "[\n]*\s*//\s*/", "/") + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(cov_level=99, microgenerator=True) s.move( - templated_files, excludes=[".coveragerc"] -) # the microgenerator has a good coveragerc file + templated_files, excludes=[".coveragerc"] +) # the microgenerator has a good coveragerc file + +# Don't treat docs warnings as errors +s.replace("noxfile.py", """["']-W["'], # warnings as errors""", "") s.shell.run(["nox", "-s", "blacken"], hide_output=False)
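For context on the reference-doc cleanup that synth.py performs above (the gapic-generator-python issue #483 workaround), the following is a minimal standalone sketch of its net effect. It is illustrative only: the sample docstring text is invented, and it uses a direct re.sub instead of synthtool's per-match s.replace calls, but it applies the same cross-reference pattern that synth.py compiles.

    # Sketch only: shows how proto cross-references of the form
    # [display.path][fully.qualified.path] are rewritten as ``display.path``
    # so Sphinx renders a literal instead of a broken link.
    import re

    CROSS_REF = re.compile(
        r"(?P<full>\[(?P<first>[\w.]+)\]\[(?P<second>[\w.]+)\])", flags=re.MULTILINE
    )

    # Hypothetical docstring fragment, not taken from the generated library.
    sample = (
        "Required. The resource name of the Location to create the job in. "
        "Format: [projects.locations][google.cloud.aiplatform.v1beta1.Location]"
    )

    cleaned = CROSS_REF.sub(lambda m: f"``{m.group('first')}``", sample)
    print(cleaned)
    # Required. The resource name of the Location to create the job in.
    # Format: ``projects.locations``

The real synth.py walks every file under google/cloud/**, collects each match, and feeds an escaped "before" string and the backtick-quoted "after" string to s.replace; the sketch above only demonstrates the substitution itself.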