Merge branch 'main' into fix--Fixed-getitng-project-ID-when-running-on-Vertex-AI
Ark-kun authored Mar 17, 2022
2 parents 692ae9e + 79aeec1 commit 235a6d1
Showing 39 changed files with 621 additions and 80 deletions.
4 changes: 2 additions & 2 deletions .github/CODEOWNERS
@@ -4,8 +4,8 @@
# For syntax help see:
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax

-# yoshi-python is the default owner
-* @googleapis/yoshi-python
+# @googleapis/cdpe-cloudai and yoshi-python are the default owners
+* @googleapis/cdpe-cloudai @googleapis/yoshi-python

# The AI Platform GAPIC libraries are owned by Cloud AI DPE
/google/cloud/aiplatform_*/** @googleapis/cdpe-cloudai
4 changes: 2 additions & 2 deletions README.rst
@@ -111,7 +111,7 @@ Initialize the SDK to store common configurations that you use with the SDK.
staging_bucket='gs://my_staging_bucket',
# custom google.auth.credentials.Credentials
-# environment default creds used if not set
+# environment default credentials used if not set
credentials=my_credentials,
# customer managed encryption key resource name
@@ -188,7 +188,7 @@ Please visit `Using a managed dataset in a custom training application`_ for a d

.. _Using a managed dataset in a custom training application: https://cloud.google.com/vertex-ai/docs/training/using-managed-datasets

-It must write the model artifact to the environment variable populated by the traing service:
+It must write the model artifact to the environment variable populated by the training service:

.. code-block:: Python
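For context, the hunk above excerpts the argument list of ``aiplatform.init``; a minimal sketch of the full call under the README's setup (the project ID, region, and ``my_credentials`` values are placeholders carried over from the README excerpt):

.. code-block:: Python

    from google.cloud import aiplatform

    aiplatform.init(
        # placeholder project ID and region
        project='my-project',
        location='us-central1',
        # bucket used to stage artifacts for custom training jobs
        staging_bucket='gs://my_staging_bucket',
        # custom google.auth.credentials.Credentials;
        # environment default credentials used if not set
        credentials=my_credentials,
    )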
21 changes: 15 additions & 6 deletions google/cloud/aiplatform/base.py
@@ -98,7 +98,7 @@ def log_create_complete(
cls (VertexAiResourceNoun):
Vertex AI Resource Noun class that is being created.
resource (proto.Message):
-Vertex AI Resourc proto.Message
+Vertex AI Resource proto.Message
variable_name (str): Name of variable to use for code snippet
"""
self._logger.info(f"{cls.__name__} created. Resource name: {resource.name}")
@@ -121,7 +121,7 @@ def log_create_complete_with_getter(
cls (VertexAiResourceNoun):
Vertex AI Resource Noun class that is being created.
resource (proto.Message):
-Vertex AI Resourc proto.Message
+Vertex AI Resource proto.Message
variable_name (str): Name of variable to use for code snippet
"""
self._logger.info(f"{cls.__name__} created. Resource name: {resource.name}")
@@ -462,7 +462,7 @@ def __init__(
Args:
project(str): Project of the resource noun.
location(str): The location of the resource noun.
-credentials(google.auth.crendentials.Crendentials): Optional custom
+credentials(google.auth.credentials.Credentials): Optional custom
credentials to use when accessing interacting with resource noun.
resource_name(str): A fully-qualified resource name or ID.
"""
@@ -655,6 +655,15 @@ def gca_resource(self) -> proto.Message:
self._assert_gca_resource_is_available()
return self._gca_resource

+@property
+def _resource_is_available(self) -> bool:
+    """Returns True if GCA resource has been created and is available, otherwise False"""
+    try:
+        self._assert_gca_resource_is_available()
+        return True
+    except RuntimeError:
+        return False
+
def _assert_gca_resource_is_available(self) -> None:
"""Helper method to raise when property is not accessible.
@@ -833,7 +842,7 @@ def __init__(
Args:
project (str): Optional. Project of the resource noun.
location (str): Optional. The location of the resource noun.
-credentials(google.auth.crendentials.Crendentials):
+credentials(google.auth.credentials.Credentials):
Optional. custom credentials to use when accessing interacting with
resource noun.
resource_name(str): A fully-qualified resource name or ID.
@@ -863,7 +872,7 @@ def _empty_constructor(
Args:
project (str): Optional. Project of the resource noun.
location (str): Optional. The location of the resource noun.
-credentials(google.auth.crendentials.Crendentials):
+credentials(google.auth.credentials.Credentials):
Optional. custom credentials to use when accessing interacting with
resource noun.
resource_name(str): A fully-qualified resource name or ID.
@@ -1153,7 +1162,7 @@ def delete(self, sync: bool = True) -> None:
_LOGGER.log_action_completed_against_resource("deleted.", "", self)

def __repr__(self) -> str:
-if self._gca_resource:
+if self._gca_resource and self._resource_is_available:
return VertexAiResourceNoun.__repr__(self)

return FutureManager.__repr__(self)
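The net effect of these hunks: ``_resource_is_available`` turns the raising assertion into a boolean probe, and ``__repr__`` uses it so printing a not-yet-created resource can never raise. A minimal self-contained sketch of the same pattern, with illustrative names rather than the SDK's:

    class LazyResource:
        """Illustrative stand-in for a resource wrapper created in the background."""

        def __init__(self):
            self._gca_resource = None  # set once the backing resource exists

        def _assert_gca_resource_is_available(self):
            # Raise until the resource has been created.
            if self._gca_resource is None:
                raise RuntimeError("resource not created yet")

        @property
        def _resource_is_available(self):
            # Boolean probe: swallow only the expected RuntimeError.
            try:
                self._assert_gca_resource_is_available()
                return True
            except RuntimeError:
                return False

        def __repr__(self):
            # repr must never raise; fall back to a pending-state description.
            if self._resource_is_available:
                return f"<LazyResource {self._gca_resource!r}>"
            return "<LazyResource (pending creation)>"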
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/datasets/dataset.py
@@ -164,7 +164,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
-labels specified inside index file refenced by
+labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
@@ -488,7 +488,7 @@ def import_data(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
-labels specified inside index file refenced by
+labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
sync (bool):
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/datasets/image_dataset.py
@@ -82,7 +82,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
-labels specified inside index file refenced by
+labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/datasets/text_dataset.py
@@ -89,7 +89,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
-labels specified inside index file refenced by
+labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/datasets/video_dataset.py
@@ -82,7 +82,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
-labels specified inside index file refenced by
+labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
6 changes: 3 additions & 3 deletions google/cloud/aiplatform/explain/lit.py
@@ -29,23 +29,23 @@
except ImportError:
raise ImportError(
"LIT is not installed and is required to get Dataset as the return format. "
'Please install the SDK using "pip install python-aiplatform[lit]"'
'Please install the SDK using "pip install google-cloud-aiplatform[lit]"'
)

try:
import tensorflow as tf
except ImportError:
raise ImportError(
"Tensorflow is not installed and is required to load saved model. "
'Please install the SDK using "pip install pip install python-aiplatform[lit]"'
'Please install the SDK using "pip install google-cloud-aiplatform[lit]"'
)

try:
import pandas as pd
except ImportError:
raise ImportError(
"Pandas is not installed and is required to read the dataset. "
-'Please install Pandas using "pip install python-aiplatform[lit]"'
+'Please install Pandas using "pip install google-cloud-aiplatform[lit]"'
)


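All three hunks correct the install hint inside the same optional-dependency guard. A minimal sketch of that guard pattern, assuming the ``lit_nlp`` module name used by the LIT package (the error text is taken verbatim from the diff):

    try:
        import lit_nlp  # optional dependency, shipped via the [lit] extra
    except ImportError:
        raise ImportError(
            "LIT is not installed and is required to get Dataset as the return format. "
            'Please install the SDK using "pip install google-cloud-aiplatform[lit]"'
        )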
10 changes: 5 additions & 5 deletions google/cloud/aiplatform/jobs.py
@@ -325,7 +325,7 @@ def output_info(self,) -> Optional[aiplatform.gapic.BatchPredictionJob.OutputInf
"""Information describing the output of this job, including output location
into which prediction output is written.
-This is only available for batch predicition jobs that have run successfully.
+This is only available for batch prediction jobs that have run successfully.
"""
self._assert_gca_resource_is_available()
return self._gca_resource.output_info
@@ -839,7 +839,7 @@ def __init__(
Args:
project(str): Project of the resource noun.
location(str): The location of the resource noun.
-credentials(google.auth.crendentials.Crendentials): Optional custom
+credentials(google.auth.credentials.Credentials): Optional custom
credentials to use when accessing interacting with resource noun.
"""

@@ -1023,7 +1023,7 @@ def __init__(
encryption_spec_key_name: Optional[str] = None,
staging_bucket: Optional[str] = None,
):
"""Cosntruct a Custom Job with Worker Pool Specs.
"""Constructs a Custom Job with Worker Pool Specs.
```
Example usage:
@@ -1569,7 +1569,7 @@ def __init__(
Required. Configured CustomJob. The worker pool spec from this custom job
applies to the CustomJobs created in all the trials.
metric_spec: Dict[str, str]
-Required. Dicionary representing metrics to optimize. The dictionary key is the metric_id,
+Required. Dictionary representing metrics to optimize. The dictionary key is the metric_id,
which is reported by your training job, and the dictionary value is the
optimization goal of the metric('minimize' or 'maximize'). example:
@@ -1594,7 +1594,7 @@
DoubleParameterSpec, IntegerParameterSpec, CategoricalParameterSpace, DiscreteParameterSpec
max_trial_count (int):
-Reuired. The desired total number of Trials.
+Required. The desired total number of Trials.
parallel_trial_count (int):
Required. The desired number of Trials to run in parallel.
max_failed_trial_count (int):
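For reference, the ``metric_spec`` this docstring describes is a plain dict mapping metric IDs to optimization goals; a small illustration with hypothetical metric IDs:

    metric_spec = {
        'loss': 'minimize',      # metric_id reported by the trial's training code
        'accuracy': 'maximize',  # a second hypothetical metric_id
    }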
@@ -535,6 +535,8 @@ def deploy_index(
MatchingEngineIndexEndpoint - IndexEndpoint resource object
"""

+self.wait()
+
_LOGGER.log_action_start_against_resource(
"Deploying index", "index_endpoint", self,
)
@@ -591,6 +593,8 @@ def undeploy_index(
MatchingEngineIndexEndpoint - IndexEndpoint resource object
"""

+self.wait()
+
_LOGGER.log_action_start_against_resource(
"Undeploying index", "index_endpoint", self,
)
@@ -652,6 +656,8 @@ def mutate_deployed_index(
Optional. Strings which should be sent along with the request as metadata.
"""

+self.wait()
+
_LOGGER.log_action_start_against_resource(
"Mutating index", "index_endpoint", self,
)
@@ -691,6 +697,7 @@ def deployed_indexes(
Returns:
List[gca_matching_engine_index_endpoint.DeployedIndex] - Deployed indexes
"""
+self._assert_gca_resource_is_available()
return self._gca_resource.deployed_indexes

@base.optional_sync()
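The ``self.wait()`` lines added above make each mutating call block until any in-flight creation of the endpoint finishes, so the method never acts on a half-created resource. A rough sketch of the idea, assuming a future-backed wrapper with illustrative names:

    import concurrent.futures

    class AsyncResource:
        def __init__(self, creation_future: concurrent.futures.Future):
            self._future = creation_future  # resolves once the resource exists

        def wait(self):
            # Block until creation completes; creation errors surface here
            # rather than deep inside a later deploy/undeploy call.
            self._future.result()

        def deploy_index(self):
            self.wait()  # the same guard this commit adds
            # ...safe to use the fully-created resource from here on...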
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/metadata/metadata.py
@@ -155,7 +155,7 @@ def log_metrics(self, metrics: Dict[str, Union[float, int]]):
Args:
metrics (Dict):
-Required. Metrics key/value pairs. Only flot and int are supported format for value.
+Required. Metrics key/value pairs. Only float and int are supported format for value.
Raises:
TypeError: If value contains unsupported types.
ValueError: If Experiment or Run is not set.
@@ -263,7 +263,7 @@ def _validate_metrics_value_type(metrics: Dict[str, Union[float, int]]):
Args:
metrics (Dict):
-Required. Metrics key/value pairs. Only flot and int are supported format for value.
+Required. Metrics key/value pairs. Only float and int are supported format for value.
Raises:
TypeError: If value contains unsupported types.
"""
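The restriction these hunks document (float and int values only) shows up at the call site; a minimal usage sketch, assuming an experiment and run were already set up via ``aiplatform.init`` and ``aiplatform.start_run``:

    from google.cloud import aiplatform

    aiplatform.log_metrics({'rmse': 0.9, 'epochs': 10})  # float and int values only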
4 changes: 4 additions & 0 deletions google/cloud/aiplatform/models.py
@@ -1575,6 +1575,8 @@ def update(
ValueError: If `labels` is not the correct format.
"""

+self.wait()
+
current_model_proto = self.gca_resource
copied_model_proto = current_model_proto.__class__(current_model_proto)

@@ -2496,6 +2498,8 @@ def export_model(
ValueError: If invalid arguments or export formats are provided.
"""

+self.wait()
+
# Model does not support exporting
if not self.supported_export_formats:
raise ValueError(f"The model `{self.resource_name}` is not exportable.")
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/tensorboard/uploader_utils.py
@@ -377,7 +377,7 @@ def get_or_create(
Returns:
time_series (tensorboard_time_series.TensorboardTimeSeries):
-A new or existing tensorboard_time_series.TensorbaordTimeSeries.
+A new or existing tensorboard_time_series.TensorboardTimeSeries.
Raises:
exceptions.InvalidArgument:
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/training_jobs.py
@@ -435,7 +435,7 @@ def _create_input_data_config(
- AIP_TEST_DATA_URI = "bigquery_destination.dataset_*.test"
Raises:
ValueError: When more than 1 type of split configuration is passed or when
-the split configuartion passed is incompatible with the dataset schema.
+the split configuration passed is incompatible with the dataset schema.
"""

input_data_config = None
@@ -5811,7 +5811,7 @@ def __init__(
multiple objects in shots and segments. You can use these
models to track objects in your videos according to your
own pre-defined, custom labels.
"action_recognition" - A video action reconition model pinpoints
"action_recognition" - A video action recognition model pinpoints
the location of actions with short temporal durations (~1 second).
model_type: str = "CLOUD"
Required. One of the following:
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/utils/pipeline_utils.py
@@ -155,7 +155,7 @@ def _get_vertex_value(
inputs, or value is none.
"""
if value is None:
raise ValueError("None values should be filterd out.")
raise ValueError("None values should be filtered out.")

if name not in self._parameter_types:
raise ValueError(
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/utils/source_utils.py
@@ -71,7 +71,7 @@ class _TrainingScriptPythonPackager:
packager = TrainingScriptPythonPackager('my_script.py', ['pandas', 'pytorch'])
gcs_path = packager.package_and_copy_to_gcs(
gcs_staging_dir='my-bucket',
-project='my-prject')
+project='my-project')
module_name = packager.module_name
The package after installed can be executed as:
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/utils/worker_spec_utils.py
@@ -186,7 +186,7 @@ def chief_worker_pool(
reduction_server_replica_count: int = 0,
reduction_server_machine_type: str = None,
) -> "_DistributedTrainingSpec":
"""Parameterizes Config to support only chief with worker replicas.
"""Parametrizes Config to support only chief with worker replicas.
For replica is assigned to chief and the remainder to workers. All spec have the
same machine type, accelerator count, and accelerator type.
2 changes: 2 additions & 0 deletions google/cloud/aiplatform_v1/__init__.py
@@ -131,6 +131,7 @@
from .types.feature_selector import FeatureSelector
from .types.feature_selector import IdMatcher
from .types.featurestore import Featurestore
+from .types.featurestore_monitoring import FeaturestoreMonitoringConfig
from .types.featurestore_online_service import FeatureValue
from .types.featurestore_online_service import FeatureValueList
from .types.featurestore_online_service import ReadFeatureValuesRequest
@@ -672,6 +673,7 @@
"FeatureValueDestination",
"FeatureValueList",
"Featurestore",
"FeaturestoreMonitoringConfig",
"FeaturestoreOnlineServingServiceClient",
"FeaturestoreServiceClient",
"FilterSplit",
@@ -41,6 +41,7 @@
from google.cloud.aiplatform_v1.types import feature as gca_feature
from google.cloud.aiplatform_v1.types import featurestore
from google.cloud.aiplatform_v1.types import featurestore as gca_featurestore
+from google.cloud.aiplatform_v1.types import featurestore_monitoring
from google.cloud.aiplatform_v1.types import featurestore_service
from google.cloud.aiplatform_v1.types import operation as gca_operation
from google.protobuf import empty_pb2 # type: ignore
@@ -1202,7 +1203,12 @@ def sample_update_entity_type():
- ``description``
- ``labels``
- ``monitoring_config.snapshot_analysis.disabled``
-- ``monitoring_config.snapshot_analysis.monitoring_interval``
+- ``monitoring_config.snapshot_analysis.monitoring_interval_days``
+- ``monitoring_config.snapshot_analysis.staleness_days``
+- ``monitoring_config.import_features_analysis.state``
+- ``monitoring_config.import_features_analysis.anomaly_detection_baseline``
+- ``monitoring_config.numerical_threshold_config.value``
+- ``monitoring_config.categorical_threshold_config.value``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -1912,8 +1918,7 @@ def sample_update_feature():
- ``description``
- ``labels``
-- ``monitoring_config.snapshot_analysis.disabled``
-- ``monitoring_config.snapshot_analysis.monitoring_interval``
+- ``disable_monitoring``
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
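The bullet lists above enumerate the fields a request's ``update_mask`` may name; a minimal sketch of constructing such a mask (the chosen paths are only examples):

    from google.protobuf import field_mask_pb2

    # Only the fields named here are overwritten on the server.
    update_mask = field_mask_pb2.FieldMask(
        paths=['description', 'monitoring_config.snapshot_analysis.monitoring_interval_days'],
    )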