diff --git a/CHANGELOG.md b/CHANGELOG.md index dbfd175a62..bfe647facd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,28 @@ # Changelog +## [1.19.0](https://github.com/googleapis/python-aiplatform/compare/v1.18.3...v1.19.0) (2022-11-17) + + +### Features + +* Add Feature Store: Streaming Ingestion (write_feature_values()) and introduce Preview namespace to Vertex SDK ([bae0315](https://github.com/googleapis/python-aiplatform/commit/bae03158c06865d1b61c06a1c8af64e876ce76dd)) +* Add bq_dataset_id parameter to batch_serve_to_df ([bb72562](https://github.com/googleapis/python-aiplatform/commit/bb72562f4515b6ace73a735477584ca0b5a30f58)) +* Add annotation_labels to ImportDataConfig in aiplatform v1 dataset.proto ([43e2805](https://github.com/googleapis/python-aiplatform/commit/43e28052d798c380de6e102edbe257a0100738cd)) +* Add support for order_by in Metadata SDK list methods for Artifact, Execution and Context. ([2377606](https://github.com/googleapis/python-aiplatform/commit/23776066909b5b7f77f704722d2719e1a1733ad4)) +* Support global network parameter. 
([c7f57ad](https://github.com/googleapis/python-aiplatform/commit/c7f57ad505b7251b9c663538e2312998445db691)) + + +### Bug Fixes + +* Correct data file gcs path for import_data_text_sentiment_analysis_sample test ([86df4b5](https://github.com/googleapis/python-aiplatform/commit/86df4b5d79118caf8f45c3845c92afe6585c24e9)) +* Print error for schema classes ([13e2165](https://github.com/googleapis/python-aiplatform/commit/13e216518f20a32c7e18e6ea5b497a5fcb1d77a0)) + + +### Documentation + +* Update README with new link for AI Platform API ([35b83d9](https://github.com/googleapis/python-aiplatform/commit/35b83d90649ec396b736469278def4aaaf80621e)) + ## [1.18.3](https://github.com/googleapis/python-aiplatform/compare/v1.18.2...v1.18.3) (2022-11-01) diff --git a/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py b/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py index 98e647d029..2ef1db5ad5 100644 --- a/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py +++ b/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py @@ -391,13 +391,21 @@ def list( for pipeline_execution in filtered_pipeline_executions: if "pipeline_job_resource_name" in pipeline_execution.metadata: - service_pipeline_job = cls( - pipeline_execution.metadata["pipeline_job_resource_name"], - project=project, - location=location, - credentials=credentials, - ) - service_pipeline_jobs.append(service_pipeline_job) + # This is wrapped in a try/except for cases when both + # `_component_identifier` and `_template_name_identifier` are + # set. 
In that case, even though all pipelines returned by the + # Execution.list() call will match the `_component_identifier`, + # some may not match the `_template_name_identifier` + try: + service_pipeline_job = cls( + pipeline_execution.metadata["pipeline_job_resource_name"], + project=project, + location=location, + credentials=credentials, + ) + service_pipeline_jobs.append(service_pipeline_job) + except ValueError: + continue return service_pipeline_jobs diff --git a/google/cloud/aiplatform/constants/prediction.py b/google/cloud/aiplatform/constants/prediction.py index 76e4875a20..52a69ab692 100644 --- a/google/cloud/aiplatform/constants/prediction.py +++ b/google/cloud/aiplatform/constants/prediction.py @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,6 +79,12 @@ ] TF_CONTAINER_URIS = [ + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest", "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest", "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest", "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest", @@ -130,6 +136,9 @@ "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest", 
"us-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", "europe-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", "asia-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", diff --git a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py index 81b43145b3..e532db4222 100644 --- a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py +++ b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py @@ -17,11 +17,15 @@ """A plugin to handle remote tensoflow profiler sessions for Vertex AI.""" -from google.cloud.aiplatform.training_utils.cloud_profiler import cloud_profiler_utils +from google.cloud.aiplatform.training_utils.cloud_profiler import ( + cloud_profiler_utils, +) try: import tensorflow as tf - from tensorboard_plugin_profile.profile_plugin import ProfilePlugin + from tensorboard_plugin_profile.profile_plugin import ( + ProfilePlugin, + ) except ImportError as err: raise ImportError(cloud_profiler_utils.import_error_msg) from err @@ -36,10 +40,14 @@ import tensorboard.plugins.base_plugin as tensorboard_base_plugin from werkzeug import Response -from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader +from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import ( + profile_uploader, +) from google.cloud.aiplatform.training_utils import environment_variables from google.cloud.aiplatform.training_utils.cloud_profiler import wsgi_types -from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin +from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import ( + base_plugin, +) from google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow import ( tensorboard_api, ) @@ -68,8 +76,7 @@ def _get_tf_versioning() -> Optional[Version]: versioning = version.split(".") if 
len(versioning) != 3: return - - return Version(int(versioning[0]), int(versioning[1]), int(versioning[2])) + return Version(int(versioning[0]), int(versioning[1]), versioning[2]) def _is_compatible_version(version: Version) -> bool: @@ -228,7 +235,7 @@ def warn_tensorboard_env_var(var_name: str): Required. The name of the missing environment variable. """ logging.warning( - f"Environment variable `{var_name}` must be set. " + _BASE_TB_ENV_WARNING + "Environment variable `%s` must be set. %s", var_name, _BASE_TB_ENV_WARNING ) diff --git a/google/cloud/aiplatform/version.py b/google/cloud/aiplatform/version.py index 776d33fe9d..92e84b924b 100644 --- a/google/cloud/aiplatform/version.py +++ b/google/cloud/aiplatform/version.py @@ -15,4 +15,4 @@ # limitations under the License. # -__version__ = "1.18.3" +__version__ = "1.19.0" diff --git a/tests/unit/aiplatform/test_cloud_profiler.py b/tests/unit/aiplatform/test_cloud_profiler.py index 388405d034..b686419361 100644 --- a/tests/unit/aiplatform/test_cloud_profiler.py +++ b/tests/unit/aiplatform/test_cloud_profiler.py @@ -31,8 +31,12 @@ from google.api_core import exceptions from google.cloud import aiplatform from google.cloud.aiplatform import training_utils -from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader -from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin +from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import ( + profile_uploader, +) +from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import ( + base_plugin, +) from google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow import ( tf_profiler, ) @@ -175,15 +179,21 @@ def tf_import_mock(name, *args, **kwargs): def testCanInitializeTFVersion(self): import tensorflow - with mock.patch.object(tensorflow, "__version__", return_value="1.2.3.4"): + with mock.patch.object(tensorflow, "__version__", "1.2.3.4"): assert not TFProfiler.can_initialize() def 
testCanInitializeOldTFVersion(self): import tensorflow - with mock.patch.object(tensorflow, "__version__", return_value="2.3.0"): + with mock.patch.object(tensorflow, "__version__", "2.3.0"): assert not TFProfiler.can_initialize() + def testCanInitializeRcTFVersion(self): + import tensorflow as tf + + with mock.patch.object(tf, "__version__", "2.4.0-rc2"): + assert TFProfiler.can_initialize() + def testCanInitializeNoProfilePlugin(self): orig_find_spec = importlib.util.find_spec diff --git a/tests/unit/aiplatform/test_pipeline_based_service.py b/tests/unit/aiplatform/test_pipeline_based_service.py index 4a9acd600b..f751671462 100644 --- a/tests/unit/aiplatform/test_pipeline_based_service.py +++ b/tests/unit/aiplatform/test_pipeline_based_service.py @@ -596,15 +596,9 @@ def test_create_and_submit_pipeline_job( == test_backing_pipeline_job.resource_name ) - @pytest.mark.parametrize( - "job_spec_json", - [_TEST_PIPELINE_JOB], - ) def test_list_pipeline_based_service( self, mock_pipeline_based_service_get, - mock_load_yaml_and_json, - job_spec_json, get_execution_mock, list_executions_mock, ): @@ -635,3 +629,25 @@ def test_list_pipeline_based_service( # only 1 of the 2 executions in list_executions_mock matches the # properties of FakePipelineBasedService assert len(test_list_request) == 1 + + def test_list_pipeline_based_service_with_template_name_identifier( + self, + mock_pipeline_based_service_get, + get_execution_mock, + list_executions_mock, + ): + aiplatform.init( + project=_TEST_PROJECT, + location=_TEST_LOCATION, + credentials=_TEST_CREDENTIALS, + ) + + self.FakePipelineBasedService._template_name_identifier = ( + _TEST_INVALID_PIPELINE_NAME_IDENTIFIER + ) + + test_list_request = self.FakePipelineBasedService.list() + + # None of the mock pipelines match the `_template_name_identifier` + # set above, so the returned list should be empty + assert len(test_list_request) == 0