From 5ad93c305ca1ad14e49c8a3ce06558d57a5887a4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 17 Nov 2022 08:27:17 -0800 Subject: [PATCH 1/6] Copybara import of the project: -- 2fb2b63d3965f7535921bb3c306793ef7cdd7f6d by release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>: chore(main): release 1.19.0 COPYBARA_INTEGRATE_REVIEW=https://github.com/googleapis/python-aiplatform/pull/1768 from googleapis:release-please--branches--main 2fb2b63d3965f7535921bb3c306793ef7cdd7f6d PiperOrigin-RevId: 489217395 --- CHANGELOG.md | 20 ++++++++++++++++++++ google/cloud/aiplatform/version.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dbfd175a62..5492a6e8ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,26 @@ # Changelog +## [1.19.0](https://github.com/googleapis/python-aiplatform/compare/v1.18.3...v1.19.0) (2022-11-17) + + +### Features + +* Add Feature Store: Streaming Ingestion (write_feature_values()) and introduce Preview namespace to Vertex SDK ([bae0315](https://github.com/googleapis/python-aiplatform/commit/bae03158c06865d1b61c06a1c8af64e876ce76dd)) +* Add support for ordery_by in Metadata SDK list methods for Artifact, Execution and Context. ([2377606](https://github.com/googleapis/python-aiplatform/commit/23776066909b5b7f77f704722d2719e1a1733ad4)) +* Support global network parameter. ([c7f57ad](https://github.com/googleapis/python-aiplatform/commit/c7f57ad505b7251b9c663538e2312998445db691)) + + +### Bug Fixes + +* Correct data file gcs path for import_data_text_sentiment_analysis_sample test ([86df4b5](https://github.com/googleapis/python-aiplatform/commit/86df4b5d79118caf8f45c3845c92afe6585c24e9)) +* Print error for schema classes ([13e2165](https://github.com/googleapis/python-aiplatform/commit/13e216518f20a32c7e18e6ea5b497a5fcb1d77a0)) + + +### Documentation + +* Update README with new link for AI Platform API ([35b83d9](https://github.com/googleapis/python-aiplatform/commit/35b83d90649ec396b736469278def4aaaf80621e)) + ## [1.18.3](https://github.com/googleapis/python-aiplatform/compare/v1.18.2...v1.18.3) (2022-11-01) diff --git a/google/cloud/aiplatform/version.py b/google/cloud/aiplatform/version.py index 776d33fe9d..92e84b924b 100644 --- a/google/cloud/aiplatform/version.py +++ b/google/cloud/aiplatform/version.py @@ -15,4 +15,4 @@ # limitations under the License. 
# -__version__ = "1.18.3" +__version__ = "1.19.0" From d438e58dd50491c7a535cde4f88230a7b41f1345 Mon Sep 17 00:00:00 2001 From: Sara Robinson Date: Thu, 17 Nov 2022 10:54:39 -0800 Subject: [PATCH 2/6] chore: update 1.19.0 release notes PiperOrigin-RevId: 489253566 --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5492a6e8ec..bfe647facd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ ### Features * Add Feature Store: Streaming Ingestion (write_feature_values()) and introduce Preview namespace to Vertex SDK ([bae0315](https://github.com/googleapis/python-aiplatform/commit/bae03158c06865d1b61c06a1c8af64e876ce76dd)) +* Add bq_dataset_id parameter to batch_serve_to_df ([bb72562](https://github.com/googleapis/python-aiplatform/commit/bb72562f4515b6ace73a735477584ca0b5a30f58)) +* Add annotation_labels to ImportDataConfig in aiplatform v1 dataset.proto ([43e2805](https://github.com/googleapis/python-aiplatform/commit/43e28052d798c380de6e102edbe257a0100738cd)) * Add support for ordery_by in Metadata SDK list methods for Artifact, Execution and Context. ([2377606](https://github.com/googleapis/python-aiplatform/commit/23776066909b5b7f77f704722d2719e1a1733ad4)) * Support global network parameter. ([c7f57ad](https://github.com/googleapis/python-aiplatform/commit/c7f57ad505b7251b9c663538e2312998445db691)) From 369ab65c34c18bcc1fadad32752c8a257d1ab84c Mon Sep 17 00:00:00 2001 From: Sara Robinson Date: Fri, 18 Nov 2022 07:01:17 -0800 Subject: [PATCH 3/6] chore: fix list method in _VertexAiPipelineBasedService class PiperOrigin-RevId: 489464748 --- .../pipeline_based_service.py | 22 ++++++++++----- .../aiplatform/test_pipeline_based_service.py | 28 +++++++++++++++---- 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py b/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py index 98e647d029..2ef1db5ad5 100644 --- a/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py +++ b/google/cloud/aiplatform/_pipeline_based_service/pipeline_based_service.py @@ -391,13 +391,21 @@ def list( for pipeline_execution in filtered_pipeline_executions: if "pipeline_job_resource_name" in pipeline_execution.metadata: - service_pipeline_job = cls( - pipeline_execution.metadata["pipeline_job_resource_name"], - project=project, - location=location, - credentials=credentials, - ) - service_pipeline_jobs.append(service_pipeline_job) + # This is wrapped in a try/except for cases when both + # `_coponent_identifier` and `_template_name_identifier` are + # set. 
In that case, even though all pipelines returned by the + # Execution.list() call will match the `_component_identifier`, + # some may not match the `_template_name_identifier` + try: + service_pipeline_job = cls( + pipeline_execution.metadata["pipeline_job_resource_name"], + project=project, + location=location, + credentials=credentials, + ) + service_pipeline_jobs.append(service_pipeline_job) + except ValueError: + continue return service_pipeline_jobs diff --git a/tests/unit/aiplatform/test_pipeline_based_service.py b/tests/unit/aiplatform/test_pipeline_based_service.py index 4a9acd600b..f751671462 100644 --- a/tests/unit/aiplatform/test_pipeline_based_service.py +++ b/tests/unit/aiplatform/test_pipeline_based_service.py @@ -596,15 +596,9 @@ def test_create_and_submit_pipeline_job( == test_backing_pipeline_job.resource_name ) - @pytest.mark.parametrize( - "job_spec_json", - [_TEST_PIPELINE_JOB], - ) def test_list_pipeline_based_service( self, mock_pipeline_based_service_get, - mock_load_yaml_and_json, - job_spec_json, get_execution_mock, list_executions_mock, ): @@ -635,3 +629,25 @@ def test_list_pipeline_based_service( # only 1 of the 2 executions in list_executions_mock matches the # properties of FakePipelineBasedService assert len(test_list_request) == 1 + + def test_list_pipeline_based_service_with_template_name_identifier( + self, + mock_pipeline_based_service_get, + get_execution_mock, + list_executions_mock, + ): + aiplatform.init( + project=_TEST_PROJECT, + location=_TEST_LOCATION, + credentials=_TEST_CREDENTIALS, + ) + + self.FakePipelineBasedService._template_name_identifier = ( + _TEST_INVALID_PIPELINE_NAME_IDENTIFIER + ) + + test_list_request = self.FakePipelineBasedService.list() + + # None of the mock pipelines match the `_template_name_identifier` + # set above, so the returned list should be empty + assert len(test_list_request) == 0 From 3e95e8d85752332d8147c1159b03db73257f2c51 Mon Sep 17 00:00:00 2001 From: A Vertex SDK engineer Date: Fri, 18 Nov 2022 18:08:00 -0800 Subject: [PATCH 4/6] chore: Add OP 2.10 images to first-party support. PiperOrigin-RevId: 489600866 --- google/cloud/aiplatform/constants/prediction.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/google/cloud/aiplatform/constants/prediction.py b/google/cloud/aiplatform/constants/prediction.py index 76e4875a20..88581c8e44 100644 --- a/google/cloud/aiplatform/constants/prediction.py +++ b/google/cloud/aiplatform/constants/prediction.py @@ -79,6 +79,12 @@ ] TF_CONTAINER_URIS = [ + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-10:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-10:latest", "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest", "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest", "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-9:latest", From 8779df5362a6851372cf3cea005a1c6c3096b19e Mon Sep 17 00:00:00 2001 From: A Vertex SDK engineer Date: Tue, 29 Nov 2022 10:52:23 -0800 Subject: [PATCH 5/6] fix: Fix bug that broke profiler with '0-rc2' tensorflow versions. 
PiperOrigin-RevId: 491683085 --- .../plugins/tensorflow/tf_profiler.py | 21 ++++++++++++------- tests/unit/aiplatform/test_cloud_profiler.py | 18 ++++++++++++---- 2 files changed, 28 insertions(+), 11 deletions(-) diff --git a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py index 81b43145b3..e532db4222 100644 --- a/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py +++ b/google/cloud/aiplatform/training_utils/cloud_profiler/plugins/tensorflow/tf_profiler.py @@ -17,11 +17,15 @@ """A plugin to handle remote tensoflow profiler sessions for Vertex AI.""" -from google.cloud.aiplatform.training_utils.cloud_profiler import cloud_profiler_utils +from google.cloud.aiplatform.training_utils.cloud_profiler import ( + cloud_profiler_utils, +) try: import tensorflow as tf - from tensorboard_plugin_profile.profile_plugin import ProfilePlugin + from tensorboard_plugin_profile.profile_plugin import ( + ProfilePlugin, + ) except ImportError as err: raise ImportError(cloud_profiler_utils.import_error_msg) from err @@ -36,10 +40,14 @@ import tensorboard.plugins.base_plugin as tensorboard_base_plugin from werkzeug import Response -from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader +from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import ( + profile_uploader, +) from google.cloud.aiplatform.training_utils import environment_variables from google.cloud.aiplatform.training_utils.cloud_profiler import wsgi_types -from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin +from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import ( + base_plugin, +) from google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow import ( tensorboard_api, ) @@ -68,8 +76,7 @@ def _get_tf_versioning() -> Optional[Version]: versioning = version.split(".") if len(versioning) != 3: return - - return Version(int(versioning[0]), int(versioning[1]), int(versioning[2])) + return Version(int(versioning[0]), int(versioning[1]), versioning[2]) def _is_compatible_version(version: Version) -> bool: @@ -228,7 +235,7 @@ def warn_tensorboard_env_var(var_name: str): Required. The name of the missing environment variable. """ logging.warning( - f"Environment variable `{var_name}` must be set. " + _BASE_TB_ENV_WARNING + "Environment variable `%s` must be set. 
%s", var_name, _BASE_TB_ENV_WARNING ) diff --git a/tests/unit/aiplatform/test_cloud_profiler.py b/tests/unit/aiplatform/test_cloud_profiler.py index 388405d034..b686419361 100644 --- a/tests/unit/aiplatform/test_cloud_profiler.py +++ b/tests/unit/aiplatform/test_cloud_profiler.py @@ -31,8 +31,12 @@ from google.api_core import exceptions from google.cloud import aiplatform from google.cloud.aiplatform import training_utils -from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import profile_uploader -from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import base_plugin +from google.cloud.aiplatform.tensorboard.plugins.tf_profiler import ( + profile_uploader, +) +from google.cloud.aiplatform.training_utils.cloud_profiler.plugins import ( + base_plugin, +) from google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow import ( tf_profiler, ) @@ -175,15 +179,21 @@ def tf_import_mock(name, *args, **kwargs): def testCanInitializeTFVersion(self): import tensorflow - with mock.patch.object(tensorflow, "__version__", return_value="1.2.3.4"): + with mock.patch.object(tensorflow, "__version__", "1.2.3.4"): assert not TFProfiler.can_initialize() def testCanInitializeOldTFVersion(self): import tensorflow - with mock.patch.object(tensorflow, "__version__", return_value="2.3.0"): + with mock.patch.object(tensorflow, "__version__", "2.3.0"): assert not TFProfiler.can_initialize() + def testCanInitializeRcTFVersion(self): + import tensorflow as tf + + with mock.patch.object(tf, "__version__", "2.4.0-rc2"): + assert TFProfiler.can_initialize() + def testCanInitializeNoProfilePlugin(self): orig_find_spec = importlib.util.find_spec From cdd557e5e86b0b4d4cf401509aba5914e0bab8b7 Mon Sep 17 00:00:00 2001 From: Jaycee Li Date: Tue, 29 Nov 2022 13:48:39 -0800 Subject: [PATCH 6/6] fix: Add pre-built container(tf2-gpu-2-1) to the container URI list PiperOrigin-RevId: 491727625 --- google/cloud/aiplatform/constants/prediction.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/constants/prediction.py b/google/cloud/aiplatform/constants/prediction.py index 88581c8e44..52a69ab692 100644 --- a/google/cloud/aiplatform/constants/prediction.py +++ b/google/cloud/aiplatform/constants/prediction.py @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -136,6 +136,9 @@ "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-1:latest", "us-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", "europe-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", "asia-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest",