Skip to content

Commit

Permalink
change compat.DEFAULT_VERSION to compat.V1; change tests services and types to V1 for general; add fixtures to support V1BETA1 tests for explanation
Browse files Browse the repository at this point in the history
  • Loading branch information
morgandu committed Apr 6, 2021
1 parent a663fbf commit 15af9f1
Show file tree
Hide file tree
Showing 18 changed files with 550 additions and 321 deletions.
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/compat/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
V1BETA1 = "v1beta1"
V1 = "v1"

DEFAULT_VERSION = V1BETA1
DEFAULT_VERSION = V1

if DEFAULT_VERSION == V1BETA1:

Expand Down
20 changes: 15 additions & 5 deletions google/cloud/aiplatform/initializer.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,19 +20,23 @@
import logging
import pkg_resources
import os
from typing import Optional, Type
from typing import Optional, Type, Union

from google.api_core import client_options
from google.api_core import gapic_v1
import google.auth
from google.auth import credentials as auth_credentials
from google.auth.exceptions import GoogleAuthError

from google.cloud.aiplatform import compat
from google.cloud.aiplatform import constants
from google.cloud.aiplatform import utils

from google.cloud.aiplatform.compat.types import encryption_spec as gca_encryption_spec

from google.cloud.aiplatform.compat.types import (
encryption_spec as gca_encryption_spec_compat,
encryption_spec_v1 as gca_encryption_spec_v1,
encryption_spec_v1beta1 as gca_encryption_spec_v1beta1,
)

class _Config:
"""Stores common parameters and options for API calls."""
Expand Down Expand Up @@ -93,17 +97,23 @@ def init(
self._encryption_spec_key_name = encryption_spec_key_name

def get_encryption_spec(
self, encryption_spec_key_name: Optional[str]
) -> Optional[gca_encryption_spec.EncryptionSpec]:
self,
encryption_spec_key_name: Optional[str],
select_version: Optional[str] = compat.DEFAULT_VERSION,
) -> Optional[Union[gca_encryption_spec_v1.EncryptionSpec, gca_encryption_spec_v1beta1.EncryptionSpec]]:
"""Creates a gca_encryption_spec.EncryptionSpec instance from the given key name.
If the provided key name is None, it uses the default key name if provided.
Args:
encryption_spec_key_name (Optional[str]): The default encryption key name to use when creating resources.
select_version: The default version is set to compat.DEFAULT_VERSION
"""
kms_key_name = encryption_spec_key_name or self.encryption_spec_key_name
encryption_spec = None
if kms_key_name:
gca_encryption_spec = gca_encryption_spec_compat
if select_version == compat.V1BETA1:
gca_encryption_spec = gca_encryption_spec_v1beta1
encryption_spec = gca_encryption_spec.EncryptionSpec(
kms_key_name=kms_key_name
)
Expand Down
5 changes: 4 additions & 1 deletion google/cloud/aiplatform/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -439,10 +439,12 @@ def create(
gca_bp_job = gca_bp_job_compat
gca_io = gca_io_compat
gca_machine_resources = gca_machine_resources_compat
select_version = compat.DEFAULT_VERSION
if generate_explanation:
gca_bp_job = gca_bp_job_v1beta1
gca_io = gca_io_v1beta1
gca_machine_resources = gca_machine_resources_v1beta1
select_version = compat.V1BETA1

gapic_batch_prediction_job = gca_bp_job.BatchPredictionJob()

Expand Down Expand Up @@ -484,7 +486,8 @@ def create(

# Optional Fields
gapic_batch_prediction_job.encryption_spec = initializer.global_config.get_encryption_spec(
encryption_spec_key_name=encryption_spec_key_name
encryption_spec_key_name=encryption_spec_key_name,
select_version=select_version
)

if model_parameters:
Expand Down
7 changes: 5 additions & 2 deletions google/cloud/aiplatform/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@
machine_resources_v1beta1 as gca_machine_resources_v1beta1,
model as gca_model_compat,
model_v1beta1 as gca_model_v1beta1,
env_var as gca_env_var,
env_var as gca_env_var_compat,
env_var_v1beta1 as gca_env_var_v1beta1,
)

from google.protobuf import json_format
Expand Down Expand Up @@ -1319,9 +1320,11 @@ def upload(

gca_endpoint = gca_endpoint_compat
gca_model = gca_model_compat
gca_env_var = gca_env_var_compat
if explanation_metadata and explanation_parameters:
gca_endpoint = gca_endpoint_v1beta1
gca_model = gca_model_v1beta1
gca_env_var = gca_env_var_v1beta1

api_client = cls._instantiate_client(location, credentials)
env = None
Expand Down Expand Up @@ -1357,7 +1360,7 @@ def upload(

# TODO(b/182388545) initializer.global_config.get_encryption_spec from a sync function
encryption_spec = initializer.global_config.get_encryption_spec(
encryption_spec_key_name=encryption_spec_key_name
encryption_spec_key_name=encryption_spec_key_name,
)

managed_model = gca_model.Model(
Expand Down
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/training_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,8 @@
training_pipeline as gca_training_pipeline,
)

from google.cloud.aiplatform.v1beta1.schema.trainingjob import (
definition_v1beta1 as training_job_inputs,
from google.cloud.aiplatform.v1.schema.trainingjob import (
definition_v1 as training_job_inputs,
)

from google.cloud import storage
Expand Down
20 changes: 10 additions & 10 deletions tests/unit/aiplatform/test_automl_image_training_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,26 +6,26 @@
from google.protobuf import struct_pb2

from google.cloud import aiplatform
from google.cloud.aiplatform import schema
from google.cloud.aiplatform import models

from google.cloud.aiplatform import datasets
from google.cloud.aiplatform import initializer

from google.cloud.aiplatform import models
from google.cloud.aiplatform import schema
from google.cloud.aiplatform import training_jobs

from google.cloud.aiplatform_v1beta1.services.model_service import (
from google.cloud.aiplatform_v1.services.model_service import (
client as model_service_client,
)
from google.cloud.aiplatform_v1beta1.services.pipeline_service import (
from google.cloud.aiplatform_v1.services.pipeline_service import (
client as pipeline_service_client,
)
from google.cloud.aiplatform_v1beta1.types import model as gca_model
from google.cloud.aiplatform_v1beta1.types import pipeline_state as gca_pipeline_state
from google.cloud.aiplatform_v1beta1.types import (
from google.cloud.aiplatform_v1.types import (
dataset as gca_dataset,
encryption_spec as gca_encryption_spec,
model as gca_model,
pipeline_state as gca_pipeline_state,
training_pipeline as gca_training_pipeline,
)
from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset
from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec

_TEST_PROJECT = "test-project"
_TEST_LOCATION = "us-central1"
Expand Down
32 changes: 16 additions & 16 deletions tests/unit/aiplatform/test_automl_tabular_training_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,25 +3,25 @@
from unittest import mock

from google.cloud import aiplatform

from google.cloud.aiplatform import datasets
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import schema
from google.cloud.aiplatform.training_jobs import AutoMLTabularTrainingJob
from google.cloud.aiplatform import training_jobs

from google.cloud.aiplatform_v1beta1.services.model_service import (
from google.cloud.aiplatform_v1.services.model_service import (
client as model_service_client,
)
from google.cloud.aiplatform_v1beta1.services.pipeline_service import (
from google.cloud.aiplatform_v1.services.pipeline_service import (
client as pipeline_service_client,
)
from google.cloud.aiplatform_v1beta1.types import model as gca_model
from google.cloud.aiplatform_v1beta1.types import pipeline_state as gca_pipeline_state
from google.cloud.aiplatform_v1beta1.types import (
from google.cloud.aiplatform_v1.types import (
dataset as gca_dataset,
encryption_spec as gca_encryption_spec,
model as gca_model,
pipeline_state as gca_pipeline_state,
training_pipeline as gca_training_pipeline,
)
from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec
from google.cloud.aiplatform_v1beta1 import Dataset as GapicDataset

from google.protobuf import json_format
from google.protobuf import struct_pb2

Expand Down Expand Up @@ -148,7 +148,7 @@ def mock_dataset_tabular():
ds = mock.MagicMock(datasets.Dataset)
ds.name = _TEST_DATASET_NAME
ds._latest_future = None
ds._gca_resource = GapicDataset(
ds._gca_resource = gca_dataset.Dataset(
display_name=_TEST_DATASET_DISPLAY_NAME,
metadata_schema_uri=_TEST_METADATA_SCHEMA_URI_TABULAR,
labels={},
Expand All @@ -163,7 +163,7 @@ def mock_dataset_nontabular():
ds = mock.MagicMock(datasets.Dataset)
ds.name = _TEST_DATASET_NAME
ds._latest_future = None
ds._gca_resource = GapicDataset(
ds._gca_resource = gca_dataset.Dataset(
display_name=_TEST_DATASET_DISPLAY_NAME,
metadata_schema_uri=_TEST_METADATA_SCHEMA_URI_NONTABULAR,
labels={},
Expand Down Expand Up @@ -195,7 +195,7 @@ def test_run_call_pipeline_service_create(
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)

job = AutoMLTabularTrainingJob(
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
Expand Down Expand Up @@ -276,7 +276,7 @@ def test_run_call_pipeline_if_no_model_display_name(
):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)

job = AutoMLTabularTrainingJob(
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
Expand Down Expand Up @@ -340,7 +340,7 @@ def test_run_called_twice_raises(
):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)

job = AutoMLTabularTrainingJob(
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
Expand Down Expand Up @@ -377,7 +377,7 @@ def test_run_raises_if_pipeline_fails(

aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)

job = AutoMLTabularTrainingJob(
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
Expand Down Expand Up @@ -406,7 +406,7 @@ def test_run_raises_if_pipeline_fails(
def test_raises_before_run_is_called(self, mock_pipeline_service_create):
aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)

job = AutoMLTabularTrainingJob(
job = training_jobs.AutoMLTabularTrainingJob(
display_name=_TEST_DISPLAY_NAME,
optimization_prediction_type=_TEST_TRAINING_OPTIMIZATION_PREDICTION_TYPE,
optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
Expand Down
24 changes: 12 additions & 12 deletions tests/unit/aiplatform/test_automl_text_training_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,28 +3,28 @@
from unittest import mock

from google.cloud import aiplatform
from google.cloud.aiplatform import schema
from google.cloud.aiplatform import models

from google.cloud.aiplatform import datasets
from google.cloud.aiplatform import initializer

from google.cloud.aiplatform import models
from google.cloud.aiplatform import schema
from google.cloud.aiplatform import training_jobs

from google.cloud.aiplatform_v1beta1.services.model_service import (
from google.cloud.aiplatform_v1.services.model_service import (
client as model_service_client,
)
from google.cloud.aiplatform_v1beta1.services.pipeline_service import (
from google.cloud.aiplatform_v1.services.pipeline_service import (
client as pipeline_service_client,
)
from google.cloud.aiplatform_v1beta1.types import model as gca_model
from google.cloud.aiplatform_v1beta1.types import pipeline_state as gca_pipeline_state
from google.cloud.aiplatform_v1beta1.types import (
from google.cloud.aiplatform_v1.types import (
dataset as gca_dataset,
encryption_spec as gca_encryption_spec,
model as gca_model,
pipeline_state as gca_pipeline_state,
training_pipeline as gca_training_pipeline,
)
from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset
from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec
from google.cloud.aiplatform.v1beta1.schema.trainingjob import (
definition_v1beta1 as training_job_inputs,
from google.cloud.aiplatform.v1.schema.trainingjob import (
definition_v1 as training_job_inputs,
)

_TEST_PROJECT = "test-project"
Expand Down
20 changes: 10 additions & 10 deletions tests/unit/aiplatform/test_automl_video_training_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,26 +6,26 @@
from google.protobuf import struct_pb2

from google.cloud import aiplatform
from google.cloud.aiplatform import schema
from google.cloud.aiplatform import models

from google.cloud.aiplatform import datasets
from google.cloud.aiplatform import initializer

from google.cloud.aiplatform import models
from google.cloud.aiplatform import schema
from google.cloud.aiplatform import training_jobs

from google.cloud.aiplatform_v1beta1.services.model_service import (
from google.cloud.aiplatform_v1.services.model_service import (
client as model_service_client,
)
from google.cloud.aiplatform_v1beta1.services.pipeline_service import (
from google.cloud.aiplatform_v1.services.pipeline_service import (
client as pipeline_service_client,
)
from google.cloud.aiplatform_v1beta1.types import model as gca_model
from google.cloud.aiplatform_v1beta1.types import pipeline_state as gca_pipeline_state
from google.cloud.aiplatform_v1beta1.types import (
from google.cloud.aiplatform_v1.types import (
dataset as gca_dataset,
encryption_spec as gca_encryption_spec,
model as gca_model,
pipeline_state as gca_pipeline_state,
training_pipeline as gca_training_pipeline,
)
from google.cloud.aiplatform_v1beta1.types import dataset as gca_dataset
from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec

_TEST_PROJECT = "test-project"
_TEST_LOCATION = "us-central1"
Expand Down
Loading

0 comments on commit 15af9f1

Please sign in to comment.