Skip to content

Commit

Permalink
fix: add resource manager utils to get project ID from project number (#1068)
Browse files Browse the repository at this point in the history

* fix: add resource manager utils to get project ID from project number

* fix: add docstring

Co-authored-by: gcf-merge-on-green[bot] <60162190+gcf-merge-on-green[bot]@users.noreply.github.com>
  • Loading branch information
morgandu and gcf-merge-on-green[bot] authored Mar 10, 2022
1 parent bbe105d commit f10a1d4
Show file tree
Hide file tree
Showing 5 changed files with 96 additions and 16 deletions.
11 changes: 6 additions & 5 deletions google/cloud/aiplatform/featurestore/entity_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
from google.cloud.aiplatform import featurestore
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import utils
from google.cloud.aiplatform.utils import featurestore_utils
from google.cloud.aiplatform.utils import featurestore_utils, resource_manager_utils

from google.cloud import bigquery

Expand Down Expand Up @@ -1259,10 +1259,11 @@ def ingest_from_df(
"-", "_"
)

# TODO(b/216497263): Add support for resource project does not match initializer.global_config.project
temp_bq_dataset_id = f"{initializer.global_config.project}.{temp_bq_dataset_name}"[
:1024
]
project_id = resource_manager_utils.get_project_id(
project_number=entity_type_name_components["project"],
credentials=self.credentials,
)
temp_bq_dataset_id = f"{project_id}.{temp_bq_dataset_name}"[:1024]
temp_bq_table_id = f"{temp_bq_dataset_id}.{entity_type_id}"

temp_bq_dataset = bigquery.Dataset(dataset_ref=temp_bq_dataset_id)
Expand Down
11 changes: 6 additions & 5 deletions google/cloud/aiplatform/featurestore/featurestore.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from google.cloud.aiplatform import featurestore
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import utils
from google.cloud.aiplatform.utils import featurestore_utils
from google.cloud.aiplatform.utils import featurestore_utils, resource_manager_utils

from google.cloud import bigquery

Expand Down Expand Up @@ -1147,10 +1147,11 @@ def batch_serve_to_df(
"-", "_"
)

# TODO(b/216497263): Add support for resource project does not match initializer.global_config.project
temp_bq_dataset_id = f"{initializer.global_config.project}.{temp_bq_dataset_name}"[
:1024
]
project_id = resource_manager_utils.get_project_id(
project_number=featurestore_name_components["project"],
credentials=self.credentials,
)
temp_bq_dataset_id = f"{project_id}.{temp_bq_dataset_name}"[:1024]
temp_bq_dataset = bigquery.Dataset(dataset_ref=temp_bq_dataset_id)
temp_bq_dataset.location = self.location
temp_bq_dataset = bigquery_client.create_dataset(temp_bq_dataset)
Expand Down
49 changes: 49 additions & 0 deletions google/cloud/aiplatform/utils/resource_manager_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-

# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


from typing import Optional

from google.auth import credentials as auth_credentials
from google.cloud import resourcemanager

from google.cloud.aiplatform import initializer


def get_project_id(
    project_number: str, credentials: Optional[auth_credentials.Credentials] = None,
) -> str:
    """Looks up a GCP project's ID given its project number.

    Args:
        project_number (str):
            Required. The automatically generated unique identifier for your
            GCP project.
        credentials (auth_credentials.Credentials):
            Optional. Custom credentials to use for the API call. Falls back
            to the globally configured default credentials when not provided.

    Returns:
        str: The project ID — the unique string used to differentiate your
        GCP project from all others in Google Cloud.
    """
    # Fall back to the SDK-wide default credentials when none are supplied.
    credentials = credentials or initializer.global_config.credentials

    client = resourcemanager.ProjectsClient(credentials=credentials)

    # Resource Manager addresses projects as "projects/{number}" and the
    # returned Project resource carries the human-chosen project_id.
    return client.get_project(name=f"projects/{project_number}").project_id
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@
"packaging >= 14.3",
"google-cloud-storage >= 1.32.0, < 3.0.0dev",
"google-cloud-bigquery >= 1.15.0, < 3.0.0dev",
"google-cloud-resource-manager >= 1.3.3, < 3.0.0dev",
),
extras_require={
"full": full_extra_require,
Expand Down
40 changes: 34 additions & 6 deletions tests/unit/aiplatform/test_featurestores.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
from google.cloud.aiplatform import base
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import utils
from google.cloud.aiplatform.utils import resource_manager_utils

from google.cloud.aiplatform.utils import featurestore_utils
from google.cloud.aiplatform_v1.services.featurestore_service import (
Expand All @@ -54,11 +55,13 @@

from google.cloud import bigquery
from google.cloud import bigquery_storage
from google.cloud import resourcemanager

from google.cloud.bigquery_storage_v1.types import stream as gcbqs_stream

# project
_TEST_PROJECT = "test-project"
_TEST_PROJECT_DIFF = "test-project-diff"
_TEST_LOCATION = "us-central1"
_TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}"

Expand Down Expand Up @@ -340,6 +343,18 @@ def uuid_mock():
return uuid.UUID(int=1)


# All Resource Manager Mocks
@pytest.fixture
def get_project_mock():
    """Patches ProjectsClient.get_project to always return the test project."""
    stub_project = resourcemanager.Project(project_id=_TEST_PROJECT)
    with patch.object(resourcemanager.ProjectsClient, "get_project") as mock:
        mock.return_value = stub_project
        yield mock


# All BigQuery Mocks
@pytest.fixture
def bq_client_mock():
Expand Down Expand Up @@ -1286,10 +1301,13 @@ def test_batch_serve_to_gcs_with_invalid_gcs_destination_type(self):
"bq_delete_dataset_mock",
"bqs_init_client_mock",
"bqs_create_read_session",
"get_project_mock",
)
@patch("uuid.uuid4", uuid_mock)
def test_batch_serve_to_df(self, batch_read_feature_values_mock):
aiplatform.init(project=_TEST_PROJECT)

aiplatform.init(project=_TEST_PROJECT_DIFF)

my_featurestore = aiplatform.Featurestore(
featurestore_name=_TEST_FEATURESTORE_NAME
)
Expand All @@ -1299,7 +1317,7 @@ def test_batch_serve_to_df(self, batch_read_feature_values_mock):
expected_temp_bq_dataset_name = f"temp_{_TEST_FEATURESTORE_ID}_{uuid.uuid4()}".replace(
"-", "_"
)
expecte_temp_bq_dataset_id = f"{initializer.global_config.project}.{expected_temp_bq_dataset_name}"[
expecte_temp_bq_dataset_id = f"{_TEST_PROJECT}.{expected_temp_bq_dataset_name}"[
:1024
]
expected_temp_bq_read_instances_table_id = (
Expand Down Expand Up @@ -1695,6 +1713,7 @@ def test_ingest_from_gcs_with_invalid_gcs_source_type(self):
"bq_init_dataset_mock",
"bq_create_dataset_mock",
"bq_delete_dataset_mock",
"get_project_mock",
)
@patch("uuid.uuid4", uuid_mock)
def test_ingest_from_df_using_column(
Expand All @@ -1704,7 +1723,7 @@ def test_ingest_from_df_using_column(
bq_init_schema_field_mock,
):

aiplatform.init(project=_TEST_PROJECT)
aiplatform.init(project=_TEST_PROJECT_DIFF)

my_entity_type = aiplatform.EntityType(entity_type_name=_TEST_ENTITY_TYPE_NAME)
df_source = pd.DataFrame()
Expand All @@ -1717,7 +1736,7 @@ def test_ingest_from_df_using_column(
expected_temp_bq_dataset_name = f"temp_{_TEST_FEATURESTORE_ID}_{uuid.uuid4()}".replace(
"-", "_"
)
expecte_temp_bq_dataset_id = f"{initializer.global_config.project}.{expected_temp_bq_dataset_name}"[
expecte_temp_bq_dataset_id = f"{_TEST_PROJECT}.{expected_temp_bq_dataset_name}"[
:1024
]
expected_temp_bq_table_id = (
Expand Down Expand Up @@ -1755,6 +1774,7 @@ def test_ingest_from_df_using_column(
"bq_init_dataset_mock",
"bq_create_dataset_mock",
"bq_delete_dataset_mock",
"get_project_mock",
)
@patch("uuid.uuid4", uuid_mock)
def test_ingest_from_df_using_datetime(
Expand All @@ -1763,7 +1783,8 @@ def test_ingest_from_df_using_datetime(
bq_load_table_from_dataframe_mock,
bq_init_schema_field_mock,
):
aiplatform.init(project=_TEST_PROJECT)

aiplatform.init(project=_TEST_PROJECT_DIFF)

my_entity_type = aiplatform.EntityType(entity_type_name=_TEST_ENTITY_TYPE_NAME)
df_source = pd.DataFrame()
Expand All @@ -1777,7 +1798,7 @@ def test_ingest_from_df_using_datetime(
expected_temp_bq_dataset_name = f"temp_{_TEST_FEATURESTORE_ID}_{uuid.uuid4()}".replace(
"-", "_"
)
expecte_temp_bq_dataset_id = f"{initializer.global_config.project}.{expected_temp_bq_dataset_name}"[
expecte_temp_bq_dataset_id = f"{_TEST_PROJECT}.{expected_temp_bq_dataset_name}"[
:1024
]
expected_temp_bq_table_id = (
Expand Down Expand Up @@ -2431,3 +2452,10 @@ def test_create_feature(self, create_feature_mock, sync):
),
metadata=_TEST_REQUEST_METADATA,
)


class TestResourceManagerUtils:
    """Unit tests for resource_manager_utils."""

    @pytest.mark.usefixtures("get_project_mock")
    def test_get_project_id(self):
        # The patched ProjectsClient resolves any project number to
        # _TEST_PROJECT, so the helper must surface that project ID.
        resolved_id = resource_manager_utils.get_project_id(project_number="123456")
        assert resolved_id == _TEST_PROJECT

0 comments on commit f10a1d4

Please sign in to comment.