diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_artifacts/_artifact_utilities.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_artifacts/_artifact_utilities.py index beddfd90f350..70934356af95 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_artifacts/_artifact_utilities.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_artifacts/_artifact_utilities.py @@ -362,7 +362,7 @@ def _update_gen2_metadata(name, version, indicator_file, storage_client) -> None def _check_and_upload_path( artifact: T, - asset_operations: Union["DataOperations", "ModelOperations", "CodeOperations", "FeatureSetOperations"], + asset_operations: Union["DataOperations", "ModelOperations", "CodeOperations", "_FeatureSetOperations"], artifact_type: str, datastore_name: Optional[str] = None, sas_uri: Optional[str] = None, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py index cce96fdbfa66..a2d5c3b0f836 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py @@ -59,7 +59,7 @@ from azure.ai.ml._utils._experimental import experimental from azure.ai.ml._utils._http_utils import HttpPipeline from azure.ai.ml._utils._registry_utils import get_registry_client -from azure.ai.ml._utils.utils import _is_https_url +from azure.ai.ml._utils.utils import _is_https_url, is_private_preview_enabled from azure.ai.ml.constants._common import AzureMLResourceType from azure.ai.ml.entities import ( BatchDeployment, @@ -100,6 +100,9 @@ from azure.ai.ml.operations._local_deployment_helper import _LocalDeploymentHelper from azure.ai.ml.operations._local_endpoint_helper import _LocalEndpointHelper from azure.ai.ml.operations._schedule_operations import ScheduleOperations +from azure.ai.ml.operations._feature_set_operations import _FeatureSetOperations +from azure.ai.ml.operations._feature_store_operations import _FeatureStoreOperations +from azure.ai.ml.operations._feature_store_entity_operations import _FeatureStoreEntityOperations module_logger = logging.getLogger(__name__) @@ -478,6 +481,31 @@ def __init__( self._virtual_clusters = VirtualClusterOperations(self._operation_scope, self._credential, **ops_kwargs) + self._featurestores = _FeatureStoreOperations( + self._operation_scope, + self._rp_service_client, + self._operation_container, + self._credential, + **app_insights_handler_kwargs, + ) + + self._featuresets = _FeatureSetOperations( + self._operation_scope, + self._operation_config, + self._service_client_02_2023_preview, + self._datastores, + **ops_kwargs, + ) + + self._featurestoreentities = _FeatureStoreEntityOperations( + self._operation_scope, self._operation_config, self._service_client_02_2023_preview, **ops_kwargs + ) + + if is_private_preview_enabled(): + self._operation_container.add(AzureMLResourceType.FEATURE_STORE, self._featurestores) + self._operation_container.add(AzureMLResourceType.FEATURE_SET, self._featuresets) + self._operation_container.add(AzureMLResourceType.FEATURE_STORE_ENTITY, self._featurestoreentities) + @classmethod def from_config( cls, @@ -603,6 +631,39 @@ def registries(self) -> RegistryOperations: """ return self._registries + @property + @experimental + def _feature_stores(self) -> _FeatureStoreOperations: + """A collection of feature-store related operations. 
+ :return: Featurestore operations + :rtype: _FeatureStoreOperations + """ + if is_private_preview_enabled(): + return self._featurestores + raise Exception("feature store operations not supported") + + @property + @experimental + def _feature_sets(self) -> _FeatureSetOperations: + """A collection of feature set related operations. + :return: FeatureSet operations + :rtype: _FeatureSetOperations + """ + if is_private_preview_enabled(): + return self._featuresets + raise Exception("feature set operations not supported") + + @property + @experimental + def _feature_store_entities(self) -> _FeatureStoreEntityOperations: + """A collection of feature store entity related operations. + :return: FeatureStoreEntity operations + :rtype: _FeatureStoreEntityOperations + """ + if is_private_preview_enabled(): + return self._featurestoreentities + raise Exception("feature store entity operations not supported") + @property def connections(self) -> WorkspaceConnectionsOperations: """A collection of workspace connection related operations. diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/__init__.py index 3c6c8d611203..200c3c97f77b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/__init__.py @@ -8,7 +8,7 @@ from .feature_schema import FeatureSchema from .feature_set_schema import FeatureSetSchema from .featureset_spec_schema import FeaturesetSpecSchema -from .featureset_specification_schema import FeaturesetSpecificationSchema +from .feature_set_specification_schema import FeatureSetSpecificationSchema from .materialization_settings_schema import MaterializationSettingsSchema from .source_metadata_schema import SourceMetadataSchema from .timestamp_column_metadata_schema import TimestampColumnMetadataSchema @@ -18,7 +18,7 @@ "FeatureSchema", "FeatureSetSchema", "FeaturesetSpecSchema", - "FeaturesetSpecificationSchema", + "FeatureSetSpecificationSchema", "MaterializationSettingsSchema", "SourceMetadataSchema", "TimestampColumnMetadataSchema", diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_schema.py index cc3c6e93671f..4670ef07ffe6 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_schema.py @@ -23,6 +23,6 @@ class FeatureSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._feature_set.feature import Feature + from azure.ai.ml.entities._feature_set.feature import _Feature - return Feature(data_type=type, description=data.pop("description", None), **data) + return _Feature(data_type=type, description=data.pop("description", None), **data) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_set_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_set_schema.py index 0d2ef8f5f42c..29df8355bac0 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_set_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_set_schema.py @@ -10,13 +10,13 @@ from azure.ai.ml._schema.core.schema import YamlFileSchema from .materialization_settings_schema import MaterializationSettingsSchema -from .featureset_specification_schema import FeaturesetSpecificationSchema +from .feature_set_specification_schema import FeatureSetSpecificationSchema 
class FeatureSetSchema(YamlFileSchema): name = fields.Str(required=True, allow_none=False) version = fields.Str(required=True, allow_none=False) - specification = NestedField(FeaturesetSpecificationSchema, required=True, allow_none=False) + specification = NestedField(FeatureSetSpecificationSchema, required=True, allow_none=False) entities = fields.List(fields.Str, required=True, allow_none=False) stage = fields.Str() description = fields.Str() diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/featureset_specification_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_set_specification_schema.py similarity index 67% rename from sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/featureset_specification_schema.py rename to sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_set_specification_schema.py index b6f5dd2cdb45..4216fa6254e1 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/featureset_specification_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_set_specification_schema.py @@ -9,11 +9,11 @@ from azure.ai.ml._schema.core.schema import PatchedSchemaMeta -class FeaturesetSpecificationSchema(metaclass=PatchedSchemaMeta): +class FeatureSetSpecificationSchema(metaclass=PatchedSchemaMeta): path = fields.Str(required=True, allow_none=False) @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._feature_set.featureset_specification import FeaturesetSpecification + from azure.ai.ml.entities._feature_set.feature_set_specification import _FeatureSetSpecification - return FeaturesetSpecification(**data) + return _FeatureSetSpecification(**data) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_transformation_code_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_transformation_code_schema.py index 5ad9c5fdfa74..260f394cd207 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_transformation_code_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/feature_transformation_code_schema.py @@ -15,6 +15,6 @@ class FeatureTransformationCodeSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._feature_set.delay_metadata import DelayMetadata + from azure.ai.ml.entities._feature_set.feature_transformation_code import FeatureTransformationCode - return DelayMetadata(**data) + return FeatureTransformationCode(**data) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/materialization_settings_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/materialization_settings_schema.py index b687d6fa65ce..93b1c8e4883b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/materialization_settings_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_set/materialization_settings_schema.py @@ -17,9 +17,9 @@ class MaterializationComputeResourceSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._feature_set.materialization_compute_resource import MaterializationComputeResource + from azure.ai.ml.entities._feature_set.materialization_compute_resource import _MaterializationComputeResource - return MaterializationComputeResource(instance_type=data.pop("instance_type"), **data) + return _MaterializationComputeResource(instance_type=data.pop("instance_type"), **data) class MaterializationSettingsSchema(metaclass=PatchedSchemaMeta): @@ -32,6 +32,6 @@ class 
MaterializationSettingsSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._feature_set.materialization_settings import MaterializationSettings + from azure.ai.ml.entities._feature_set.materialization_settings import _MaterializationSettings - return MaterializationSettings(**data) + return _MaterializationSettings(**data) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/compute_runtime_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/compute_runtime_schema.py index c7ba7e770e73..9b037dd38247 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/compute_runtime_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/compute_runtime_schema.py @@ -14,6 +14,6 @@ class ComputeRuntimeSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._workspace.compute_runtime import ComputeRuntime + from azure.ai.ml.entities._workspace.compute_runtime import _ComputeRuntime - return ComputeRuntime(spark_runtime_version=data.pop("spark_runtime_version")) + return _ComputeRuntime(spark_runtime_version=data.pop("spark_runtime_version")) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/materialization_store_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/materialization_store_schema.py index 22caf2895cea..88a3f014ae05 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/materialization_store_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store/materialization_store_schema.py @@ -15,9 +15,9 @@ class MaterializationStoreSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._feature_store.materialization_store import MaterializationStore + from azure.ai.ml.entities._feature_store.materialization_store import _MaterializationStore - return MaterializationStore( + return _MaterializationStore( type=data.pop("type"), target=data.pop("target"), ) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store_entity/data_column_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store_entity/data_column_schema.py index b3a07893f51a..6fad97a32983 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store_entity/data_column_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_feature_store_entity/data_column_schema.py @@ -22,6 +22,6 @@ class DataColumnSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._feature_store_entity.data_column import DataColumn + from azure.ai.ml.entities._feature_store_entity.data_column import _DataColumn - return DataColumn(**data) + return _DataColumn(**data) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_notification/notification_schema.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_notification/notification_schema.py index d8bc839f8153..044ae4af78c8 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_notification/notification_schema.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/_notification/notification_schema.py @@ -19,6 +19,6 @@ class NotificationSchema(metaclass=PatchedSchemaMeta): @post_load def make(self, data, **kwargs): - from azure.ai.ml.entities._notification.notification import Notification + from azure.ai.ml.entities._notification.notification import _Notification - return Notification(**data) + return _Notification(**data) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/__init__.py 
b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/__init__.py index bb94ea1d056c..adbc379e2a25 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/__init__.py @@ -137,6 +137,20 @@ from ._workspace.workspace import Workspace from ._workspace.workspace_keys import ContainerRegistryCredential, NotebookAccessKeys, WorkspaceKeys +from ._assets._artifacts.feature_set import _FeatureSet +from ._workspace.compute_runtime import _ComputeRuntime +from ._workspace.feature_store_settings import _FeatureStoreSettings +from ._feature_store_entity.feature_store_entity import _FeatureStoreEntity +from ._feature_store_entity.data_column import _DataColumn +from ._feature_store_entity.data_column_type import _DataColumnType +from ._feature_set.feature_set_specification import _FeatureSetSpecification +from ._feature_set.materialization_compute_resource import _MaterializationComputeResource +from ._feature_set.materialization_settings import _MaterializationSettings +from ._feature_set.materialization_type import _MaterializationType +from ._feature_store.feature_store import _FeatureStore +from ._feature_store.materialization_store import _MaterializationStore +from ._notification.notification import _Notification + # TODO: enable in PuP # from ._job.import_job import ImportJob # from ._component.import_component import ImportComponent @@ -272,6 +286,19 @@ "AutoScaleSettings", "AutoPauseSettings", "WorkspaceModelReference", + "_FeatureSet", + "_ComputeRuntime", + "_FeatureStoreSettings", + "_FeatureStoreEntity", + "_DataColumn", + "_DataColumnType", + "_FeatureSetSpecification", + "_MaterializationComputeResource", + "_MaterializationSettings", + "_MaterializationType", + "_FeatureStore", + "_MaterializationStore", + "_Notification", # builders "Command", "Parallel", diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/__init__.py index 783c42ff5f58..eabe32f48918 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/__init__.py @@ -9,8 +9,7 @@ from ._artifacts.code import Code from ._artifacts.data import Data from ._artifacts.model import Model -from ._artifacts.feature_set import FeatureSet from .environment import Environment from .workspace_asset_reference import WorkspaceAssetReference -__all__ = ["Artifact", "Model", "Code", "Data", "Environment", "FeatureSet", "WorkspaceAssetReference"] +__all__ = ["Artifact", "Model", "Code", "Data", "Environment", "WorkspaceAssetReference"] diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/_artifacts/feature_set.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/_artifacts/feature_set.py index 0d629bdf452b..a8eba268edbf 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/_artifacts/feature_set.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_assets/_artifacts/feature_set.py @@ -25,24 +25,24 @@ PARAMS_OVERRIDE_KEY, ) from azure.ai.ml.entities._assets import Artifact -from azure.ai.ml.entities._feature_set.featureset_specification import FeaturesetSpecification -from azure.ai.ml.entities._feature_set.materialization_settings import MaterializationSettings +from azure.ai.ml.entities._feature_set.feature_set_specification import _FeatureSetSpecification +from azure.ai.ml.entities._feature_set.materialization_settings import _MaterializationSettings from .artifact import ArtifactStorageInfo @experimental -class FeatureSet(Artifact): +class 
_FeatureSet(Artifact): def __init__( self, *, name: str, version: str, entities: List[str], - specification: FeaturesetSpecification, + specification: _FeatureSetSpecification, stage: Optional[str] = None, description: Optional[str] = None, - materialization_settings: Optional[MaterializationSettings] = None, + materialization_settings: Optional[_MaterializationSettings] = None, tags: Optional[Dict] = None, properties: Optional[Dict[str, str]] = None, **kwargs, @@ -56,7 +56,7 @@ def __init__( :param entities: Specifies list of entities. :type entities: list[str] :param specification: Specifies the feature spec details. - :type specification: ~azure.ai.ml.entities.FeaturesetSpecification + :type specification: ~azure.ai.ml.entities._FeatureSetSpecification :param description: Description of the resource. :type description: str :param tags: Tag dictionary. Tags can be added, removed, and updated. @@ -64,7 +64,7 @@ def __init__( :param properties: The asset property dictionary. :type properties: dict[str, str] :param materialization_settings: Specifies the materialization settings. - :type materialization_settings: ~azure.ai.ml.entities.MaterializationSettings + :type materialization_settings: ~azure.ai.ml.entities._MaterializationSettings :param kwargs: A dictionary of additional configuration parameters. :type kwargs: dict """ @@ -98,12 +98,12 @@ def _to_rest_object(self) -> FeaturesetVersion: return FeaturesetVersion(name=self.name, properties=featureset_version_properties) @classmethod - def _from_rest_object(cls, featureset_rest_object: FeaturesetVersion) -> "FeatureSet": + def _from_rest_object(cls, featureset_rest_object: FeaturesetVersion) -> "_FeatureSet": if not featureset_rest_object: return None featureset_rest_object_details: FeaturesetVersionProperties = featureset_rest_object.properties arm_id_object = get_arm_id_object_from_id(featureset_rest_object.id) - featureset = FeatureSet( + featureset = _FeatureSet( id=featureset_rest_object.id, name=arm_id_object.asset_name, version=arm_id_object.asset_version, @@ -111,25 +111,25 @@ def _from_rest_object(cls, featureset_rest_object: FeaturesetVersion) -> "Featur tags=featureset_rest_object_details.tags, properties=featureset_rest_object_details.properties, entities=featureset_rest_object_details.entities, - materialization_settings=MaterializationSettings._from_rest_object( + materialization_settings=_MaterializationSettings._from_rest_object( featureset_rest_object_details.materialization_settings ), - specification=FeaturesetSpecification._from_rest_object(featureset_rest_object_details.specification), + specification=_FeatureSetSpecification._from_rest_object(featureset_rest_object_details.specification), stage=featureset_rest_object_details.stage, ) return featureset @classmethod - def _from_container_rest_object(cls, rest_obj: FeaturesetContainer) -> "FeatureSet": + def _from_container_rest_object(cls, rest_obj: FeaturesetContainer) -> "_FeatureSet": rest_object_details: FeaturesetContainerProperties = rest_obj.properties arm_id_object = get_arm_id_object_from_id(rest_obj.id) - featureset = FeatureSet( + featureset = _FeatureSet( name=arm_id_object.asset_name, description=rest_object_details.description, tags=rest_object_details.tags, properties=rest_object_details.properties, entities=[], - specification=FeaturesetSpecification(), + specification=_FeatureSetSpecification(), version="", ) featureset.latest_version = rest_object_details.latest_version @@ -142,7 +142,7 @@ def _load( yaml_path: Optional[Union[PathLike, str]] = None, 
params_override: Optional[list] = None, **kwargs, - ) -> "FeatureSet": + ) -> "_FeatureSet": data = data or {} params_override = params_override or [] context = { @@ -150,7 +150,7 @@ def _load( PARAMS_OVERRIDE_KEY: params_override, } loaded_schema = load_from_dict(FeatureSetSchema, data, context, **kwargs) - return FeatureSet(**loaded_schema) + return _FeatureSet(**loaded_schema) def _to_dict(self) -> Dict: # pylint: disable=no-member diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/feature.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/feature.py index e347858c7ed4..1aea8c9ac2dd 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/feature.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/feature.py @@ -5,11 +5,11 @@ # pylint: disable=unused-argument from typing import Optional -from azure.ai.ml.entities._feature_store_entity.data_column_type import DataColumnType +from azure.ai.ml.entities._feature_store_entity.data_column_type import _DataColumnType -class Feature(object): - def __init__(self, *, name: str, data_type: DataColumnType, description: Optional[str], **kwargs): +class _Feature(object): + def __init__(self, *, name: str, data_type: _DataColumnType, description: Optional[str], **kwargs): self.name = name self.type = data_type self.description = description diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/featureset_specification.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/feature_set_specification.py similarity index 86% rename from sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/featureset_specification.py rename to sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/feature_set_specification.py index 517906f18c70..97737c45d99d 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/featureset_specification.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/feature_set_specification.py @@ -10,7 +10,7 @@ @experimental -class FeaturesetSpecification(RestTranslatableMixin): +class _FeatureSetSpecification(RestTranslatableMixin): def __init__(self, *, path: Optional[str] = None, **kwargs): # pylint: disable=unused-argument """ :param path: Specifies the spec path. 
@@ -22,7 +22,7 @@ def _to_rest_object(self) -> RestFeaturesetSpecification: return RestFeaturesetSpecification(path=self.path) @classmethod - def _from_rest_object(cls, obj: RestFeaturesetSpecification) -> "FeaturesetSpecification": + def _from_rest_object(cls, obj: RestFeaturesetSpecification) -> "_FeatureSetSpecification": if not obj: return None - return FeaturesetSpecification(path=obj.path) + return _FeatureSetSpecification(path=obj.path) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/featureset_spec.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/featureset_spec.py index 9154c9c5b766..298c8dd890fc 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/featureset_spec.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/featureset_spec.py @@ -13,9 +13,9 @@ from azure.ai.ml._utils.utils import load_yaml from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY from azure.ai.ml.entities._util import load_from_dict -from azure.ai.ml.entities._feature_store_entity.data_column import DataColumn +from azure.ai.ml.entities._feature_store_entity.data_column import _DataColumn -from .feature import Feature +from .feature import _Feature from .source_metadata import SourceMetadata from .delay_metadata import DelayMetadata from .feature_transformation_code import FeatureTransformationCode @@ -29,8 +29,8 @@ def __init__( *, source: SourceMetadata, feature_transformation_code: Optional[FeatureTransformationCode] = None, - features: List[Feature], - index_columns: List[DataColumn], + features: List[_Feature], + index_columns: List[_DataColumn], source_lookback: Optional[DelayMetadata] = None, temporal_join_lookback: Optional[DelayMetadata] = None, **_kwargs, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_compute_resource.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_compute_resource.py index e2c60e2e521d..56b3207d66aa 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_compute_resource.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_compute_resource.py @@ -10,7 +10,7 @@ @experimental -class MaterializationComputeResource(RestTranslatableMixin): +class _MaterializationComputeResource(RestTranslatableMixin): def __init__(self, *, instance_type: str, **kwargs): # pylint: disable=unused-argument """ :keyword instance_type: Specifies the instance type. 
@@ -22,7 +22,7 @@ def _to_rest_object(self) -> RestMaterializationComputeResource: return RestMaterializationComputeResource(instance_type=self.instance_type) @classmethod - def _from_rest_object(cls, obj: RestMaterializationComputeResource) -> "MaterializationComputeResource": + def _from_rest_object(cls, obj: RestMaterializationComputeResource) -> "_MaterializationComputeResource": if not obj: return None - return MaterializationComputeResource(instance_type=obj.instance_type) + return _MaterializationComputeResource(instance_type=obj.instance_type) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_settings.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_settings.py index af1e65ee52bc..a7f30d62df0a 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_settings.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_settings.py @@ -10,26 +10,26 @@ ) from azure.ai.ml.entities._mixins import RestTranslatableMixin from azure.ai.ml.entities._schedule.trigger import RecurrenceTrigger -from azure.ai.ml.entities._notification.notification import Notification -from azure.ai.ml.entities._feature_set.materialization_compute_resource import MaterializationComputeResource +from azure.ai.ml.entities._notification.notification import _Notification +from azure.ai.ml.entities._feature_set.materialization_compute_resource import _MaterializationComputeResource from azure.ai.ml._utils._experimental import experimental @experimental -class MaterializationSettings(RestTranslatableMixin): +class _MaterializationSettings(RestTranslatableMixin): def __init__( self, *, schedule: RecurrenceTrigger, offline_enabled: Optional[bool] = None, online_enabled: Optional[bool] = None, - notification: Optional[Notification] = None, - resource: Optional[MaterializationComputeResource] = None, + notification: Optional[_Notification] = None, + resource: Optional[_MaterializationComputeResource] = None, spark_configuration: Optional[Dict[str, str]] = None, **kwargs # pylint: disable=unused-argument ): - """MaterializationSettings. + """_MaterializationSettings. :param schedule: Specifies the schedule details. :type schedule: ~azure.ai.ml.entities.RecurrenceTrigger @@ -38,9 +38,9 @@ def __init__( :param online_enabled: Specifies if online store is enabled. :type online_enabled: bool :param notification: Specifies the notification details. - :type notification: ~azure.ai.ml.entities.Notification + :type notification: ~azure.ai.ml.entities._Notification :param resource: Specifies the compute resource settings. - :type resource: ~azure.ai.ml.entities.MaterializationComputeResource + :type resource: ~azure.ai.ml.entities._MaterializationComputeResource :param spark_configuration: Specifies the spark compute settings. 
:type spark_configuration: dict[str, str] """ @@ -74,13 +74,15 @@ def _to_rest_object(self) -> RestMaterializationSettings: ) @classmethod - def _from_rest_object(cls, obj: RestMaterializationSettings) -> "MaterializationSettings": + def _from_rest_object(cls, obj: RestMaterializationSettings) -> "_MaterializationSettings": if not obj: return None - return MaterializationSettings( + return _MaterializationSettings( schedule=RecurrenceTrigger._from_rest_object(obj.schedule), # pylint: disable=protected-access - notification=Notification._from_rest_object(obj.notification), # pylint: disable=protected-access - resource=MaterializationComputeResource._from_rest_object(obj.resource), # pylint: disable=protected-access + notification=_Notification._from_rest_object(obj.notification), # pylint: disable=protected-access + resource=_MaterializationComputeResource._from_rest_object( # pylint: disable=protected-access + obj.resource + ), spark_configuration=obj.spark_configuration, offline_enabled=obj.store_type == MaterializationStoreType.OFFLINE, online_enabled=obj.store_type == MaterializationStoreType.ONLINE, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_type.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_type.py index 2c9d545c6088..dc6c341d2a60 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_type.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_set/materialization_type.py @@ -8,6 +8,6 @@ @experimental -class MaterializationType(Enum): +class _MaterializationType(Enum): RecurrentMaterialization = 1 BackfillMaterialization = 2 diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/feature_store.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/feature_store.py index 3089bcd80e66..6eff9bbedd36 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/feature_store.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/feature_store.py @@ -13,26 +13,26 @@ from azure.ai.ml._restclient.v2022_12_01_preview.models import Workspace as RestWorkspace from azure.ai.ml._schema._feature_store.feature_store_schema import FeatureStoreSchema -from azure.ai.ml.entities._workspace.feature_store_settings import FeatureStoreSettings -from azure.ai.ml.entities._workspace.compute_runtime import ComputeRuntime +from azure.ai.ml.entities._workspace.feature_store_settings import _FeatureStoreSettings +from azure.ai.ml.entities._workspace.compute_runtime import _ComputeRuntime from azure.ai.ml.entities import Workspace, CustomerManagedKey from azure.ai.ml.entities._util import load_from_dict from azure.ai.ml.entities._credentials import IdentityConfiguration, ManagedIdentityConfiguration from azure.ai.ml._utils._experimental import experimental from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY -from .materialization_store import MaterializationStore +from .materialization_store import _MaterializationStore from ._constants import OFFLINE_STORE_CONNECTION_NAME, DEFAULT_SPARK_RUNTIME_VERSION, FEATURE_STORE_KIND @experimental -class FeatureStore(Workspace): +class _FeatureStore(Workspace): def __init__( self, *, name: str, - compute_runtime: Optional[ComputeRuntime] = None, - offline_store: Optional[MaterializationStore] = None, + compute_runtime: Optional[_ComputeRuntime] = None, + offline_store: Optional[_MaterializationStore] = None, materialization_identity: Optional[ManagedIdentityConfiguration] = None, description: Optional[str] = None, 
tags: Optional[Dict[str, str]] = None, @@ -57,10 +57,10 @@ def __init__( :param name: Name of the feature store. :type name: str :param compute_runtime: Compute runtime of the feature store. - :type compute_runtime: ~azure.ai.ml.entities.ComputeRuntime + :type compute_runtime: ~azure.ai.ml.entities._ComputeRuntime :param offline_store: Offline store for feature store. materialization_identity is required when offline_store is passed. - :type offline_store: ~azure.ai.ml.entities.MaterializationStore + :type offline_store: ~azure.ai.ml.entities._MaterializationStore :param materialization_identity: Identity used for materialization. :type materialization_identity: ~azure.ai.ml.entities.ManagedIdentityConfiguration :param description: Description of the feature store. @@ -109,10 +109,10 @@ def __init__( if offline_store and not materialization_identity: raise ValidationError("materialization_identity is required to setup offline store") - feature_store_settings = FeatureStoreSettings( + feature_store_settings = _FeatureStoreSettings( compute_runtime=compute_runtime if compute_runtime - else ComputeRuntime(spark_runtime_version=DEFAULT_SPARK_RUNTIME_VERSION), + else _ComputeRuntime(spark_runtime_version=DEFAULT_SPARK_RUNTIME_VERSION), offline_store_connection_name=( OFFLINE_STORE_CONNECTION_NAME if materialization_identity and offline_store else None ), @@ -143,17 +143,17 @@ def __init__( self.identity = identity @classmethod - def _from_rest_object(cls, rest_obj: RestWorkspace) -> "FeatureStore": + def _from_rest_object(cls, rest_obj: RestWorkspace) -> "_FeatureStore": if not rest_obj: return None workspace_object = Workspace._from_rest_object(rest_obj) - return FeatureStore( + return _FeatureStore( name=workspace_object.name, description=workspace_object.description, tags=workspace_object.tags, - compute_runtime=ComputeRuntime._from_rest_object( + compute_runtime=_ComputeRuntime._from_rest_object( workspace_object._feature_store_settings.compute_runtime if workspace_object._feature_store_settings else None @@ -180,7 +180,7 @@ def _load( yaml_path: Optional[Union[PathLike, str]] = None, params_override: Optional[list] = None, **kwargs, - ) -> "FeatureStore": + ) -> "_FeatureStore": data = data or {} params_override = params_override or [] context = { @@ -188,7 +188,7 @@ def _load( PARAMS_OVERRIDE_KEY: params_override, } loaded_schema = load_from_dict(FeatureStoreSchema, data, context, **kwargs) - return FeatureStore(**loaded_schema) + return _FeatureStore(**loaded_schema) def _to_dict(self) -> Dict: # pylint: disable=no-member diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/materialization_store.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/materialization_store.py index dee538bd3541..30f3a3a94fdc 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/materialization_store.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store/materialization_store.py @@ -6,10 +6,10 @@ @experimental -class MaterializationStore: +class _MaterializationStore: def __init__(self, type: str, target: str): # pylint: disable=redefined-builtin - """MaterializationStore. + """_MaterializationStore. :param type: store type. :type type: str :param target: store target. 
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column.py index f202e10cce88..8ca47bd2b3b4 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column.py @@ -11,41 +11,41 @@ from azure.ai.ml.entities._mixins import RestTranslatableMixin from azure.ai.ml._utils._experimental import experimental -from .data_column_type import DataColumnType - -DataColumnTypeMap: Dict[DataColumnType, FeatureDataType] = { - DataColumnType.string: FeatureDataType.STRING, - DataColumnType.integer: FeatureDataType.INTEGER, - DataColumnType.long: FeatureDataType.LONG, - DataColumnType.float: FeatureDataType.FLOAT, - DataColumnType.double: FeatureDataType.DOUBLE, - DataColumnType.binary: FeatureDataType.BINARY, - DataColumnType.datetime: FeatureDataType.DATETIME, - DataColumnType.boolean: FeatureDataType.BOOLEAN, +from .data_column_type import _DataColumnType + +DataColumnTypeMap: Dict[_DataColumnType, FeatureDataType] = { + _DataColumnType.string: FeatureDataType.STRING, + _DataColumnType.integer: FeatureDataType.INTEGER, + _DataColumnType.long: FeatureDataType.LONG, + _DataColumnType.float: FeatureDataType.FLOAT, + _DataColumnType.double: FeatureDataType.DOUBLE, + _DataColumnType.binary: FeatureDataType.BINARY, + _DataColumnType.datetime: FeatureDataType.DATETIME, + _DataColumnType.boolean: FeatureDataType.BOOLEAN, } -FeatureDataTypeMap: Dict[str, DataColumnType] = { - "String": DataColumnType.string, - "Integer": DataColumnType.integer, - "Long": DataColumnType.long, - "Float": DataColumnType.float, - "Double": DataColumnType.double, - "Binary": DataColumnType.binary, - "Datetime": DataColumnType.datetime, - "Boolean": DataColumnType.boolean, +FeatureDataTypeMap: Dict[str, _DataColumnType] = { + "String": _DataColumnType.string, + "Integer": _DataColumnType.integer, + "Long": _DataColumnType.long, + "Float": _DataColumnType.float, + "Double": _DataColumnType.double, + "Binary": _DataColumnType.binary, + "Datetime": _DataColumnType.datetime, + "Boolean": _DataColumnType.boolean, } @experimental -class DataColumn(RestTranslatableMixin): +class _DataColumn(RestTranslatableMixin): """A dataframe column :param name: The column name :type name: str, required :param type: Column data type :type type: str, one of [string, integer, long, float, double, binary, datetime, boolean] or - ~azure.ai.ml.entities.DataColumnType, optional""" + ~azure.ai.ml.entities._DataColumnType, optional""" - def __init__(self, *, name: str, type: DataColumnType = None, **kwargs): + def __init__(self, *, name: str, type: _DataColumnType = None, **kwargs): self.name = name self.type = type @@ -53,5 +53,5 @@ def _to_rest_object(self) -> IndexColumn: return IndexColumn(column_name=self.name, data_type=DataColumnTypeMap.get(self.type, None)) @classmethod - def _from_rest_object(cls, obj: IndexColumn) -> "DataColumn": - return DataColumn(name=obj.column_name, type=FeatureDataTypeMap.get(obj.data_type, None)) + def _from_rest_object(cls, obj: IndexColumn) -> "_DataColumn": + return _DataColumn(name=obj.column_name, type=FeatureDataTypeMap.get(obj.data_type, None)) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column_type.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column_type.py index 45feabaedd27..ddbb0d24ffd0 100644 --- 
a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column_type.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/data_column_type.py @@ -8,7 +8,7 @@ @experimental -class DataColumnType(Enum): +class _DataColumnType(Enum): string = 1 integer = 2 long = 3 diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/feature_store_entity.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/feature_store_entity.py index e3e7d4e61164..2d21ba471088 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/feature_store_entity.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_feature_store_entity/feature_store_entity.py @@ -22,30 +22,30 @@ from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY from azure.ai.ml.entities._assets.asset import Asset -from .data_column import DataColumn +from .data_column import _DataColumn @experimental -class FeatureStoreEntity(Asset): +class _FeatureStoreEntity(Asset): def __init__( self, *, name: str, version: str, - index_columns: List[DataColumn], + index_columns: List[_DataColumn], description: Optional[str] = None, tags: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, **kwargs, ): - """FeatureStoreEntity + """_FeatureStoreEntity :param name: Name of the resource. :type name: str :param version: Version of the resource. :type version: str :param index_columns: Specifies index columns. - :type index_columns: list[~azure.ai.ml.entities.DataColumn] + :type index_columns: list[~azure.ai.ml.entities._DataColumn] :param description: Description of the resource. :type description: str :param tags: Tag dictionary. Tags can be added, removed, and updated. @@ -77,13 +77,13 @@ def _to_rest_object(self) -> FeaturestoreEntityVersion: return FeaturestoreEntityVersion(properties=feature_store_entity_version_properties) @classmethod - def _from_rest_object(cls, rest_obj: FeaturestoreEntityVersion) -> "FeatureStoreEntity": + def _from_rest_object(cls, rest_obj: FeaturestoreEntityVersion) -> "_FeatureStoreEntity": rest_object_details: FeaturestoreEntityVersionProperties = rest_obj.properties arm_id_object = get_arm_id_object_from_id(rest_obj.id) - featurestoreEntity = FeatureStoreEntity( + featurestoreEntity = _FeatureStoreEntity( name=arm_id_object.asset_name, version=arm_id_object.asset_version, - index_columns=[DataColumn._from_rest_object(column) for column in rest_object_details.index_columns], + index_columns=[_DataColumn._from_rest_object(column) for column in rest_object_details.index_columns], description=rest_object_details.description, tags=rest_object_details.tags, properties=rest_object_details.properties, @@ -91,10 +91,10 @@ def _from_rest_object(cls, rest_obj: FeaturestoreEntityVersion) -> "FeatureStore return featurestoreEntity @classmethod - def _from_container_rest_object(cls, rest_obj: FeaturestoreEntityContainer) -> "FeatureStoreEntity": + def _from_container_rest_object(cls, rest_obj: FeaturestoreEntityContainer) -> "_FeatureStoreEntity": rest_object_details: FeaturestoreEntityContainerProperties = rest_obj.properties arm_id_object = get_arm_id_object_from_id(rest_obj.id) - featurestoreEntity = FeatureStoreEntity( + featurestoreEntity = _FeatureStoreEntity( name=arm_id_object.asset_name, description=rest_object_details.description, tags=rest_object_details.tags, @@ -112,7 +112,7 @@ def _load( yaml_path: Optional[Union[PathLike, str]] = None, params_override: Optional[list] = None, **kwargs, - ) -> 
"FeatureStoreEntity": + ) -> "_FeatureStoreEntity": data = data or {} params_override = params_override or [] context = { @@ -120,7 +120,7 @@ def _load( PARAMS_OVERRIDE_KEY: params_override, } loaded_schema = load_from_dict(FeatureStoreEntitySchema, data, context, **kwargs) - return FeatureStoreEntity(**loaded_schema) + return _FeatureStoreEntity(**loaded_schema) def _to_dict(self) -> Dict: # pylint: disable=no-member diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_load_functions.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_load_functions.py index db9494c5fb0b..c7818241097c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_load_functions.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_load_functions.py @@ -13,7 +13,7 @@ from azure.ai.ml.entities._assets._artifacts.code import Code from azure.ai.ml.entities._assets._artifacts.data import Data from azure.ai.ml.entities._assets._artifacts.model import Model -from azure.ai.ml.entities._assets._artifacts.feature_set import FeatureSet +from azure.ai.ml.entities._assets._artifacts.feature_set import _FeatureSet from azure.ai.ml.entities._assets.environment import Environment from azure.ai.ml.entities._component.command_component import CommandComponent from azure.ai.ml.entities._component.component import Component @@ -26,8 +26,8 @@ from azure.ai.ml.entities._deployment.online_deployment import OnlineDeployment from azure.ai.ml.entities._endpoint.batch_endpoint import BatchEndpoint from azure.ai.ml.entities._endpoint.online_endpoint import OnlineEndpoint -from azure.ai.ml.entities._feature_store.feature_store import FeatureStore -from azure.ai.ml.entities._feature_store_entity.feature_store_entity import FeatureStoreEntity +from azure.ai.ml.entities._feature_store.feature_store import _FeatureStore +from azure.ai.ml.entities._feature_store_entity.feature_store_entity import _FeatureStoreEntity from azure.ai.ml.entities._job.job import Job from azure.ai.ml.entities._registry.registry import Registry from azure.ai.ml.entities._resource import Resource @@ -672,7 +672,7 @@ def _load_feature_store( *, relative_origin: Optional[str] = None, **kwargs, -) -> FeatureStore: +) -> _FeatureStore: """Load a feature store object from a yaml file. :param source: The local yaml source of a feature store. Must be either a path to a local file, or an already-open file. @@ -690,9 +690,9 @@ def _load_feature_store( Format is [{"field1": "value1"}, {"field2": "value2"}] :type params_override: List[Dict] :return: Loaded feature store object. - :rtype: FeatureStore + :rtype: _FeatureStore """ - return load_common(FeatureStore, source, relative_origin, **kwargs) + return load_common(_FeatureStore, source, relative_origin, **kwargs) def _load_feature_set( @@ -700,7 +700,7 @@ def _load_feature_set( *, relative_origin: Optional[str] = None, **kwargs, -) -> FeatureSet: +) -> _FeatureSet: """Construct a FeatureSet object from yaml file. :param source: The local yaml source of a FeatureSet object. Must be either a @@ -721,9 +721,9 @@ def _load_feature_set( :raises ~azure.ai.ml.exceptions.ValidationException: Raised if FeatureSet cannot be successfully validated. Details will be provided in the error message. :return: Constructed FeatureSet object. 
- :rtype: FeatureSet + :rtype: _FeatureSet """ - return load_common(FeatureSet, source, relative_origin, **kwargs) + return load_common(_FeatureSet, source, relative_origin, **kwargs) def _load_feature_store_entity( @@ -731,7 +731,7 @@ def _load_feature_store_entity( *, relative_origin: Optional[str] = None, **kwargs, -) -> FeatureStoreEntity: +) -> _FeatureStoreEntity: """Construct a FeatureStoreEntity object from yaml file. :param source: The local yaml source of a FeatureStoreEntity object. Must be either a @@ -752,6 +752,6 @@ def _load_feature_store_entity( :raises ~azure.ai.ml.exceptions.ValidationException: Raised if FeatureStoreEntity cannot be successfully validated. Details will be provided in the error message. :return: Constructed FeatureStoreEntity object. - :rtype: FeatureStoreEntity + :rtype: _FeatureStoreEntity """ - return load_common(FeatureStoreEntity, source, relative_origin, **kwargs) + return load_common(_FeatureStoreEntity, source, relative_origin, **kwargs) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_notification/notification.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_notification/notification.py index 22959ddd2c46..1489ef81af6b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_notification/notification.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_notification/notification.py @@ -11,7 +11,7 @@ @experimental -class Notification(RestTranslatableMixin): +class _Notification(RestTranslatableMixin): """Configuration for notification.""" def __init__(self, *, email_on: Optional[List[str]] = None, emails: Optional[List[str]] = None): @@ -29,7 +29,7 @@ def _to_rest_object(self) -> RestNotificationSetting: return RestNotificationSetting(email_on=self.email_on, emails=self.emails) @classmethod - def _from_rest_object(cls, obj: RestNotificationSetting) -> "Notification": + def _from_rest_object(cls, obj: RestNotificationSetting) -> "_Notification": if not obj: return None - return Notification(email_on=obj.email_on, emails=obj.emails) + return _Notification(email_on=obj.email_on, emails=obj.emails) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/compute_runtime.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/compute_runtime.py index 536ab69dcb81..6aefb0bda845 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/compute_runtime.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/compute_runtime.py @@ -10,7 +10,7 @@ @experimental -class ComputeRuntime(RestTranslatableMixin): +class _ComputeRuntime(RestTranslatableMixin): def __init__( self, *, @@ -26,7 +26,7 @@ def _to_rest_object(self) -> RestComputeRuntimeDto: return RestComputeRuntimeDto(spark_runtime_version=self.spark_runtime_version) @classmethod - def _from_rest_object(cls, obj: RestComputeRuntimeDto) -> "ComputeRuntime": + def _from_rest_object(cls, obj: RestComputeRuntimeDto) -> "_ComputeRuntime": if not obj: return None - return ComputeRuntime(spark_runtime_version=obj.spark_runtime_version) + return _ComputeRuntime(spark_runtime_version=obj.spark_runtime_version) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/feature_store_settings.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/feature_store_settings.py index 6d2011c9ec84..535c5eeefd26 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/feature_store_settings.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/feature_store_settings.py @@ -9,13 +9,13 @@ from azure.ai.ml._restclient.v2022_12_01_preview.models import FeatureStoreSettings as 
RestFeatureStoreSettings from azure.ai.ml.entities._mixins import RestTranslatableMixin from azure.ai.ml._utils._experimental import experimental -from .compute_runtime import ComputeRuntime +from .compute_runtime import _ComputeRuntime @experimental -class FeatureStoreSettings(RestTranslatableMixin): +class _FeatureStoreSettings(RestTranslatableMixin): def __init__( - self, *, compute_runtime: Optional[ComputeRuntime] = None, offline_store_connection_name: Optional[str] = None + self, *, compute_runtime: Optional[_ComputeRuntime] = None, offline_store_connection_name: Optional[str] = None ): """ :keyword compute_runtime: @@ -23,21 +23,21 @@ def __init__( :keyword offline_store_connection_name: :paramtype offline_store_connection_name: str """ - self.compute_runtime = compute_runtime if compute_runtime else ComputeRuntime(spark_runtime_version="3.1.0") + self.compute_runtime = compute_runtime if compute_runtime else _ComputeRuntime(spark_runtime_version="3.1.0") self.offline_store_connection_name = offline_store_connection_name def _to_rest_object(self) -> RestFeatureStoreSettings: return RestFeatureStoreSettings( - compute_runtime=ComputeRuntime._to_rest_object(self.compute_runtime), + compute_runtime=_ComputeRuntime._to_rest_object(self.compute_runtime), offline_store_connection_name=self.offline_store_connection_name, online_store_connection_name=None, ) @classmethod - def _from_rest_object(cls, obj: RestFeatureStoreSettings) -> "FeatureStoreSettings": + def _from_rest_object(cls, obj: RestFeatureStoreSettings) -> "_FeatureStoreSettings": if not obj: return None - return FeatureStoreSettings( - compute_runtime=ComputeRuntime._from_rest_object(obj.compute_runtime), + return _FeatureStoreSettings( + compute_runtime=_ComputeRuntime._from_rest_object(obj.compute_runtime), offline_store_connection_name=obj.offline_store_connection_name, ) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/workspace.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/workspace.py index 651b8820429e..81cdc978343c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/workspace.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_workspace/workspace.py @@ -9,17 +9,18 @@ from typing import IO, AnyStr, Dict, Optional, Union from azure.ai.ml._restclient.v2022_12_01_preview.models import ManagedServiceIdentity as RestManagedServiceIdentity +from azure.ai.ml._restclient.v2022_12_01_preview.models import FeatureStoreSettings as RestFeatureStoreSettings from azure.ai.ml._restclient.v2022_12_01_preview.models import Workspace as RestWorkspace from azure.ai.ml._restclient.v2022_12_01_preview.models import ManagedNetworkSettings as RestManagedNetwork from azure.ai.ml._schema.workspace.workspace import WorkspaceSchema -from azure.ai.ml._utils.utils import dump_yaml_to_file +from azure.ai.ml._utils.utils import dump_yaml_to_file, is_private_preview_enabled from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY, WorkspaceResourceConstants from azure.ai.ml.entities._credentials import IdentityConfiguration from azure.ai.ml.entities._resource import Resource from azure.ai.ml.entities._util import load_from_dict from .customer_managed_key import CustomerManagedKey -from .feature_store_settings import FeatureStoreSettings +from .feature_store_settings import _FeatureStoreSettings from .networking import ManagedNetwork @@ -98,7 +99,7 @@ def __init__( self._discovery_url = kwargs.pop("discovery_url", None) self._mlflow_tracking_uri = kwargs.pop("mlflow_tracking_uri", None) 
self._kind = kwargs.pop("kind", "default") - self._feature_store_settings: Optional[FeatureStoreSettings] = kwargs.pop("feature_store_settings", None) + self._feature_store_settings: Optional[_FeatureStoreSettings] = kwargs.pop("feature_store_settings", None) super().__init__(name=name, description=description, tags=tags, **kwargs) self.display_name = display_name @@ -204,6 +205,15 @@ def _from_rest_object(cls, rest_obj: RestWorkspace) -> "Workspace": identity = IdentityConfiguration._from_workspace_rest_object( # pylint: disable=protected-access rest_obj.identity ) + feature_store_settings = None + if ( + is_private_preview_enabled() + and rest_obj.feature_store_settings + and isinstance(rest_obj.feature_store_settings, RestFeatureStoreSettings) + ): + feature_store_settings = _FeatureStoreSettings._from_rest_object( # pylint: disable=protected-access + rest_obj.feature_store_settings + ) return Workspace( name=rest_obj.name, id=rest_obj.id, @@ -226,9 +236,13 @@ def _from_rest_object(cls, rest_obj: RestWorkspace) -> "Workspace": identity=identity, primary_user_assigned_identity=rest_obj.primary_user_assigned_identity, managed_network=managed_network, + feature_store_settings=feature_store_settings, ) def _to_rest_object(self) -> RestWorkspace: + feature_store_Settings = None + if is_private_preview_enabled() and self._feature_store_settings: + feature_store_Settings = self._feature_store_settings._to_rest_object() # pylint: disable=protected-access return RestWorkspace( identity=self.identity._to_workspace_rest_object() # pylint: disable=protected-access @@ -251,4 +265,5 @@ def _to_rest_object(self) -> RestWorkspace: managed_network=self.managed_network._to_rest_object() # pylint: disable=protected-access if self.managed_network else None, # pylint: disable=protected-access + feature_store_Settings=feature_store_Settings, ) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py index 36e7e8a392eb..10ea66b496f7 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py @@ -27,9 +27,8 @@ ) from azure.ai.ml._utils._feature_set_utils import read_feature_set_metadata_contents from azure.ai.ml._utils._logger_utils import OpsLogger -from azure.ai.ml.entities._assets import FeatureSet +from azure.ai.ml.entities._assets._artifacts.feature_set import _FeatureSet from azure.ai.ml.entities._feature_set.featureset_spec import FeaturesetSpec -from azure.ai.ml._utils._experimental import experimental from azure.core.polling import LROPoller from azure.core.paging import ItemPaged @@ -37,8 +36,14 @@ module_logger = ops_logger.module_logger -@experimental class _FeatureSetOperations(_ScopeDependentOperations): + """_FeatureSetOperations. + + You should not instantiate this class directly. Instead, you should + create an MLClient instance that instantiates it for you and + attaches it as an attribute. + """ + def __init__( self, operation_scope: OperationScope, @@ -66,7 +71,7 @@ def list( *, name: Optional[str] = None, list_view_type: ListViewType = ListViewType.ACTIVE_ONLY, - ) -> ItemPaged[FeatureSet]: + ) -> ItemPaged[_FeatureSet]: """List the FeatureSet assets of the workspace. :param name: Name of a specific FeatureSet asset, optional. @@ -75,19 +80,19 @@ def list( Default: ACTIVE_ONLY. 
:type list_view_type: Optional[ListViewType] :return: An iterator like instance of FeatureSet objects - :rtype: ~azure.core.paging.ItemPaged[FeatureSet] + :rtype: ~azure.core.paging.ItemPaged[_FeatureSet] """ if name: return self._operation.list( workspace_name=self._workspace_name, name=name, - cls=lambda objs: [FeatureSet._from_rest_object(obj) for obj in objs], + cls=lambda objs: [_FeatureSet._from_rest_object(obj) for obj in objs], list_view_type=list_view_type, **self._scope_kwargs, ) return self._container_operation.list( workspace_name=self._workspace_name, - cls=lambda objs: [FeatureSet._from_container_rest_object(obj) for obj in objs], + cls=lambda objs: [_FeatureSet._from_container_rest_object(obj) for obj in objs], list_view_type=list_view_type, **self._scope_kwargs, ) @@ -102,7 +107,7 @@ def _get(self, name: str, version: str = None) -> FeaturesetVersion: ) # @monitor_with_activity(logger, "FeatureSet.Get", ActivityType.PUBLICAPI) - def get(self, *, name: str, version: Optional[str] = None, label: Optional[str] = None) -> FeatureSet: + def get(self, *, name: str, version: Optional[str] = None, label: Optional[str] = None) -> _FeatureSet: """Get the specified FeatureSet asset. :param name: Name of FeatureSet asset. @@ -114,7 +119,7 @@ def get(self, *, name: str, version: Optional[str] = None, label: Optional[str] :raises ~azure.ai.ml.exceptions.ValidationException: Raised if FeatureSet cannot be successfully identified and retrieved. Details will be provided in the error message. :return: FeatureSet asset object. - :rtype: ~azure.ai.ml.entities.FeatureSet + :rtype: ~azure.ai.ml.entities._FeatureSet """ try: if version and label: @@ -140,18 +145,18 @@ def get(self, *, name: str, version: Optional[str] = None, label: Optional[str] error_type=ValidationErrorType.MISSING_FIELD, ) featureset_version_resource = self._get(name, version) - return FeatureSet._from_rest_object(featureset_version_resource) + return _FeatureSet._from_rest_object(featureset_version_resource) except (ValidationException, SchemaValidationError) as ex: log_and_raise_error(ex) # @monitor_with_activity(logger, "FeatureSet.BeginCreateOrUpdate", ActivityType.PUBLICAPI) - def begin_create_or_update(self, featureset: FeatureSet) -> LROPoller[FeatureSet]: + def begin_create_or_update(self, featureset: _FeatureSet) -> LROPoller[_FeatureSet]: """Create or update FeatureSet :param featureset: FeatureSet definition. :type featureset: FeatureSet :return: An instance of LROPoller that returns a FeatureSet. - :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities.FeatureSet] + :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities._FeatureSet] """ featureset_spec = validate_and_get_feature_set_spec(featureset) @@ -163,7 +168,7 @@ def begin_create_or_update(self, featureset: FeatureSet) -> LROPoller[FeatureSet artifact=featureset, asset_operations=self, sas_uri=sas_uri, artifact_type=ErrorTarget.FEATURE_SET ) - featureset_resource = FeatureSet._to_rest_object(featureset) + featureset_resource = _FeatureSet._to_rest_object(featureset) return self._operation.begin_create_or_update( resource_group_name=self._resource_group_name, @@ -233,7 +238,7 @@ def restore( label=label, ) - def _get_latest_version(self, name: str) -> FeatureSet: + def _get_latest_version(self, name: str) -> _FeatureSet: """Returns the latest version of the asset with the given name. 
Latest is defined as the most recently created, not the most @@ -245,10 +250,10 @@ def _get_latest_version(self, name: str) -> FeatureSet: return self.get(name=name, version=latest_version) -def validate_and_get_feature_set_spec(featureset: FeatureSet) -> FeaturesetSpec: +def validate_and_get_feature_set_spec(featureset: _FeatureSet) -> FeaturesetSpec: # pylint: disable=no-member - if not featureset.specification and not featureset.specification.path: - msg = "Missing FeatureSet spec path. Path is required for featureset." + if not (featureset.specification and featureset.specification.path): + msg = "Missing FeatureSet specification path. Path is required for feature set." raise ValidationException( message=msg, no_personal_data_message=msg, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py index e1822855f2f3..aff12e9dc318 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py @@ -22,8 +22,7 @@ _resolve_label_to_asset, ) from azure.ai.ml._utils._logger_utils import OpsLogger -from azure.ai.ml.entities._feature_store_entity.feature_store_entity import FeatureStoreEntity -from azure.ai.ml._utils._experimental import experimental +from azure.ai.ml.entities._feature_store_entity.feature_store_entity import _FeatureStoreEntity from azure.core.polling import LROPoller from azure.core.paging import ItemPaged @@ -31,8 +30,14 @@ module_logger = ops_logger.module_logger -@experimental class _FeatureStoreEntityOperations(_ScopeDependentOperations): + """_FeatureStoreEntityOperations. + + You should not instantiate this class directly. Instead, you should + create an MLClient instance that instantiates it for you and + attaches it as an attribute. + """ + def __init__( self, operation_scope: OperationScope, @@ -58,7 +63,7 @@ def list( *, name: Optional[str] = None, list_view_type: ListViewType = ListViewType.ACTIVE_ONLY, - ) -> ItemPaged[FeatureStoreEntity]: + ) -> ItemPaged[_FeatureStoreEntity]: """List the FeatureStoreEntity assets of the workspace. :param name: Name of a specific FeatureStoreEntity asset, optional. @@ -67,19 +72,19 @@ def list( Default: ACTIVE_ONLY. 
:type list_view_type: Optional[ListViewType] :return: An iterator like instance of FeatureStoreEntity objects - :rtype: ~azure.core.paging.ItemPaged[FeatureStoreEntity] + :rtype: ~azure.core.paging.ItemPaged[_FeatureStoreEntity] """ if name: return self._operation.list( workspace_name=self._workspace_name, name=name, - cls=lambda objs: [FeatureStoreEntity._from_rest_object(obj) for obj in objs], + cls=lambda objs: [_FeatureStoreEntity._from_rest_object(obj) for obj in objs], list_view_type=list_view_type, **self._scope_kwargs, ) return self._container_operation.list( workspace_name=self._workspace_name, - cls=lambda objs: [FeatureStoreEntity._from_container_rest_object(obj) for obj in objs], + cls=lambda objs: [_FeatureStoreEntity._from_container_rest_object(obj) for obj in objs], list_view_type=list_view_type, **self._scope_kwargs, ) @@ -94,7 +99,7 @@ def _get(self, name: str, version: str = None) -> FeaturestoreEntityVersion: ) # @monitor_with_activity(logger, "FeatureStoreEntity.Get", ActivityType.PUBLICAPI) - def get(self, name: str, version: Optional[str] = None, label: Optional[str] = None) -> FeatureStoreEntity: + def get(self, name: str, version: Optional[str] = None, label: Optional[str] = None) -> _FeatureStoreEntity: """Get the specified FeatureStoreEntity asset. :param name: Name of FeatureStoreEntity asset. @@ -106,7 +111,7 @@ def get(self, name: str, version: Optional[str] = None, label: Optional[str] = N :raises ~azure.ai.ml.exceptions.ValidationException: Raised if FeatureStoreEntity cannot be successfully identified and retrieved. Details will be provided in the error message. :return: FeatureStoreEntity asset object. - :rtype: ~azure.ai.ml.entities.FeatureStoreEntity + :rtype: ~azure.ai.ml.entities._FeatureStoreEntity """ try: if version and label: @@ -132,20 +137,20 @@ def get(self, name: str, version: Optional[str] = None, label: Optional[str] = N error_type=ValidationErrorType.MISSING_FIELD, ) feature_store_entity_version_resource = self._get(name, version) - return FeatureStoreEntity._from_rest_object(feature_store_entity_version_resource) + return _FeatureStoreEntity._from_rest_object(feature_store_entity_version_resource) except (ValidationException, SchemaValidationError) as ex: log_and_raise_error(ex) # @monitor_with_activity(logger, "FeatureStoreEntity.BeginCreateOrUpdate", ActivityType.PUBLICAPI) - def begin_create_or_update(self, feature_store_entity: FeatureStoreEntity) -> LROPoller[FeatureStoreEntity]: + def begin_create_or_update(self, feature_store_entity: _FeatureStoreEntity) -> LROPoller[_FeatureStoreEntity]: """Create or update FeatureStoreEntity :param feature_store_entity: FeatureStoreEntity definition. - :type feature_store_entity: FeatureStoreEntity + :type feature_store_entity: _FeatureStoreEntity :return: An instance of LROPoller that returns a FeatureStoreEntity. - :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities.FeatureStoreEntity] + :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities._FeatureStoreEntity] """ - feature_store_entity_resource = FeatureStoreEntity._to_rest_object(feature_store_entity) + feature_store_entity_resource = _FeatureStoreEntity._to_rest_object(feature_store_entity) return self._operation.begin_create_or_update( resource_group_name=self._resource_group_name, @@ -215,7 +220,7 @@ def restore( label=label, ) - def _get_latest_version(self, name: str) -> FeatureStoreEntity: + def _get_latest_version(self, name: str) -> _FeatureStoreEntity: """Returns the latest version of the asset with the given name. 
Latest is defined as the most recently created, not the most diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py index bcbf25e922c2..10453a08d9d7 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py @@ -16,15 +16,14 @@ from azure.core.polling import LROPoller from azure.core.tracing.decorator import distributed_trace from azure.ai.ml._utils._logger_utils import OpsLogger -from azure.ai.ml.entities._feature_store.feature_store import FeatureStore -from azure.ai.ml.entities._workspace.feature_store_settings import FeatureStoreSettings -from azure.ai.ml.entities._feature_store.materialization_store import MaterializationStore +from azure.ai.ml.entities._feature_store.feature_store import _FeatureStore +from azure.ai.ml.entities._workspace.feature_store_settings import _FeatureStoreSettings +from azure.ai.ml.entities._feature_store.materialization_store import _MaterializationStore from azure.ai.ml.entities import ( ManagedIdentityConfiguration, IdentityConfiguration, WorkspaceConnection, ) -from azure.ai.ml._utils._experimental import experimental from azure.ai.ml.constants._common import Scope from azure.ai.ml.entities._feature_store._constants import ( OFFLINE_STORE_CONNECTION_NAME, @@ -40,7 +39,6 @@ module_logger = ops_logger.module_logger -@experimental class _FeatureStoreOperations(WorkspaceOperationsBase): """_FeatureStoreOperations. @@ -68,27 +66,27 @@ def __init__( self._workspace_connection_operation = service_client.workspace_connections # @monitor_with_activity(logger, "FeatureStore.List", ActivityType.PUBLICAPI) - def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[FeatureStore]: + def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[_FeatureStore]: """List all feature stores that the user has access to in the current resource group or subscription. :param scope: scope of the listing, "resource_group" or "subscription", defaults to "resource_group" :type scope: str, optional :return: An iterator like instance of FeatureStore objects - :rtype: ~azure.core.paging.ItemPaged[FeatureStore] + :rtype: ~azure.core.paging.ItemPaged[_FeatureStore] """ if scope == Scope.SUBSCRIPTION: return self._operation.list_by_subscription( cls=lambda objs: [ - FeatureStore._from_rest_object(filterObj) + _FeatureStore._from_rest_object(filterObj) for filterObj in filter(lambda ws: ws.kind.lower() == FEATURE_STORE_KIND, objs) ] ) return self._operation.list_by_resource_group( self._resource_group_name, cls=lambda objs: [ - FeatureStore._from_rest_object(filterObj) + _FeatureStore._from_rest_object(filterObj) for filterObj in filter(lambda ws: ws.kind.lower() == FEATURE_STORE_KIND, objs) ], ) @@ -96,20 +94,20 @@ def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[FeatureStore]: # @monitor_with_activity(logger, "FeatureStore.Get", ActivityType.PUBLICAPI) @distributed_trace # pylint: disable=arguments-renamed - def get(self, name: str, **kwargs: Dict) -> FeatureStore: + def get(self, name: str, **kwargs: Dict) -> _FeatureStore: """Get a feature store by name. :param name: Name of the feature store. :type name: str :return: The feature store with the provided name. 
- :rtype: FeatureStore + :rtype: _FeatureStore """ feature_store = None resource_group = kwargs.get("resource_group") or self._resource_group_name rest_workspace_obj = self._operation.get(resource_group, name) if rest_workspace_obj and rest_workspace_obj.kind and rest_workspace_obj.kind.lower() == FEATURE_STORE_KIND: - feature_store = FeatureStore._from_rest_object(rest_workspace_obj) + feature_store = _FeatureStore._from_rest_object(rest_workspace_obj) if feature_store: offline_Store_connection = None @@ -126,7 +124,7 @@ def get(self, name: str, **kwargs: Dict) -> FeatureStore: offline_Store_connection.properties and offline_Store_connection.properties.category == OFFLINE_STORE_CONNECTION_CATEGORY ): - feature_store.offline_store = MaterializationStore( + feature_store.offline_store = _MaterializationStore( type=OFFLINE_MATERIALIZATION_STORE_TYPE, target=offline_Store_connection.properties.target ) # materialization identity = identity when created through feature store operations @@ -145,10 +143,10 @@ def get(self, name: str, **kwargs: Dict) -> FeatureStore: # pylint: disable=arguments-differ def begin_create( self, - feature_store: FeatureStore, + feature_store: _FeatureStore, update_dependent_resources: bool = False, **kwargs: Dict, - ) -> LROPoller[FeatureStore]: + ) -> LROPoller[_FeatureStore]: """Create a new FeatureStore. Returns the feature store if already exists. @@ -157,7 +155,7 @@ def begin_create( :type feature store: FeatureStore :type update_dependent_resources: boolean :return: An instance of LROPoller that returns a FeatureStore. - :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities.FeatureStore] + :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities._FeatureStore] """ if feature_store.offline_store and feature_store.offline_store.type != OFFLINE_MATERIALIZATION_STORE_TYPE: raise ValidationError("offline store type should be azure_data_lake_gen2") @@ -181,11 +179,11 @@ def get_callback(): # pylint: disable=arguments-renamed def begin_update( self, - feature_store: FeatureStore, + feature_store: _FeatureStore, *, update_dependent_resources: bool = False, **kwargs: Dict, - ) -> LROPoller[FeatureStore]: + ) -> LROPoller[_FeatureStore]: """Update friendly name, description, materialization identities or tags of a feature store. :param feature store: FeatureStore resource. @@ -200,7 +198,7 @@ def begin_update( :param container_registry: Container registry resource for feature store. :type feature store: FeatureStore :return: An instance of LROPoller that returns a FeatureStore. 
- :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities.FeatureStore] + :rtype: ~azure.core.polling.LROPoller[~azure.ai.ml.entities._FeatureStore] """ resource_group = kwargs.get("resource_group") or self._resource_group_name rest_workspace_obj = self._operation.get(resource_group, feature_store.name) @@ -233,7 +231,7 @@ def begin_update( if not materialization_identity: raise ValidationError("Materialization identity is required to setup offline store connection") - feature_store_settings = FeatureStoreSettings._from_rest_object(rest_workspace_obj.feature_store_settings) + feature_store_settings = _FeatureStoreSettings._from_rest_object(rest_workspace_obj.feature_store_settings) if offline_store and materialization_identity: offline_store_connection_name = ( @@ -265,7 +263,7 @@ def begin_update( ) def deserialize_callback(rest_obj): - return FeatureStore._from_rest_object(rest_obj=rest_obj) + return _FeatureStore._from_rest_object(rest_obj=rest_obj) return super().begin_update( workspace=feature_store, diff --git a/sdk/ml/azure-ai-ml/tests/feature_set/unittests/test_feature_set_operations.py b/sdk/ml/azure-ai-ml/tests/feature_set/unittests/test_feature_set_operations.py new file mode 100644 index 000000000000..a2dfc6eb567f --- /dev/null +++ b/sdk/ml/azure-ai-ml/tests/feature_set/unittests/test_feature_set_operations.py @@ -0,0 +1,148 @@ +from typing import Iterable +from unittest.mock import Mock, patch + +import pytest +from test_utilities.constants import Test_Resource_Group, Test_Workspace_Name + +from azure.ai.ml._restclient.v2023_02_01_preview.models._models_py3 import ( + FeaturesetContainer, + FeaturesetContainerProperties, + FeaturesetVersion, + FeaturesetVersionProperties, +) +from azure.ai.ml._scope_dependent_operations import OperationConfig, OperationScope +from azure.ai.ml.entities._assets._artifacts.artifact import ArtifactStorageInfo +from azure.ai.ml.entities import _FeatureSet, _FeatureSetSpecification +from azure.ai.ml.operations import DatastoreOperations +from azure.ai.ml.operations._feature_set_operations import _FeatureSetOperations +from azure.core.paging import ItemPaged + + +@pytest.fixture +def mock_datastore_operation( + mock_workspace_scope: OperationScope, mock_operation_config: OperationConfig, mock_aml_services_2022_10_01: Mock +) -> DatastoreOperations: + yield DatastoreOperations( + operation_scope=mock_workspace_scope, + operation_config=mock_operation_config, + serviceclient_2022_10_01=mock_aml_services_2022_10_01, + ) + + +@pytest.fixture +def mock_feature_set_operations( + mock_workspace_scope: OperationScope, + mock_operation_config: OperationConfig, + mock_aml_services_2023_02_01_preview: Mock, + mock_datastore_operation: Mock, +) -> _FeatureSetOperations: + yield _FeatureSetOperations( + operation_scope=mock_workspace_scope, + operation_config=mock_operation_config, + service_client=mock_aml_services_2023_02_01_preview, + datastore_operations=mock_datastore_operation, + ) + + +# @pytest.fixture +def mock_artifact_storage(_one, _two, _three, **kwargs) -> Mock: + return ArtifactStorageInfo( + name="testFileData", + version="3", + relative_path="path", + datastore_arm_id="/subscriptions/mock/resourceGroups/mock/providers/Microsoft.MachineLearningServices/workspaces/mock/datastores/datastore_id", + container_name="containerName", + ) + + +@pytest.mark.unittest +@patch("azure.ai.ml._artifacts._artifact_utilities._upload_to_datastore", new=mock_artifact_storage) +@patch.object(_FeatureSet, "_from_rest_object", new=Mock()) +@patch.object(_FeatureSet, 
"_from_container_rest_object", new=Mock()) +@pytest.mark.data_experiences_test +class TestFeatureSetOperations: + def test_list(self, mock_feature_set_operations: _FeatureSetOperations) -> None: + mock_feature_set_operations._operation.list.return_value = [Mock(_FeatureSet) for _ in range(10)] + mock_feature_set_operations._container_operation.list.return_value = [Mock(_FeatureSet) for _ in range(10)] + result = mock_feature_set_operations.list() + assert isinstance(result, Iterable) + mock_feature_set_operations._container_operation.list.assert_called_once() + mock_feature_set_operations.list(name="random_name") + mock_feature_set_operations._operation.list.assert_called_once() + + def test_get_with_version(self, mock_feature_set_operations: _FeatureSetOperations) -> None: + name_only = "some_name" + version = "1" + featureset = _FeatureSet( + name=name_only, + version=version, + entities=["test_entity"], + specification=_FeatureSetSpecification(path="local/"), + ) + with patch.object(ItemPaged, "next"), patch.object(_FeatureSet, "_from_rest_object", return_value=featureset): + mock_feature_set_operations.get(name=name_only, version=version) + mock_feature_set_operations._operation.get.assert_called_once_with( + name=name_only, version=version, resource_group_name=Test_Resource_Group, workspace_name=Test_Workspace_Name + ) + + def test_get_no_version(self, mock_feature_set_operations: _FeatureSetOperations) -> None: + name = "random_name" + with pytest.raises(Exception) as ex: + mock_feature_set_operations.get(name=name) + assert "At least one required parameter is missing" in str(ex.value) + + def test_archive_version(self, mock_feature_set_operations: _FeatureSetOperations): + name = "random_name" + featureset_version = Mock(FeaturesetVersion(properties=Mock(FeaturesetVersionProperties(entities=["test"])))) + version = "1" + mock_feature_set_operations._operation.get.return_value = featureset_version + mock_feature_set_operations.archive(name=name, version=version) + mock_feature_set_operations._operation.create_or_update.assert_called_once_with( + name=name, + version=version, + workspace_name=mock_feature_set_operations._workspace_name, + body=featureset_version, + resource_group_name=mock_feature_set_operations._resource_group_name, + ) + + def test_archive_container(self, mock_feature_set_operations: _FeatureSetOperations): + name = "random_name" + featureset_container = Mock( + FeaturesetContainer(properties=Mock(FeaturesetContainerProperties(description="test"))) + ) + mock_feature_set_operations._container_operation.get.return_value = featureset_container + mock_feature_set_operations.archive(name=name) + mock_feature_set_operations._container_operation.create_or_update.assert_called_once_with( + name=name, + workspace_name=mock_feature_set_operations._workspace_name, + body=featureset_container, + resource_group_name=mock_feature_set_operations._resource_group_name, + ) + + def test_restore_version(self, mock_feature_set_operations: _FeatureSetOperations): + name = "random_name" + featureset_version = Mock(FeaturesetVersion(properties=Mock(FeaturesetVersionProperties(entities=["test"])))) + version = "1" + mock_feature_set_operations._operation.get.return_value = featureset_version + mock_feature_set_operations.restore(name=name, version=version) + mock_feature_set_operations._operation.create_or_update.assert_called_once_with( + name=name, + version=version, + workspace_name=mock_feature_set_operations._workspace_name, + body=featureset_version, + 
resource_group_name=mock_feature_set_operations._resource_group_name, + ) + + def test_restore_container(self, mock_feature_set_operations: _FeatureSetOperations): + name = "random_name" + featureset_container = Mock( + FeaturesetContainer(properties=Mock(FeaturesetContainerProperties(entities=["test"]))) + ) + mock_feature_set_operations._container_operation.get.return_value = featureset_container + mock_feature_set_operations.restore(name=name) + mock_feature_set_operations._container_operation.create_or_update.assert_called_once_with( + name=name, + workspace_name=mock_feature_set_operations._workspace_name, + body=featureset_container, + resource_group_name=mock_feature_set_operations._resource_group_name, + ) diff --git a/sdk/ml/azure-ai-ml/tests/feature_set/unittests/test_feature_set_schema.py b/sdk/ml/azure-ai-ml/tests/feature_set/unittests/test_feature_set_schema.py new file mode 100644 index 000000000000..60a2cd322a43 --- /dev/null +++ b/sdk/ml/azure-ai-ml/tests/feature_set/unittests/test_feature_set_schema.py @@ -0,0 +1,28 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +import yaml +import pytest + +from azure.ai.ml.entities._assets._artifacts.feature_set import _FeatureSet +from azure.ai.ml.entities._load_functions import _load_feature_set + + +@pytest.mark.unittest +@pytest.mark.data_experiences_test +class TestFeatureSetSchema: + def test_feature_set_load(self) -> None: + test_path = "./tests/test_configs/feature_set/feature_set_full.yaml" + with open(test_path, "r") as f: + target = yaml.safe_load(f) + with open(test_path, "r") as f: + featureset: _FeatureSet = _load_feature_set(source=test_path) + assert featureset.name == target["name"] + assert featureset.version == target["version"] + assert featureset.description == target["description"] + assert featureset.entities is not None + assert featureset.specification is not None + assert featureset.specification.path is not None + assert featureset.tags is not None + assert featureset.properties is not None diff --git a/sdk/ml/azure-ai-ml/tests/feature_store/unittests/test_feature_store_operations.py b/sdk/ml/azure-ai-ml/tests/feature_store/unittests/test_feature_store_operations.py new file mode 100644 index 000000000000..ee8179a0d574 --- /dev/null +++ b/sdk/ml/azure-ai-ml/tests/feature_store/unittests/test_feature_store_operations.py @@ -0,0 +1,103 @@ +from unittest.mock import DEFAULT, Mock + +import pytest +from pytest_mock import MockFixture + +from azure.ai.ml._scope_dependent_operations import OperationScope +from azure.ai.ml.entities import ( + _FeatureStore, + Workspace, +) +from azure.ai.ml.operations._feature_store_operations import _FeatureStoreOperations +from azure.core.polling import LROPoller + + +@pytest.fixture +def mock_credential() -> Mock: + yield Mock() + + +@pytest.fixture +def mock_feature_store_operation( + mock_workspace_scope: OperationScope, + mock_aml_services_2022_12_01_preview: Mock, + mock_machinelearning_client: Mock, + mock_credential: Mock, +) -> _FeatureStoreOperations: + yield _FeatureStoreOperations( + operation_scope=mock_workspace_scope, + service_client=mock_aml_services_2022_12_01_preview, + all_operations=mock_machinelearning_client._operation_container, + credentials=mock_credential, + ) + + +@pytest.mark.unittest +@pytest.mark.data_experiences_test +class TestFeatureStoreOperation: + @pytest.mark.parametrize("arg", ["resource_group", 
"subscription", "other_rand_str"]) + def test_list(self, arg: str, mock_feature_store_operation: _FeatureStoreOperations) -> None: + mock_feature_store_operation.list(scope=arg) + if arg == "subscription": + mock_feature_store_operation._operation.list_by_subscription.assert_called_once() + else: + mock_feature_store_operation._operation.list_by_resource_group.assert_called_once() + + def test_get(self, mock_feature_store_operation: _FeatureStoreOperations) -> None: + mock_feature_store_operation.get("random_name") + mock_feature_store_operation._operation.get.assert_called_once() + + def test_begin_create( + self, + mock_feature_store_operation: _FeatureStoreOperations, + mocker: MockFixture, + ): + mocker.patch("azure.ai.ml.operations._feature_store_operations._FeatureStoreOperations.get", return_value=None) + mocker.patch( + "azure.ai.ml.operations._feature_store_operations._FeatureStoreOperations._populate_arm_paramaters", + return_value=({}, {}, {}), + ) + mocker.patch("azure.ai.ml._arm_deployments.ArmDeploymentExecutor.deploy_resource", return_value=LROPoller) + mock_feature_store_operation.begin_create(feature_store=_FeatureStore(name="name")) + + def test_update(self, mock_feature_store_operation: _FeatureStoreOperations) -> None: + fs = _FeatureStore( + name="name", + description="description", + ) + + def outgoing_get_call(rg, name): + return Workspace(name=name, kind="featurestore")._to_rest_object() + + def outgoing_call(rg, name, params, polling, cls): + assert rg == "test_resource_group" + assert name == "name" + assert params.description == "description" + assert polling is True + assert callable(cls) + return DEFAULT + + mock_feature_store_operation._operation.get.side_effect = outgoing_get_call + mock_feature_store_operation._operation.begin_update.side_effect = outgoing_call + mock_feature_store_operation.begin_update(fs, update_dependent_resources=True) + mock_feature_store_operation._operation.begin_update.assert_called() + + def test_delete(self, mock_feature_store_operation: _FeatureStoreOperations, mocker: MockFixture) -> None: + def outgoing_call(rg, name): + return Workspace(name=name, kind="featurestore")._to_rest_object() + + mock_feature_store_operation._operation.get.side_effect = outgoing_call + mocker.patch("azure.ai.ml.operations._workspace_operations_base.delete_resource_by_arm_id", return_value=None) + mock_feature_store_operation.begin_delete("randstr", delete_dependent_resources=True) + mock_feature_store_operation._operation.begin_delete.assert_called_once() + + def test_delete_non_feature_store_kind( + self, mock_feature_store_operation: _FeatureStoreOperations, mocker: MockFixture + ) -> None: + def outgoing_call(rg, name): + return Workspace(name=name)._to_rest_object() + + mock_feature_store_operation._operation.get.side_effect = outgoing_call + mocker.patch("azure.ai.ml.operations._workspace_operations_base.delete_resource_by_arm_id", return_value=None) + with pytest.raises(Exception): + mock_feature_store_operation.begin_delete("randstr", delete_dependent_resources=True) diff --git a/sdk/ml/azure-ai-ml/tests/feature_store/unittests/test_feature_store_schema.py b/sdk/ml/azure-ai-ml/tests/feature_store/unittests/test_feature_store_schema.py new file mode 100644 index 000000000000..aeb3fb0f6949 --- /dev/null +++ b/sdk/ml/azure-ai-ml/tests/feature_store/unittests/test_feature_store_schema.py @@ -0,0 +1,27 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- + +import yaml +import pytest + + +from azure.ai.ml.entities._feature_store.feature_store import _FeatureStore +from azure.ai.ml.entities._load_functions import _load_feature_store + + +@pytest.mark.unittest +@pytest.mark.data_experiences_test +class TestFeatureStoreSchema: + def test_feature_store_load(self) -> None: + test_path = "./tests/test_configs/feature_store/feature_store_full.yaml" + with open(test_path, "r") as f: + target = yaml.safe_load(f) + with open(test_path, "r") as f: + feature_store: _FeatureStore = _load_feature_store(source=test_path) + assert feature_store.name == target["name"] + assert feature_store.description == target["description"] + assert feature_store.materialization_identity is not None + assert feature_store.offline_store is not None + assert feature_store.tags is not None + assert feature_store.properties is not None diff --git a/sdk/ml/azure-ai-ml/tests/feature_store_entity/unittests/test_feature_store_entity_operations.py b/sdk/ml/azure-ai-ml/tests/feature_store_entity/unittests/test_feature_store_entity_operations.py new file mode 100644 index 000000000000..7c6987e17aeb --- /dev/null +++ b/sdk/ml/azure-ai-ml/tests/feature_store_entity/unittests/test_feature_store_entity_operations.py @@ -0,0 +1,128 @@ +from typing import Iterable +from unittest.mock import Mock, patch + +import pytest +from test_utilities.constants import Test_Resource_Group, Test_Workspace_Name + +from azure.ai.ml._restclient.v2023_02_01_preview.models._models_py3 import ( + FeaturestoreEntityContainer, + FeaturestoreEntityContainerProperties, + FeaturestoreEntityVersion, + FeaturestoreEntityVersionProperties, +) +from azure.ai.ml._scope_dependent_operations import OperationConfig, OperationScope +from azure.ai.ml.entities import _FeatureStoreEntity, _DataColumn, _DataColumnType +from azure.ai.ml.operations._feature_store_entity_operations import _FeatureStoreEntityOperations +from azure.core.paging import ItemPaged + + +@pytest.fixture +def mock_feature_store_entity_operations( + mock_workspace_scope: OperationScope, + mock_operation_config: OperationConfig, + mock_aml_services_2023_02_01_preview: Mock, +) -> _FeatureStoreEntityOperations: + yield _FeatureStoreEntityOperations( + operation_scope=mock_workspace_scope, + operation_config=mock_operation_config, + service_client=mock_aml_services_2023_02_01_preview, + ) + + +@pytest.mark.unittest +@patch.object(_FeatureStoreEntity, "_from_rest_object", new=Mock()) +@patch.object(_FeatureStoreEntity, "_from_container_rest_object", new=Mock()) +@pytest.mark.data_experiences_test +class TestFeatureStoreEntityOperations: + def test_list(self, mock_feature_store_entity_operations: _FeatureStoreEntityOperations) -> None: + mock_feature_store_entity_operations._operation.list.return_value = [ + Mock(_FeatureStoreEntity) for _ in range(10) + ] + mock_feature_store_entity_operations._container_operation.list.return_value = [ + Mock(_FeatureStoreEntity) for _ in range(10) + ] + result = mock_feature_store_entity_operations.list() + assert isinstance(result, Iterable) + mock_feature_store_entity_operations._container_operation.list.assert_called_once() + mock_feature_store_entity_operations.list(name="random_name") + mock_feature_store_entity_operations._operation.list.assert_called_once() + + def test_get_with_version(self, mock_feature_store_entity_operations: _FeatureStoreEntityOperations) -> None: + name_only = "some_name" + version = "1" + featurestoreEntity = _FeatureStoreEntity( + 
name=name_only, version=version, index_columns=[_DataColumn(name="test", type=_DataColumnType.string)] + ) + with patch.object(ItemPaged, "next"), patch.object( + _FeatureStoreEntity, "_from_rest_object", return_value=featurestoreEntity + ): + mock_feature_store_entity_operations.get(name=name_only, version=version) + mock_feature_store_entity_operations._operation.get.assert_called_once_with( + name=name_only, version=version, resource_group_name=Test_Resource_Group, workspace_name=Test_Workspace_Name + ) + + def test_get_no_version(self, mock_feature_store_entity_operations: _FeatureStoreEntityOperations) -> None: + name = "random_name" + with pytest.raises(Exception) as ex: + mock_feature_store_entity_operations.get(name=name) + assert "At least one required parameter is missing" in str(ex.value) + + def test_archive_version(self, mock_feature_store_entity_operations: _FeatureStoreEntityOperations): + name = "random_name" + featureStoreEntity_version = Mock( + FeaturestoreEntityVersion(properties=Mock(FeaturestoreEntityVersionProperties())) + ) + version = "1" + mock_feature_store_entity_operations._operation.get.return_value = featureStoreEntity_version + mock_feature_store_entity_operations.archive(name=name, version=version) + mock_feature_store_entity_operations._operation.create_or_update.assert_called_once_with( + name=name, + version=version, + workspace_name=mock_feature_store_entity_operations._workspace_name, + body=featureStoreEntity_version, + resource_group_name=mock_feature_store_entity_operations._resource_group_name, + ) + + def test_archive_container(self, mock_feature_store_entity_operations: _FeatureStoreEntityOperations): + name = "random_name" + featureStoreEntity_container = Mock( + FeaturestoreEntityContainer(properties=Mock(FeaturestoreEntityContainerProperties(description="test"))) + ) + mock_feature_store_entity_operations._container_operation.get.return_value = featureStoreEntity_container + mock_feature_store_entity_operations.archive(name=name) + mock_feature_store_entity_operations._container_operation.create_or_update.assert_called_once_with( + name=name, + workspace_name=mock_feature_store_entity_operations._workspace_name, + body=featureStoreEntity_container, + resource_group_name=mock_feature_store_entity_operations._resource_group_name, + ) + + def test_restore_version(self, mock_feature_store_entity_operations: _FeatureStoreEntityOperations): + name = "random_name" + featureStoreEntity_version = Mock( + FeaturestoreEntityVersion(properties=Mock(FeaturestoreEntityVersionProperties())) + ) + version = "1" + mock_feature_store_entity_operations._operation.get.return_value = featureStoreEntity_version + mock_feature_store_entity_operations.restore(name=name, version=version) + mock_feature_store_entity_operations._operation.create_or_update.assert_called_once_with( + name=name, + version=version, + workspace_name=mock_feature_store_entity_operations._workspace_name, + body=featureStoreEntity_version, + resource_group_name=mock_feature_store_entity_operations._resource_group_name, + ) + + def test_restore_container(self, mock_feature_store_entity_operations: _FeatureStoreEntityOperations): + name = "random_name" + featureStoreEntity_container = Mock( + FeaturestoreEntityContainer(properties=Mock(FeaturestoreEntityContainerProperties())) + ) + mock_feature_store_entity_operations._container_operation.get.return_value = featureStoreEntity_container + mock_feature_store_entity_operations.restore(name=name) + 
mock_feature_store_entity_operations._container_operation.create_or_update.assert_called_once_with( + name=name, + workspace_name=mock_feature_store_entity_operations._workspace_name, + body=featureStoreEntity_container, + resource_group_name=mock_feature_store_entity_operations._resource_group_name, + ) diff --git a/sdk/ml/azure-ai-ml/tests/feature_store_entity/unittests/test_feature_store_entity_schema.py b/sdk/ml/azure-ai-ml/tests/feature_store_entity/unittests/test_feature_store_entity_schema.py new file mode 100644 index 000000000000..45e1555d2a10 --- /dev/null +++ b/sdk/ml/azure-ai-ml/tests/feature_store_entity/unittests/test_feature_store_entity_schema.py @@ -0,0 +1,27 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +import yaml +import pytest + +from azure.ai.ml.entities._feature_store_entity.feature_store_entity import _FeatureStoreEntity + +from azure.ai.ml.entities._load_functions import _load_feature_store_entity + + +@pytest.mark.unittest +@pytest.mark.data_experiences_test +class TestFeatureStoreEntitySchema: + def test_feature_store_entity_load(self) -> None: + test_path = "./tests/test_configs/feature_store_entity/feature_store_entity_full.yaml" + with open(test_path, "r") as f: + target = yaml.safe_load(f) + with open(test_path, "r") as f: + feature_store_entity: _FeatureStoreEntity = _load_feature_store_entity(source=test_path) + assert feature_store_entity.name == target["name"] + assert feature_store_entity.version == target["version"] + assert feature_store_entity.description == target["description"] + assert feature_store_entity.index_columns is not None + assert feature_store_entity.tags is not None + assert feature_store_entity.properties is not None
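
Reviewer note: a minimal end-to-end sketch of the private-preview surface this diff wires up (the underscore-prefixed MLClient properties and the _FeatureStore / _FeatureSet / _FeatureStoreEntity entities). It assumes the private-preview flag checked by is_private_preview_enabled() is enabled in the environment, and all subscription, resource group, name, entity, and spec-path values are placeholders; it illustrates the API shape shown in the hunks above and in the new unit tests, and is not a verified sample against the live service.

    # Illustrative sketch only; names and paths are placeholders.
    from azure.identity import DefaultAzureCredential

    from azure.ai.ml import MLClient
    from azure.ai.ml.entities import (
        _DataColumn,
        _DataColumnType,
        _FeatureSet,
        _FeatureSetSpecification,
        _FeatureStore,
        _FeatureStoreEntity,
    )

    ml_client = MLClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<feature-store-name>",
    )

    # Provision a feature store (a workspace whose kind is "featurestore").
    # The _feature_stores property raises if the private-preview flag is off.
    store_poller = ml_client._feature_stores.begin_create(
        feature_store=_FeatureStore(name="<feature-store-name>"),
    )
    store_poller.result()

    # Register an entity that defines the index (join key) columns,
    # mirroring the construction used in the new unit tests.
    entity = _FeatureStoreEntity(
        name="account",
        version="1",
        index_columns=[_DataColumn(name="account_id", type=_DataColumnType.string)],
    )
    ml_client._feature_store_entities.begin_create_or_update(entity)

    # Register a feature set; begin_create_or_update validates and uploads the
    # specification folder referenced by _FeatureSetSpecification.path.
    feature_set = _FeatureSet(
        name="account_features",
        version="1",
        entities=["account"],  # placeholder entity reference
        specification=_FeatureSetSpecification(path="./featuresets/account_features/spec"),
    )
    ml_client._feature_sets.begin_create_or_update(feature_set)

    # Read back by name and version (get() is keyword-only for feature sets).
    retrieved = ml_client._feature_sets.get(name="account_features", version="1")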