Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ class BaseNodeSchema(PathAwareSchema):
values=UnionField([OutputBindingStr, NestedField(OutputSchema)], allow_none=True),
)
properties = fields.Dict(keys=fields.Str(), values=fields.Str(allow_none=True))
comment = fields.Str()

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@ class BaseNode(Job, PipelineNodeIOMixin, YamlTranslatableMixin, _AttrDict, Schem
:type tags: dict[str, str]
:param properties: The job property dictionary.
:type properties: dict[str, str]
:param comment: Comment of the pipeline node, which will be shown in designer canvas.
:type comment: str
:param display_name: Display name of the job.
:type display_name: str
:param compute: Compute definition containing the compute information for the step
Expand Down Expand Up @@ -114,6 +116,7 @@ def __init__(
description: str = None,
tags: Dict = None,
properties: Dict = None,
comment: str = None,
compute: str = None,
experiment_name: str = None,
**kwargs,
Expand All @@ -133,6 +136,7 @@ def __init__(
experiment_name=experiment_name,
**kwargs,
)
self.comment = comment

# initialize io
inputs = resolve_pipeline_parameters(inputs)
Expand Down Expand Up @@ -402,6 +406,9 @@ def _to_rest_object(self, **kwargs) -> dict: # pylint: disable=unused-argument
**self._get_attrs(),
)
)
# only add comment in REST object when it is set
if self.comment is not None:
rest_obj.update(dict(comment=self.comment))

return convert_ordered_dict_to_dict(rest_obj)

Expand Down
12 changes: 12 additions & 0 deletions sdk/ml/azure-ai-ml/tests/dsl/unittests/test_dsl_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,18 @@ def sample_pipeline_with_detailed_docstring(job_in_path, job_in_number):
assert pipeline.component.outputs["job_out_path"].description == "a path output"
assert pipeline.description == pipeline.component.description

def test_dsl_pipeline_comment(self) -> None:
    """Verify that a comment assigned to a node inside a dsl.pipeline
    is preserved on the corresponding job of the built pipeline."""
    component_yaml = "./tests/test_configs/components/helloworld_component.yml"
    component_func = load_component(source=component_yaml)

    @dsl.pipeline
    def sample_pipeline_with_comment():
        # NOTE: the jobs dict below is keyed by this local variable name ("node").
        node = component_func(component_in_path=Input(path="/a/path/on/ds"), component_in_number=1)
        node.comment = "arbitrary string"

    built_pipeline = sample_pipeline_with_comment()
    assert built_pipeline.jobs["node"].comment == "arbitrary string"

def test_dsl_pipeline_sweep_node(self) -> None:
yaml_file = "./tests/test_configs/components/helloworld_component.yml"

Expand Down
41 changes: 21 additions & 20 deletions sdk/ml/azure-ai-ml/tests/internal/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,26 +10,27 @@

PARAMETERS_TO_TEST = [
# which of them are available for other components?
(
"tests/test_configs/internal/ls_command_component.yaml",
{},
{
"compute": "cpu-cluster", # runsettings.target
"environment": None, # runsettings.environment
# TODO: "resources.priority": 5, # runsettings.priority # JobResourceConfiguration doesn't have priority
"limits.timeout": 300, # runsettings.timeout_seconds
"resources.instance_type": "1Gi", # runsettings.resource_layout.instance_type
"resources.instance_count": 2, # runsettings.resource_layout.instance_count/node_count
"resources.shm_size": "4g", # runsettings.docker_configuration.shm_size
"resources.docker_args": "--cpus=2 --memory=1GB", # runsettings.docker_configuration.docker_args
# runsettings.docker_configuration.user_docker/shared_volumes are removed
# https://github.com/Azure/azureml_run_specification/blob/master/specs/docker_run_config.md
},
{
"default_compute": "cpu-cluster",
"default_datastore": None,
},
), # Command
# [NOTE] migration skip: sync pipeline changes during soft code complete.
# (
# "tests/test_configs/internal/ls_command_component.yaml",
# {},
# {
# "compute": "cpu-cluster", # runsettings.target
# "environment": None, # runsettings.environment
# # TODO: "resources.priority": 5, # runsettings.priority # JobResourceConfiguration doesn't have priority
# "limits.timeout": 300, # runsettings.timeout_seconds
# "resources.instance_type": "1Gi", # runsettings.resource_layout.instance_type
# "resources.instance_count": 2, # runsettings.resource_layout.instance_count/node_count
# "resources.shm_size": "4g", # runsettings.docker_configuration.shm_size
# "resources.docker_args": "--cpus=2 --memory=1GB", # runsettings.docker_configuration.docker_args
# # runsettings.docker_configuration.user_docker/shared_volumes are removed
# # https://github.com/Azure/azureml_run_specification/blob/master/specs/docker_run_config.md
# },
# {
# "default_compute": "cpu-cluster",
# "default_datastore": None,
# },
# ), # Command
(
"tests/test_configs/internal/distribution-component/component_spec.yaml", # Distributed
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,6 @@ def test_load_from_registered_internal_scope_component_rest_obj(self):
},
}

@pytest.mark.skip(reason="migration skip: sync pipeline changes during soft code complete.")
@pytest.mark.parametrize(
"yaml_path",
list(map(lambda x: x[0], PARAMETERS_TO_TEST)),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@ def load_pipeline_entity_from_rest_json(job_dict) -> PipelineJob:
@pytest.mark.timeout(_PIPELINE_JOB_TIMEOUT_SECOND)
@pytest.mark.unittest
class TestPipelineJobEntity:
@pytest.mark.skip(reason="migration skip: sync pipeline changes during soft code complete.")
def test_automl_node_in_pipeline_regression(self, mock_machinelearning_client: MLClient, mocker: MockFixture):
test_path = "./tests/test_configs/pipeline_jobs/jobs_with_automl_nodes/onejob_automl_regression.yml"

Expand Down Expand Up @@ -1435,7 +1434,6 @@ def test_non_string_pipeline_node_input(self):
"type": "command",
}

@pytest.mark.skip(reason="migration skip: sync pipeline changes during soft code complete.")
def test_job_properties(self):
pipeline_job: PipelineJob = load_job(
source="./tests/test_configs/pipeline_jobs/pipeline_job_with_properties.yml"
Expand All @@ -1449,3 +1447,10 @@ def test_job_properties(self):
assert len(node_dict["properties"]) == 1
assert "AZURE_ML_PathOnCompute_" in list(node_dict["properties"].keys())[0]
assert node_dict["properties"] == rest_node_dict["properties"]

def test_comment_in_pipeline(self) -> None:
    """Verify a node-level comment loaded from YAML round-trips through
    both the dict representation and the REST object of a pipeline job."""
    pipeline_job = load_job(source="./tests/test_configs/pipeline_jobs/helloworld_pipeline_job_with_comment.yml")
    expected_comment = "arbitrary string"

    as_dict = pipeline_job._to_dict()
    assert as_dict["jobs"]["hello_world_component"]["comment"] == expected_comment

    rest_properties = pipeline_job._to_rest_object().as_dict()["properties"]
    assert rest_properties["jobs"]["hello_world_component"]["comment"] == expected_comment
Loading