feat: Support custom containers in CustomJob.from_local_script #1483

Merged
merged 9 commits on Jul 23, 2022
Changes from 4 commits
28 changes: 27 additions & 1 deletion google/cloud/aiplatform/jobs.py
@@ -1388,7 +1388,10 @@ def from_local_script(
spec["container_spec"] = {
"image_uri": reduction_server_container_uri,
}
else:
## check if the container is pre-built
elif ("docker.pkg.dev/vertex-ai/" in container_uri) or (
"gcr.io/cloud-aiplatform/" in container_uri
):
spec["python_package_spec"] = {
"executor_image_uri": container_uri,
"python_module": python_packager.module_name,
@@ -1403,6 +1406,29 @@ def from_local_script(
{"name": key, "value": value}
for key, value in environment_variables.items()
]
else:
command = [
"sh",
"-c",
"\npip3 install -q --user --upgrade --no-warn-script-location gsutil"
+ f"\ngsutil -q cp {package_gcs_uri} ."
+ f"\npip3 install -q --user {package_gcs_uri[len(staging_bucket)+1:]}"
+ f"\npython3 -m {python_packager.module_name}",
]

spec["container_spec"] = {
"image_uri": container_uri,
"command": command,
}

if args:
spec["container_spec"]["args"] = args

if environment_variables:
spec["container_spec"]["env"] = [
{"name": key, "value": value}
for key, value in environment_variables.items()
]

return cls(
display_name=display_name,
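For context, the sketch below shows the kind of call this change enables: passing an arbitrary custom training image to CustomJob.from_local_script instead of a Vertex AI prebuilt container. It is a minimal usage sketch, not code from this PR — the project, bucket, script, and image names are placeholders.

from google.cloud import aiplatform

# Placeholder values — substitute your own project, region, bucket, and image.
aiplatform.init(
    project="my-project",
    location="us-central1",
    staging_bucket="gs://my-staging-bucket",
)

# With this change, container_uri may reference any image, not only the
# prebuilt images under gcr.io/cloud-aiplatform/ or docker.pkg.dev/vertex-ai/.
# The local script is still packaged and staged to the staging bucket; the
# generated container_spec then installs that package inside the custom image
# and runs it as a Python module.
job = aiplatform.CustomJob.from_local_script(
    display_name="custom-container-job",
    script_path="task.py",
    container_uri="us-docker.pkg.dev/my-project/my-repo/trainer:latest",
    requirements=["pandas", "scikit-learn"],
    args=["--epochs", "10"],
    environment_variables={"MY_VAR": "value"},
)

job.run(sync=True)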
201 changes: 159 additions & 42 deletions tests/unit/aiplatform/test_custom_job.py
@@ -50,6 +50,7 @@
_TEST_ENABLE_WEB_ACCESS = True
_TEST_WEB_ACCESS_URIS = {"workerpool0-0": "uri"}
_TEST_TRAINING_CONTAINER_IMAGE = "gcr.io/test-training/container:image"
_TEST_PREBUILT_CONTAINER_IMAGE = "gcr.io/cloud-aiplatform/container:image"

_TEST_RUN_ARGS = ["-v", "0.1", "--test=arg"]

@@ -70,6 +71,24 @@
}
]

_TEST_PYTHON_PACKAGE_SPEC = gca_custom_job_compat.PythonPackageSpec(
executor_image_uri=_TEST_PREBUILT_CONTAINER_IMAGE,
package_uris=[test_training_jobs._TEST_OUTPUT_PYTHON_PACKAGE_PATH],
python_module=test_training_jobs._TEST_MODULE_NAME,
)

_TEST_CONTAINER_SPEC = gca_custom_job_compat.ContainerSpec(
image_uri=_TEST_TRAINING_CONTAINER_IMAGE,
command=[
"sh",
"-c",
"\npip3 install -q --user --upgrade --no-warn-script-location gsutil"
+ f"\ngsutil -q cp {test_training_jobs._TEST_OUTPUT_PYTHON_PACKAGE_PATH} ."
+ "\npip3 install -q --user trainer.tar.gz"
+ f"\npython3 -m {test_training_jobs._TEST_MODULE_NAME}",
],
)

_TEST_STAGING_BUCKET = "gs://test-staging-bucket"
_TEST_BASE_OUTPUT_DIR = f"{_TEST_STAGING_BUCKET}/{_TEST_DISPLAY_NAME}"

@@ -532,7 +551,41 @@ def test_get_custom_job(self, get_custom_job_mock):

@pytest.mark.usefixtures("mock_python_package_to_gcs")
@pytest.mark.parametrize("sync", [True, False])
def test_create_from_local_script(
def test_create_from_local_script_prebuilt_container(
self, get_custom_job_mock, create_custom_job_mock, sync
):
aiplatform.init(
project=_TEST_PROJECT,
location=_TEST_LOCATION,
staging_bucket=_TEST_STAGING_BUCKET,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)

# configuration on this is tested in test_training_jobs.py
job = aiplatform.CustomJob.from_local_script(
display_name=_TEST_DISPLAY_NAME,
script_path=test_training_jobs._TEST_LOCAL_SCRIPT_FILE_NAME,
container_uri=_TEST_PREBUILT_CONTAINER_IMAGE,
base_output_dir=_TEST_BASE_OUTPUT_DIR,
labels=_TEST_LABELS,
)

assert (
job.job_spec.worker_pool_specs[0].python_package_spec
== _TEST_PYTHON_PACKAGE_SPEC
)

job.run(sync=sync)

job.wait()

assert (
job._gca_resource.state == gca_job_state_compat.JobState.JOB_STATE_SUCCEEDED
)

@pytest.mark.usefixtures("mock_python_package_to_gcs")
@pytest.mark.parametrize("sync", [True, False])
def test_create_from_local_script_custom_container(
self, get_custom_job_mock, create_custom_job_mock, sync
):
aiplatform.init(
@@ -551,6 +604,8 @@ def test_create_from_local_script(
labels=_TEST_LABELS,
)

assert job.job_spec.worker_pool_specs[0].container_spec == _TEST_CONTAINER_SPEC

job.run(sync=sync)

job.wait()
@@ -579,6 +634,109 @@ def test_create_from_local_script_raises_with_no_staging_bucket(
container_uri=_TEST_TRAINING_CONTAINER_IMAGE,
)

@pytest.mark.usefixtures("mock_python_package_to_gcs")
@pytest.mark.parametrize("sync", [True, False])
def test_create_from_local_script_prebuilt_container_with_all_args(
self, get_custom_job_mock, create_custom_job_mock, sync
):
aiplatform.init(
project=_TEST_PROJECT,
location=_TEST_LOCATION,
staging_bucket=_TEST_STAGING_BUCKET,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)

# configuration on this is tested in test_training_jobs.py
job = aiplatform.CustomJob.from_local_script(
display_name=_TEST_DISPLAY_NAME,
script_path=test_training_jobs._TEST_LOCAL_SCRIPT_FILE_NAME,
container_uri=_TEST_PREBUILT_CONTAINER_IMAGE,
args=_TEST_RUN_ARGS,
requirements=test_training_jobs._TEST_REQUIREMENTS,
environment_variables=test_training_jobs._TEST_ENVIRONMENT_VARIABLES,
replica_count=test_training_jobs._TEST_REPLICA_COUNT,
machine_type=test_training_jobs._TEST_MACHINE_TYPE,
accelerator_type=test_training_jobs._TEST_ACCELERATOR_TYPE,
accelerator_count=test_training_jobs._TEST_ACCELERATOR_COUNT,
boot_disk_type=test_training_jobs._TEST_BOOT_DISK_TYPE,
boot_disk_size_gb=test_training_jobs._TEST_BOOT_DISK_SIZE_GB,
reduction_server_replica_count=test_training_jobs._TEST_REDUCTION_SERVER_REPLICA_COUNT,
reduction_server_machine_type=test_training_jobs._TEST_REDUCTION_SERVER_MACHINE_TYPE,
reduction_server_container_uri=test_training_jobs._TEST_REDUCTION_SERVER_CONTAINER_URI,
base_output_dir=_TEST_BASE_OUTPUT_DIR,
labels=_TEST_LABELS,
)

expected_python_package_spec = _TEST_PYTHON_PACKAGE_SPEC
expected_python_package_spec.args = _TEST_RUN_ARGS
expected_python_package_spec.env = [
{"name": key, "value": value}
for key, value in test_training_jobs._TEST_ENVIRONMENT_VARIABLES.items()
]

assert (
job.job_spec.worker_pool_specs[0].python_package_spec
== expected_python_package_spec
)
job.run(sync=sync)

job.wait()

assert (
job._gca_resource.state == gca_job_state_compat.JobState.JOB_STATE_SUCCEEDED
)

@pytest.mark.usefixtures("mock_python_package_to_gcs")
@pytest.mark.parametrize("sync", [True, False])
def test_create_from_local_script_custom_container_with_all_args(
self, get_custom_job_mock, create_custom_job_mock, sync
):
aiplatform.init(
project=_TEST_PROJECT,
location=_TEST_LOCATION,
staging_bucket=_TEST_STAGING_BUCKET,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)

# configuration on this is tested in test_training_jobs.py
job = aiplatform.CustomJob.from_local_script(
display_name=_TEST_DISPLAY_NAME,
script_path=test_training_jobs._TEST_LOCAL_SCRIPT_FILE_NAME,
container_uri=_TEST_TRAINING_CONTAINER_IMAGE,
args=_TEST_RUN_ARGS,
requirements=test_training_jobs._TEST_REQUIREMENTS,
environment_variables=test_training_jobs._TEST_ENVIRONMENT_VARIABLES,
replica_count=test_training_jobs._TEST_REPLICA_COUNT,
machine_type=test_training_jobs._TEST_MACHINE_TYPE,
accelerator_type=test_training_jobs._TEST_ACCELERATOR_TYPE,
accelerator_count=test_training_jobs._TEST_ACCELERATOR_COUNT,
boot_disk_type=test_training_jobs._TEST_BOOT_DISK_TYPE,
boot_disk_size_gb=test_training_jobs._TEST_BOOT_DISK_SIZE_GB,
reduction_server_replica_count=test_training_jobs._TEST_REDUCTION_SERVER_REPLICA_COUNT,
reduction_server_machine_type=test_training_jobs._TEST_REDUCTION_SERVER_MACHINE_TYPE,
reduction_server_container_uri=test_training_jobs._TEST_REDUCTION_SERVER_CONTAINER_URI,
base_output_dir=_TEST_BASE_OUTPUT_DIR,
labels=_TEST_LABELS,
)

expected_container_spec = _TEST_CONTAINER_SPEC
expected_container_spec.args = _TEST_RUN_ARGS
expected_container_spec.env = [
{"name": key, "value": value}
for key, value in test_training_jobs._TEST_ENVIRONMENT_VARIABLES.items()
]

assert (
job.job_spec.worker_pool_specs[0].container_spec == expected_container_spec
)
job.run(sync=sync)

job.wait()

assert (
job._gca_resource.state == gca_job_state_compat.JobState.JOB_STATE_SUCCEEDED
)

@pytest.mark.parametrize("sync", [True, False])
def test_create_custom_job_with_enable_web_access(
self,
@@ -722,47 +880,6 @@ def test_create_custom_job_without_base_output_dir(
f"{_TEST_STAGING_BUCKET}/aiplatform-custom-job"
)

@pytest.mark.usefixtures("mock_python_package_to_gcs")
@pytest.mark.parametrize("sync", [True, False])
def test_create_from_local_script_with_all_args(
self, get_custom_job_mock, create_custom_job_mock, sync
):
aiplatform.init(
project=_TEST_PROJECT,
location=_TEST_LOCATION,
staging_bucket=_TEST_STAGING_BUCKET,
encryption_spec_key_name=_TEST_DEFAULT_ENCRYPTION_KEY_NAME,
)

# configuration on this is tested in test_training_jobs.py
job = aiplatform.CustomJob.from_local_script(
display_name=_TEST_DISPLAY_NAME,
script_path=test_training_jobs._TEST_LOCAL_SCRIPT_FILE_NAME,
container_uri=_TEST_TRAINING_CONTAINER_IMAGE,
args=_TEST_RUN_ARGS,
requirements=test_training_jobs._TEST_REQUIREMENTS,
environment_variables=test_training_jobs._TEST_ENVIRONMENT_VARIABLES,
replica_count=test_training_jobs._TEST_REPLICA_COUNT,
machine_type=test_training_jobs._TEST_MACHINE_TYPE,
accelerator_type=test_training_jobs._TEST_ACCELERATOR_TYPE,
accelerator_count=test_training_jobs._TEST_ACCELERATOR_COUNT,
boot_disk_type=test_training_jobs._TEST_BOOT_DISK_TYPE,
boot_disk_size_gb=test_training_jobs._TEST_BOOT_DISK_SIZE_GB,
reduction_server_replica_count=test_training_jobs._TEST_REDUCTION_SERVER_REPLICA_COUNT,
reduction_server_machine_type=test_training_jobs._TEST_REDUCTION_SERVER_MACHINE_TYPE,
reduction_server_container_uri=test_training_jobs._TEST_REDUCTION_SERVER_CONTAINER_URI,
base_output_dir=_TEST_BASE_OUTPUT_DIR,
labels=_TEST_LABELS,
)

job.run(sync=sync)

job.wait()

assert (
job._gca_resource.state == gca_job_state_compat.JobState.JOB_STATE_SUCCEEDED
)

@pytest.mark.usefixtures("get_custom_job_mock", "create_custom_job_mock")
def test_check_custom_job_availability(self):
aiplatform.init(
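As a reading aid for _TEST_CONTAINER_SPEC above, here is a sketch of how the command assembled in jobs.py expands for the test fixtures. The package URI and staging bucket come from the test constants; the module name below is illustrative, since _TEST_MODULE_NAME is defined in test_training_jobs.py and not shown in this diff.

# Sketch only: expanding the command list built by the new custom-container branch.
package_gcs_uri = "gs://test-staging-bucket/trainer.tar.gz"   # staged package (test fixture)
staging_bucket = "gs://test-staging-bucket"                    # from aiplatform.init
module_name = "trainer.task"                                   # illustrative module name

command = [
    "sh",
    "-c",
    "\npip3 install -q --user --upgrade --no-warn-script-location gsutil"
    + f"\ngsutil -q cp {package_gcs_uri} ."
    + f"\npip3 install -q --user {package_gcs_uri[len(staging_bucket) + 1:]}"
    + f"\npython3 -m {module_name}",
]

# package_gcs_uri[len(staging_bucket) + 1:] strips "gs://test-staging-bucket/",
# leaving "trainer.tar.gz" — the local filename created by the gsutil cp step above.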
2 changes: 1 addition & 1 deletion tests/unit/aiplatform/test_training_jobs.py
@@ -159,7 +159,7 @@
_TEST_MODEL_SERVING_CONTAINER_PORTS = [8888, 10000]
_TEST_MODEL_DESCRIPTION = "test description"

_TEST_OUTPUT_PYTHON_PACKAGE_PATH = "gs://test/ouput/python/trainer.tar.gz"
_TEST_OUTPUT_PYTHON_PACKAGE_PATH = "gs://test-staging-bucket/trainer.tar.gz"
_TEST_PYTHON_MODULE_NAME = "aiplatform.task"

_TEST_MODEL_NAME = f"projects/{_TEST_PROJECT}/locations/us-central1/models/{_TEST_ID}"