@@ -233,17 +233,24 @@ def format_options_name(operation):
class BatchArgumentTree(object):
"""Dependency tree parser for arguments of complex objects"""

- def __init__(self, validator):
+ def __init__(self, validator, silent):
self._arg_tree = {}
self._request_param = {}
self._custom_validator = validator
+ self._silent_args = silent
self.done = False

def __iter__(self):
"""Iterate over arguments"""
for arg, details in self._arg_tree.items():
yield arg, details

+ def _is_silent(self, name):
+ """Whether argument should not be exposed"""
+ arg = self._arg_tree[name]
+ full_path = full_name(arg)
+ return arg['path'] in self._silent_args or full_path in self._silent_args
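Here `full_name(arg)` presumably resolves the argument's full dotted path, so an entry in `silent` can match either the short `path` or the fully qualified name; e.g. `silent=['pool.virtual_machine_configuration.image_reference']` as registered for `batch pool create` below.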

def _is_bool(self, name):
"""Whether argument value is a boolean"""
return self._arg_tree[name]['type'] == 'bool'
@@ -379,6 +386,13 @@ def compile_args(self):
elif self._is_duration(name):
details['options']['type'] = validators.duration_format
self._help(name, "Expected format is an ISO-8601 duration.")
+ elif self._is_silent(name):
+ import argparse
+ from azure.cli.core.commands.parameters import IgnoreAction
+ details['options']['nargs'] = '?'
+ details['options']['help'] = argparse.SUPPRESS
+ details['options']['required'] = False
+ details['options']['action'] = IgnoreAction
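Net effect of this new branch: a silent argument is still accepted by the parser (nargs='?', required=False) but is hidden from --help output via argparse.SUPPRESS and bound to IgnoreAction, which, going by its name, discards any supplied value rather than storing it.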
yield (name, CliCommandArgument(dest=name, **details['options']))

def existing(self, name):
@@ -461,12 +475,13 @@ class AzureBatchDataPlaneCommand(object):
# pylint:disable=too-many-instance-attributes, too-few-public-methods

def __init__(self, module_name, name, operation, factory, transform_result, # pylint:disable=too-many-arguments
- table_transformer, flatten, ignore, validator):
+ table_transformer, flatten, ignore, validator, silent):

if not isinstance(operation, string_types):
raise ValueError("Operation must be a string. Got '{}'".format(operation))

self.flatten = flatten # Number of object levels to flatten
+ self.silent = silent if silent else []
self.ignore = list(IGNORE_PARAMETERS) # Parameters to ignore
if ignore:
self.ignore.extend(ignore)
@@ -751,7 +766,7 @@ def _load_transformed_arguments(self, handler):
from azure.cli.core.commands.parameters import file_type
from argcomplete.completers import FilesCompleter, DirectoriesCompleter

- self.parser = BatchArgumentTree(self.validator)
+ self.parser = BatchArgumentTree(self.validator, self.silent)
self._load_options_model(handler)
for arg in extract_args_from_signature(handler):
arg_type = find_param_type(handler, arg[0])
@@ -815,11 +830,11 @@ def _load_transformed_arguments(self, handler):

def cli_batch_data_plane_command(name, operation, client_factory, transform=None, # pylint:disable=too-many-arguments
table_transformer=None, flatten=FLATTEN,
- ignore=None, validator=None):
+ ignore=None, validator=None, silent=None):
""" Registers an Azure CLI Batch Data Plane command. These commands must respond to a
challenge from the service when they make requests. """
- command = AzureBatchDataPlaneCommand(__name__, name, operation, client_factory,
- transform, table_transformer, flatten, ignore, validator)
+ command = AzureBatchDataPlaneCommand(__name__, name, operation, client_factory, transform,
+ table_transformer, flatten, ignore, validator, silent)

# add parameters required to create a batch client
group_name = 'Batch Account'
@@ -276,6 +276,21 @@ def validate_pool_settings(ns, parser):
in the add pool request body.
"""
if not ns.json_file:
+ if ns.node_agent_sku_id and not ns.image:
+ raise ValueError("Missing required argument: --image")
+ if ns.image:
+ ns.version = 'latest'
+ try:
+ ns.publisher, ns.offer, ns.sku = ns.image.split(':', 2)
+ except ValueError:
+ message = ("Incorrect format for VM image URN. Should be in the format: \n"
+ "'publisher:offer:sku[:version]'")
+ raise ValueError(message)
+ try:
+ ns.sku, ns.version = ns.sku.split(':', 1)
+ except ValueError:
+ pass
+ del ns.image
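As a rough walk-through of the parsing above, using the URN from the updated test at the bottom of this diff:

    ns.image = 'Canonical:UbuntuServer:16-LTS:latest'
    ns.image.split(':', 2)         # ['Canonical', 'UbuntuServer', '16-LTS:latest'] -> publisher, offer, sku
    '16-LTS:latest'.split(':', 1)  # ['16-LTS', 'latest'] -> sku, version

A URN with fewer than three segments fails the first unpacking and raises the formatted ValueError; a three-segment URN makes the second split fail quietly, keeping the default version 'latest'.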
groups = ['pool.cloud_service_configuration', 'pool.virtual_machine_configuration']
parser.parse_mutually_exclusive(ns, True, groups)

@@ -287,25 +302,6 @@ def validate_pool_settings(ns, parser):
raise ValueError(message)
if ns.auto_scale_formula:
ns.enable_auto_scale = True
- if ns.image:
- version = 'latest'
- try:
- publisher, offer, sku = ns.image.split(':', 2)
- except ValueError:
- message = ("Incorrect format for VM image URN. Should be in the format: \n"
- "'publisher:offer:sku[:version]'")
- raise ValueError(message)
- try:
- sku, version = sku.split(':', 1)
- except ValueError:
- pass
- ns.pool = {'virtual_machine_configuration': {'image_reference': {
- 'publisher': publisher,
- 'offer': offer,
- 'sku': sku,
- 'version': version
- }}}
- del ns.image


def validate_cert_settings(ns):
@@ -69,9 +69,9 @@
cli_batch_data_plane_command('batch pool all-statistics show', data_path.format('pool', 'PoolOperations.get_all_pools_lifetime_statistics'), pool_client_factory)
cli_batch_data_plane_command('batch pool create', data_path.format('pool', 'PoolOperations.add'), pool_client_factory, validator=validate_pool_settings,
ignore=['pool.cloud_service_configuration.current_os_version', 'pool.virtual_machine_configuration.windows_configuration',
- 'pool.auto_scale_evaluation_interval', 'pool.enable_auto_scale', 'pool.max_tasks_per_node', 'pool.metadata',
- 'pool.resize_timeout', 'pool.cloud_service_configuration.target_os_version', 'pool.task_scheduling_policy',
- 'pool.virtual_machine_configuration.image_reference', 'pool.start_task.max_task_retry_count', 'pool.start_task.environment_settings'])
+ 'pool.auto_scale_evaluation_interval', 'pool.enable_auto_scale', 'pool.max_tasks_per_node', 'pool.network_configuration',
+ 'pool.cloud_service_configuration.target_os_version', 'pool.task_scheduling_policy',
+ 'pool.start_task.max_task_retry_count', 'pool.start_task.environment_settings'], silent=['pool.virtual_machine_configuration.image_reference'])
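Taken together with the new URN handling in validate_pool_settings above, image_reference is no longer surfaced as a set of flattened options for `batch pool create`; instead the validator expects a single --image URN (exercised by the updated test below) and expands it into publisher/offer/sku/version on the namespace.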
cli_batch_data_plane_command('batch pool list', data_path.format('pool', 'PoolOperations.list'), pool_client_factory)
cli_batch_data_plane_command('batch pool delete', data_path.format('pool', 'PoolOperations.delete'), pool_client_factory)
cli_batch_data_plane_command('batch pool show', data_path.format('pool', 'PoolOperations.get'), pool_client_factory)
@@ -86,8 +86,8 @@

cli_batch_data_plane_command('batch job all-statistics show', data_path.format('job', 'JobOperations.get_all_jobs_lifetime_statistics'), job_client_factory)
cli_batch_data_plane_command('batch job create', data_path.format('job', 'JobOperations.add'), job_client_factory,
- ignore=['job.job_preparation_task', 'job.job_release_task', 'job.pool_info.auto_pool_specification', 'job.metadata',
- 'job.job_manager_task.kill_job_on_completion', 'job.constraints', 'job.common_environment_settings',
+ ignore=['job.job_preparation_task', 'job.job_release_task', 'job.pool_info.auto_pool_specification', 'job.on_task_failure',
+ 'job.job_manager_task.kill_job_on_completion', 'job.common_environment_settings', 'job.on_all_tasks_complete',
'job.job_manager_task.run_exclusive', 'job.job_manager_task.constraints', 'job.job_manager_task.application_package_references'])
cli_batch_data_plane_command('batch job delete', data_path.format('job', 'JobOperations.delete'), job_client_factory)
cli_batch_data_plane_command('batch job show', data_path.format('job', 'JobOperations.get'), job_client_factory)
@@ -100,10 +100,9 @@
cli_batch_data_plane_command('batch job prep-release-status list', data_path.format('job', 'JobOperations.list_preparation_and_release_task_status'), job_client_factory)

cli_batch_data_plane_command('batch job-schedule create', data_path.format('job_schedule', 'JobScheduleOperations.add'), job_schedule_client_factory,
- ignore=['cloud_job_schedule.job_specification.job_preparation_task', 'cloud_job_schedule.metadata',
+ ignore=['cloud_job_schedule.job_specification.job_preparation_task',
'cloud_job_schedule.job_specification.job_release_task', 'cloud_job_schedule.job_specification.metadata',
'cloud_job_schedule.job_specification.job_manager_task.kill_job_on_completion',
- 'cloud_job_schedule.job_specification.constraints'
'cloud_job_schedule.job_specification.job_manager_task.run_exclusive',
'cloud_job_schedule.job_specification.job_manager_task.application_package_references',
'cloud_job_schedule.job_specification.job_manager_task.environment_settings'])
@@ -59,8 +59,8 @@ def body(self):

# test create pool with parameters from mutually exclusive groups
try:
- self.cmd('batch pool create --id mutually-exclusive-test --vm-size small --os-family' +
- '4 --image Canonical:UbuntuServer')
+ self.cmd('batch pool create --id mutually-exclusive-test --vm-size small --os-family ' +
+ '4 --image Canonical:UbuntuServer:16-LTS:latest')
raise AssertionError("Expected exception to be raised.")
except SystemExit as exp:
self.assertEqual(exp.code, 2)