From 85b7250fd4949b40f1efdd29647a294a6e2b2377 Mon Sep 17 00:00:00 2001 From: Connor Goggins Date: Thu, 6 Feb 2020 15:54:34 -0800 Subject: [PATCH] [OpPerf] Implement remaining random sampling ops (#17502) * Added support for remaining random sampling ops, removed exception for sample_multinomial as it does work with random data * Dropped unused unique_ops variable * Added affine transform, dropped parentheses, changed 19 to 18 * Dropped unique_ops condition - no longer in use * Fixed indentation * Dropped unique_ops --- .../random_sampling_operators.py | 4 +-- benchmark/opperf/rules/default_params.py | 16 ++++++++-- benchmark/opperf/utils/op_registry_utils.py | 31 ++++++++++--------- 3 files changed, 32 insertions(+), 19 deletions(-) diff --git a/benchmark/opperf/nd_operations/random_sampling_operators.py b/benchmark/opperf/nd_operations/random_sampling_operators.py index eeda0026814c..b6a1f44dba25 100644 --- a/benchmark/opperf/nd_operations/random_sampling_operators.py +++ b/benchmark/opperf/nd_operations/random_sampling_operators.py @@ -19,12 +19,12 @@ 1. Operators are automatically fetched from MXNet operator registry. 2. Default Inputs are generated. See rules/default_params.py. You can override the default values. -Below 16 random sampling Operators are covered: +Below 18 random sampling Operators are covered: ['random_exponential', 'random_gamma', 'random_generalized_negative_binomial', 'random_negative_binomial', 'random_normal', 'random_poisson', 'random_randint', 'random_uniform', 'sample_exponential', 'sample_gamma', 'sample_generalized_negative_binomial', 'sample_multinomial', 'sample_negative_binomial', 'sample_normal', -'sample_poisson', 'sample_uniform'] +'sample_poisson', 'sample_uniform', 'GridGenerator', 'BilinearSampler'] """ diff --git a/benchmark/opperf/rules/default_params.py b/benchmark/opperf/rules/default_params.py index b8532e76361c..19caf9e5d9f2 100644 --- a/benchmark/opperf/rules/default_params.py +++ b/benchmark/opperf/rules/default_params.py @@ -63,6 +63,12 @@ DEFAULT_LAM = [[1.0, 8.5]] DEFAULT_K_ND = [[20, 49]] DEFAULT_P_ND = [[0.4, 0.77]] +DEFAULT_GRID = [(32, 2, 256, 256)] +DEFAULT_DATA_BILINEAR = [(32, 2, 256, 256)] +DEFAULT_TRANSFORM_TYPE = ['warp', 'affine'] +DEFAULT_DATA_GRIDGEN = [(32, 2, 256, 256), (256, 6)] +DEFAULT_TARGET_SHAPE = [(256, 6)] +DEFAULT_DATA_SM = [(32, 32), (64, 64)] # For reduction operators # NOTE: Data used is DEFAULT_DATA @@ -194,7 +200,13 @@ "data_3d": DEFAULT_DATA_3d, "label_smce": DEFAULT_LABEL_SMCE, "label": DEFAULT_LABEL, - "index": DEFAULT_INDEX} + "index": DEFAULT_INDEX, + "grid": DEFAULT_GRID, + "data_bilinearsampler": DEFAULT_DATA_BILINEAR, + "transform_type": DEFAULT_TRANSFORM_TYPE, + "data_gridgenerator": DEFAULT_DATA_GRIDGEN, + "target_shape_gridgenerator": DEFAULT_TARGET_SHAPE, + "data_sample_multinomial": DEFAULT_DATA_SM} # These are names of MXNet operator parameters that is of type NDArray. 
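Reviewer note (not part of the patch): the new defaults above are consumed through the custom_data branch added to prepare_op_inputs further down, which keys op-specific inputs as "<arg_name>_<op_name.lower()>". A minimal sketch of that lookup, assuming a trimmed DEFAULTS_INPUTS and an illustrative resolve_input helper that simplifies the real fallback chain:

    # Illustrative subset of the DEFAULTS_INPUTS mapping built above.
    DEFAULTS_INPUTS = {
        "grid": [(32, 2, 256, 256)],
        "data_bilinearsampler": [(32, 2, 256, 256)],
        "data_gridgenerator": [(32, 2, 256, 256), (256, 6)],
        "target_shape_gridgenerator": [(256, 6)],
        "data_sample_multinomial": [(32, 32), (64, 64)],
    }

    # Ops whose arguments need op-specific defaults instead of generic ones.
    CUSTOM_DATA = ("BilinearSampler", "GridGenerator", "sample_multinomial")

    def resolve_input(op, arg_name, arg_type):
        """Hypothetical helper mirroring the new branch in prepare_op_inputs:
        try the op-specific key first, then fall back to the generic key."""
        op_specific_key = arg_name + "_" + op.lower()
        if op in CUSTOM_DATA and op_specific_key in DEFAULTS_INPUTS:
            return DEFAULTS_INPUTS[op_specific_key]
        if "NDArray" in arg_type and arg_name in DEFAULTS_INPUTS:
            return DEFAULTS_INPUTS[arg_name]
        return None

    print(resolve_input("BilinearSampler", "data", "NDArray"))             # [(32, 2, 256, 256)]
    print(resolve_input("BilinearSampler", "grid", "NDArray"))             # [(32, 2, 256, 256)]
    print(resolve_input("GridGenerator", "target_shape", "Shape(tuple)"))  # [(256, 6)]

Note that the op-specific branch is not gated on NDArray argument types, which is what lets GridGenerator's target_shape tuple pick up DEFAULT_TARGET_SHAPE.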
@@ -207,4 +219,4 @@ "low", "high", "weight", "bias", "moving_mean", "moving_var", "weight", "weight32", "grad", "mean", "var", "mom", "n", "d", "v", "z", "g", "delta", "args", "indices", "shape_like", "y", - "x", "condition", "a", "index", "raveL_data", "label"] + "x", "condition", "a", "index", "raveL_data", "label", "grid"] diff --git a/benchmark/opperf/utils/op_registry_utils.py b/benchmark/opperf/utils/op_registry_utils.py index 43158dbc4edf..515a51f1f22d 100644 --- a/benchmark/opperf/utils/op_registry_utils.py +++ b/benchmark/opperf/utils/op_registry_utils.py @@ -22,10 +22,6 @@ from benchmark.opperf.rules.default_params import DEFAULTS_INPUTS, MX_OP_MODULE -# Operators where parameter have special criteria that cannot be cleanly automated. -# Example: sample_multinomial operator has a parameter 'data'. It expects values to sum up to 1. -unique_ops = ("sample_multinomial",) - def _select_ops(operator_names, filters=("_contrib", "_"), merge_op_forward_backward=True): """From a given list of operators, filter out all operator names starting with given filters and prepares @@ -121,6 +117,7 @@ def prepare_op_inputs(op, arg_params): # 3d tensor is needed by following ops ops_3d = ['CTCLoss', 'ctc_loss'] + custom_data = ['BilinearSampler', 'GridGenerator', 'sample_multinomial'] # Prepare op to default input mapping arg_values = {} @@ -128,6 +125,8 @@ def prepare_op_inputs(op, arg_params): arg_params["params"]["arg_types"]): if "NDArray" in arg_type and op == "ravel_multi_index": arg_values[arg_name] = DEFAULTS_INPUTS["ravel_data"] + elif op in custom_data and arg_name + "_" + op.lower() in DEFAULTS_INPUTS: + arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_" + op.lower()] elif "NDArray" in arg_type and arg_name + "_nd" in DEFAULTS_INPUTS: arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_nd"] elif "NDArray" in arg_type and op in ops_4d and arg_name + "_4d" in DEFAULTS_INPUTS: @@ -254,13 +253,16 @@ def get_all_random_sampling_operators(): ------- {"operator_name": {"has_backward", "nd_op_handle", "params"}} """ + # Additional Random Sampling ops which do not start with "random_" or "sample_" + additional_random_sampling_ops = ['GridGenerator', 'BilinearSampler'] + # Get all mxnet operators mx_operators = _get_all_mxnet_operators() # Filter for Random Sampling operators random_sampling_mx_operators = {} for op_name, _ in mx_operators.items(): - if op_name.startswith(("random_", "sample_")) and op_name not in unique_ops: + if op_name.startswith(("random_", "sample_")) or op_name in additional_random_sampling_ops: random_sampling_mx_operators[op_name] = mx_operators[op_name] return random_sampling_mx_operators @@ -279,8 +281,7 @@ def get_all_reduction_operators(): reduction_mx_operators = {} for op_name, op_params in mx_operators.items(): if op_params["params"]["narg"] == 4 and \ - set(["data", "axis", "exclude", "keepdims"]).issubset(set(op_params["params"]["arg_names"])) \ - and op_name not in unique_ops: + set(["data", "axis", "exclude", "keepdims"]).issubset(set(op_params["params"]["arg_names"])): reduction_mx_operators[op_name] = mx_operators[op_name] return reduction_mx_operators @@ -301,8 +302,8 @@ def get_all_optimizer_operators(): # Filter for Optimizer operators optimizer_mx_operators = {} - for op_name, _ in mx_operators.items(): - if op_name in optimizer_ops and op_name not in unique_ops: + for op_name, op_params in mx_operators.items(): + if op_name in optimizer_ops: optimizer_mx_operators[op_name] = mx_operators[op_name] return optimizer_mx_operators @@ -320,8 +321,8 @@ def 
get_all_sorting_searching_operators(): # Filter for Sort and search operators sort_search_mx_operators = {} - for op_name, _ in mx_operators.items(): - if op_name in sort_search_ops and op_name not in unique_ops: + for op_name, op_params in mx_operators.items(): + if op_name in sort_search_ops: sort_search_mx_operators[op_name] = mx_operators[op_name] return sort_search_mx_operators @@ -340,8 +341,8 @@ def get_all_rearrange_operators(): # Filter for Array Rearrange operators rearrange_mx_operators = {} - for op_name, _ in mx_operators.items(): - if op_name in rearrange_ops and op_name not in unique_ops: + for op_name, op_params in mx_operators.items(): + if op_name in rearrange_ops: rearrange_mx_operators[op_name] = mx_operators[op_name] return rearrange_mx_operators @@ -366,7 +367,7 @@ def get_all_indexing_routines(): # Filter for Indexing routines indexing_mx_routines = {} for op_name, _ in mx_operators.items(): - if op_name in indexing_routines and op_name not in unique_ops: + if op_name in indexing_routines: indexing_mx_routines[op_name] = mx_operators[op_name] return indexing_mx_routines @@ -386,7 +387,7 @@ def get_all_loss_operators(): # Filter for NN Loss operators loss_mx_operators = {} for op_name, op_params in mx_operators.items(): - if op_name in loss_ops and op_name not in unique_ops: + if op_name in loss_ops: loss_mx_operators[op_name] = mx_operators[op_name] return loss_mx_operators
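Reviewer note (not part of the patch): the shapes added in default_params.py are mutually consistent, so the newly covered ops can be exercised end to end. A minimal smoke test, assuming the stock mx.nd.GridGenerator, mx.nd.BilinearSampler, and mx.nd.sample_multinomial signatures:

    import mxnet as mx

    # 'warp' mode takes an optical-flow field of shape (batch, 2, H, W) --
    # DEFAULT_DATA_GRIDGEN[0] -- and emits a sampling grid of the same shape.
    # ('affine' mode instead takes the (batch, 6) second default shape plus
    # the target_shape parameter covered by DEFAULT_TARGET_SHAPE.)
    flow = mx.nd.random.uniform(shape=(32, 2, 256, 256))
    grid = mx.nd.GridGenerator(data=flow, transform_type='warp')

    # BilinearSampler resamples a (batch, channel, H, W) input at the grid
    # locations; DEFAULT_DATA_BILINEAR and DEFAULT_GRID use matching shapes.
    data = mx.nd.random.uniform(shape=(32, 2, 256, 256))
    out = mx.nd.BilinearSampler(data=data, grid=grid)
    print(out.shape)  # (32, 2, 256, 256)

    # sample_multinomial now runs on random inputs of the DEFAULT_DATA_SM
    # shapes; rows are normalized here so each forms a valid distribution.
    weights = mx.nd.random.uniform(shape=(32, 32))
    probs = weights / weights.sum(axis=1, keepdims=True)
    print(mx.nd.sample_multinomial(probs).shape)  # (32,)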