Fix words
co63oc committed Jan 7, 2024
1 parent 7c7c5b1 commit b2bab81
Showing 10 changed files with 31 additions and 31 deletions.
14 changes: 7 additions & 7 deletions test/ir/inference/auto_scan_test.py
@@ -278,9 +278,9 @@ def run_test(self, quant=False, *args, **kwargs):
model, params, prog_config, base_config, feed_data
)
)
self.success_log(f"basline program_config: {prog_config}")
self.success_log(f"baseline program_config: {prog_config}")
self.success_log(
f"basline predictor_config: {self.inference_config_str(base_config)}"
f"baseline predictor_config: {self.inference_config_str(base_config)}"
)

for pred_config, (atol, rtol) in self.sample_predictor_configs(
@@ -561,11 +561,11 @@ def inference_config_str(self, config) -> str:
dic["passes"] = self.passes

enable_trt = config.tensorrt_engine_enabled()
- trt_precison = config.tensorrt_precision_mode()
+ trt_precision = config.tensorrt_precision_mode()
trt_dynamic_shape = config.tensorrt_dynamic_shape_enabled()
if enable_trt:
dic["use_trt"] = True
dic["trt_precision"] = trt_precison
dic["trt_precision"] = trt_precision
dic["use_dynamic_shape"] = trt_dynamic_shape
else:
dic["use_trt"] = False
@@ -713,11 +713,11 @@ def assert_op_size(self, trt_engine_num, paddle_op_num):
def inference_config_str(self, config: paddle_infer.Config) -> str:
dic = {}
enable_trt = config.tensorrt_engine_enabled()
- trt_precison = config.tensorrt_precision_mode()
+ trt_precision = config.tensorrt_precision_mode()
trt_dynamic_shape = config.tensorrt_dynamic_shape_enabled()
if enable_trt:
dic["use_trt"] = True
dic["trt_precision"] = trt_precison
dic["trt_precision"] = trt_precision
dic["use_dynamic_shape"] = trt_dynamic_shape
else:
dic["use_trt"] = False
@@ -755,7 +755,7 @@ def random_to_skip():
gpu_config,
prog_config.get_feed_data(),
)
self.success_log(f"basline program_config: {prog_config}")
self.success_log(f"baseline program_config: {prog_config}")

for (
pred_config,
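For context, the corrected inference_config_str helpers just collect predictor settings into a dict for logging. A minimal standalone sketch of the TensorRT branch, using the config accessors visible in the diff (the helper name describe_trt_config is illustrative, not from the repository):

import paddle.inference as paddle_infer

def describe_trt_config(config: paddle_infer.Config) -> dict:
    # Mirrors the fixed helper: report TensorRT settings under the
    # correctly spelled "trt_precision" key.
    dic = {}
    if config.tensorrt_engine_enabled():
        dic["use_trt"] = True
        dic["trt_precision"] = config.tensorrt_precision_mode()
        dic["use_dynamic_shape"] = config.tensorrt_dynamic_shape_enabled()
    else:
        dic["use_trt"] = False
    return dic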
2 changes: 1 addition & 1 deletion test/ir/inference/program_config.py
@@ -260,7 +260,7 @@ def __init__(
no_cast_list: Optional[List[str]] = None,
):
self.ops = ops
- # if no weight need to save, we create a place_holder to help seriazlie params.
+ # if no weight need to save, we create a place_holder to help serialize params.
if not weights:

def generate_weight():
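The fixed comment describes ProgramConfig's fallback when a test supplies no weights: a dummy placeholder tensor is registered so parameter serialization still has something to write. A hedged sketch of that idea (the exact placeholder shape and dtype in the repository may differ):

import numpy as np

def generate_weight():
    # Hypothetical placeholder: a tiny tensor whose only job is to keep
    # the serialized params file non-empty.
    return np.array([0.1], dtype=np.float32)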
@@ -190,7 +190,7 @@ def sample_program_config(self, draw):
)
)

- # 9. Generate legal elemntwise_add: X of conv2d
+ # 9. Generate legal elementwise_add: X of conv2d
bias_2_dict = {}
bias_2_dict[1] = [
x_shape[0],
2 changes: 1 addition & 1 deletion test/ir/inference/test_trt_convert_pad.py
@@ -137,7 +137,7 @@ def teller1(program_config, predictor_config):
self.add_skip_case(
teller1,
SkipReasons.TRT_NOT_IMPLEMENTED,
"NOT Implemented: we need to add support pad not only inplement on h or w, such as paddings = [0, 0, 1, 1, 1, 1, 1, 1]",
"NOT Implemented: we need to add support pad not only implement on h or w, such as paddings = [0, 0, 1, 1, 1, 1, 1, 1]",
)

def test(self):
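Here add_skip_case registers a predicate (teller1) that skips a sampled case with the given reason whenever it returns True. A hedged sketch of what such a teller might check for the pad limitation above (the attribute access pattern is an assumption; the real teller in the repository may differ):

def teller1(program_config, predictor_config):
    # Hypothetical check: flag cases where padding touches N or C,
    # e.g. paddings = [0, 0, 1, 1, 1, 1, 1, 1] pads every dimension.
    paddings = program_config.ops[0].attrs["paddings"]
    return any(p != 0 for p in paddings[:4])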
16 changes: 8 additions & 8 deletions test/ir/inference/test_trt_convert_reshape.py
@@ -88,12 +88,12 @@ def generate_shapeT2_data(attrs: List[Dict[str, Any]]):
},
]
self.dims = dims
dics_intput = [{"X": ["reshape_input"]}]
dics_input = [{"X": ["reshape_input"]}]

ops_config = [
{
"op_type": "reshape",
"op_inputs": dics_intput[0],
"op_inputs": dics_input[0],
"op_outputs": {"Out": ["reshape_out"]},
"op_attrs": dics[0],
}
@@ -228,7 +228,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
{},
]
self.dims = dims
- dics_intput = [
+ dics_input = [
{
"X": ["reshape_input"],
"ShapeTensor": ["shapeT1_data", "shapeT2_data"],
@@ -257,7 +257,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
},
{
"op_type": "reshape",
"op_inputs": dics_intput[0],
"op_inputs": dics_input[0],
"op_outputs": {"Out": ["reshape_out"]},
"op_attrs": dics[0],
},
@@ -351,7 +351,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
{},
]
self.dims = dims
- dics_intput = [
+ dics_input = [
{
"X": ["reshape_input"],
"shape_data": ["shape_data"],
@@ -370,7 +370,7 @@ def generate_input1(attrs: List[Dict[str, Any]]):
},
{
"op_type": "reshape",
"op_inputs": dics_intput[0],
"op_inputs": dics_input[0],
"op_outputs": {"Out": ["reshape_out"]},
"op_attrs": dics[0],
},
@@ -463,12 +463,12 @@ def generate_input1(attrs: List[Dict[str, Any]]):
},
]
self.dims = dims
dics_intput = [{"X": ["reshape_input"]}]
dics_input = [{"X": ["reshape_input"]}]

ops_config = [
{
"op_type": "reshape",
"op_inputs": dics_intput[0],
"op_inputs": dics_input[0],
"op_outputs": {"Out": ["reshape_out"]},
"op_attrs": dics[0],
}
2 changes: 1 addition & 1 deletion test/ir/inference/test_trt_convert_rnn.py
@@ -46,7 +46,7 @@ def sample_program_configs(self):
"is_bidirec": is_bidirec,
"is_test": True,
"dropout_prob": 0.0,
- # for my convience
+ # for my convenience
"batch": batch,
"seq_len": seq_len,
}
10 changes: 5 additions & 5 deletions test/ir/pass_test.py
@@ -187,16 +187,16 @@ def _check_fused_ops(self, program):
if program is None or program == self.main_program:
program = self._apply_ir_passes()

- acctual_num_fused_ops = 0
- # Ir passes can only be applyed to block 0.
+ actual_num_fused_ops = 0
+ # Ir passes can only be applied to block 0.
for op in program.block(0).ops:
if op.type == self.fused_op_type:
- acctual_num_fused_ops += 1
+ actual_num_fused_ops += 1
self.assertTrue(
- self.num_fused_ops == acctual_num_fused_ops,
+ self.num_fused_ops == actual_num_fused_ops,
"Checking of the number of fused operator < {} > failed. "
"Expected: {}, Received: {}".format(
- self.fused_op_type, self.num_fused_ops, acctual_num_fused_ops
+ self.fused_op_type, self.num_fused_ops, actual_num_fused_ops
),
)

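The corrected _check_fused_ops logic is just a typed count over block 0, the only block IR passes rewrite. The same check can be expressed as a standalone one-liner using the program/op interfaces shown in the diff:

def count_ops_of_type(program, op_type):
    # IR passes only modify block 0, so only its ops are inspected.
    return sum(1 for op in program.block(0).ops if op.type == op_type)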
8 changes: 4 additions & 4 deletions test/ir/pir/test_if_api.py
@@ -68,7 +68,7 @@ def test_if_with_multiple_output(self):
self.assertEqual(last_op.name(), "pd_op.if")
self.assertEqual(len(out), 2)

- # check Operaion::as_if_op interface
+ # check Operation::as_if_op interface
if_op = last_op.as_if_op()
true_block = if_op.true_block()
self.assertEqual(len(true_block), 3)
@@ -77,7 +77,7 @@ def test_if_with_multiple_output(self):
build_pipe_for_block(true_block)
self.assertEqual(len(true_block), 4)

- # check Operaion::blocks interface
+ # check Operation::blocks interface
block_list = []
for block in out[0].get_defining_op().blocks():
block_list.append(block)
@@ -94,7 +94,7 @@ def test_if_op_vjp_interface(self):
out_grad = paddle.full(shape=[6, 1], dtype='float32', fill_value=3)
# check vjp interface for if_op
if_input = [[input] for input in get_used_external_value(if_op)]
- if_input_stop_graditents = [[True], [False], [False], [True]]
+ if_input_stop_gradients = [[True], [False], [False], [True]]
if_output = [if_op.results()]
if_output_grad = [[out_grad]]
self.assertEqual(has_vjp(if_op), True)
@@ -103,7 +103,7 @@ def test_if_op_vjp_interface(self):
if_input,
if_output,
if_output_grad,
- if_input_stop_graditents,
+ if_input_stop_gradients,
)

self.assertEqual(grad_outs[0][0], None)
4 changes: 2 additions & 2 deletions test/ir/pir/test_while_api.py
@@ -104,7 +104,7 @@ def test_while_op_vjp_interface(self):
[input] for input in get_used_external_value(body_block)
]
self.assertEqual(len(while_input), 4)
- while_input_stop_graditents = [[True], [False], [True], [True]]
+ while_input_stop_gradients = [[True], [False], [True], [True]]
while_output = [[value] for value in while_op.results()]
while_output_grad = [[out_grad], [out_grad], [out_grad]]
self.assertEqual(has_vjp(while_op), True)
@@ -113,7 +113,7 @@ def test_while_op_vjp_interface(self):
while_input,
while_output,
while_output_grad,
- while_input_stop_graditents,
+ while_input_stop_gradients,
)

self.assertEqual(grad_outs[0][0], None)
2 changes: 1 addition & 1 deletion test/ir/test_fuse_resnet_unit.py
@@ -33,7 +33,7 @@
"and device's compute capability is at least 7.0 and less than 9.0",
)
class TestFuseResNetUnit(unittest.TestCase):
- def test_fuse_resenet_unit(self):
+ def test_fuse_resnet_unit(self):
place = paddle.CUDAPlace(0)
program = paddle.static.Program()
startup_program = paddle.static.Program()
