Skip to content

Commit

Permalink
remove 'foo' and other print messages from tests (apache#16088)
Browse files Browse the repository at this point in the history
* remove 'foo' and other print messages from tests

* address review comments

* re-trigger CI
  • Loading branch information
apeforest authored and gyshi committed Sep 7, 2019
1 parent c613fe6 commit 3e3026f
Showing 1 changed file with 49 additions and 38 deletions.
87 changes: 49 additions & 38 deletions tests/python/unittest/test_profiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,16 @@
# under the License.

from __future__ import print_function
import logging
import mxnet as mx
from mxnet import profiler
import time
import os
import json
import unittest
from collections import OrderedDict

import mxnet as mx
from mxnet import profiler
from common import run_in_spawned_process
import unittest


def enable_profiler(profile_filename, run=True, continuous_dump=False, aggregate_stats=False):
profiler.set_config(profile_symbolic=True,
Expand All @@ -35,9 +36,8 @@ def enable_profiler(profile_filename, run=True, continuous_dump=False, aggregate
continuous_dump=continuous_dump,
aggregate_stats=aggregate_stats
)
print('profile file save to {}'.format(profile_filename))
if run is True:
profiler.set_state('run')
profiler.set_state('run')


def test_profiler():
Expand All @@ -59,9 +59,7 @@ def test_profiler():
a.copyto(executor.arg_dict['A'])
b.copyto(executor.arg_dict['B'])

print("execution begin")
for i in range(iter_num):
print("Iteration {}/{}".format(i + 1, iter_num))
if i == begin_profiling_iter:
t0 = time.clock()
profiler.set_state('run')
Expand All @@ -71,52 +69,50 @@ def test_profiler():
executor.forward()
c = executor.outputs[0]
c.wait_to_read()
print("execution end")

duration = t1 - t0
print('duration: {0}s'.format(duration))
print(' {0}ms/operator'.format(duration*1000/iter_num))
profiler.dump(True)
profiler.set_state('stop')


def test_profile_create_domain():
enable_profiler('test_profile_create_domain.json')
domain = profiler.Domain(name='PythonDomain')
print("Domain created: {}".format(str(domain)))
profiler.set_state('stop')


def test_profile_create_domain_dept():
profiler.set_config(profile_symbolic=True, filename='test_profile_create_domain_dept.json')
profiler.set_state('run')
domain = profiler.Domain(name='PythonDomain')
print("Domain created: {}".format(str(domain)))
profiler.dump_profile()
profiler.dump()
profiler.set_state('stop')


def test_profile_task():
def makeParams():
objects = tuple('foo' for _ in range(50))
template = ''.join('{%d}' % i for i in range(len(objects)))
return template, objects

def doLog():
def get_log():
template, objects = makeParams()
for _ in range(100000):
logging.info(template.format(*objects))
logs = []
for _ in range(10):
logs.append(template.format(*objects))
return logs

logging.basicConfig()
enable_profiler('test_profile_task.json')
python_domain = profiler.Domain('PythonDomain::test_profile_task')
task = profiler.Task(python_domain, "test_profile_task")
task.start()
start = time.time()
var = mx.nd.ones((1000, 500))
doLog()
assert len(get_log()) == 10
var.asnumpy()
stop = time.time()
task.stop()
print('run took: %.3f' % (stop - start))
assert stop > start
profiler.set_state('stop')


Expand All @@ -126,23 +122,24 @@ def makeParams():
template = ''.join('{%d}' % i for i in range(len(objects)))
return template, objects

def doLog():
def get_log():
template, objects = makeParams()
logs = []
for _ in range(100000):
logging.info(template.format(*objects))
logs.append(template.format(*objects))
return logs

logging.basicConfig()
enable_profiler('test_profile_frame.json')
python_domain = profiler.Domain('PythonDomain::test_profile_frame')
frame = profiler.Frame(python_domain, "test_profile_frame")
frame.start()
start = time.time()
var = mx.nd.ones((1000, 500))
doLog()
assert len(get_log()) == 100000
var.asnumpy()
stop = time.time()
frame.stop()
print('run took: %.3f' % (stop - start))
assert stop > start
profiler.set_state('stop')


Expand All @@ -152,23 +149,24 @@ def makeParams():
template = ''.join('{%d}' % i for i in range(len(objects)))
return template, objects

def doLog():
def get_log():
template, objects = makeParams()
logs = []
for _ in range(100000):
logging.info(template.format(*objects))
logs.append(template.format(*objects))
return logs

logging.basicConfig()
if do_enable_profiler is True:
enable_profiler('test_profile_event.json')
event = profiler.Event("test_profile_event")
event.start()
start = time.time()
var = mx.nd.ones((1000, 500))
doLog()
assert len(get_log()) == 100000
var.asnumpy()
stop = time.time()
event.stop()
print('run took: %.3f' % (stop - start))
assert stop > start
if do_enable_profiler is True:
profiler.set_state('stop')

Expand All @@ -191,15 +189,17 @@ def makeParams():
template = ''.join('{%d}' % i for i in range(len(objects)))
return template, objects

def doLog(counter):
def get_log(counter):
template, objects = makeParams()
range_size = 100000
logs = []
for i in range(range_size):
if i <= range_size / 2:
counter += 1
else:
counter -= 1
logging.info(template.format(*objects))
logs.append(template.format(*objects))
return logs

if do_enable_profiler is True:
enable_profiler('test_profile_counter.json')
Expand All @@ -208,9 +208,11 @@ def doLog(counter):
counter.set_value(5)
counter += 1
start = time.time()
doLog(counter)
log = get_log(counter)
assert len(log) == 100000
assert log[0].count('foo') == 50
stop = time.time()
print('run took: %.3f' % (stop - start))
assert stop > start
if do_enable_profiler is True:
profiler.set_state('stop')

Expand All @@ -222,7 +224,6 @@ def test_continuous_profile_and_instant_marker():
last_file_size = 0
for i in range(5):
profiler.Marker(python_domain, "StartIteration-" + str(i)).mark('process')
print("{}...".format(i))
test_profile_event(False)
test_profile_counter(False)
profiler.dump(False)
Expand All @@ -233,9 +234,9 @@ def test_continuous_profile_and_instant_marker():
profiler.dump(False)
debug_str = profiler.dumps()
assert(len(debug_str) > 0)
print(debug_str)
profiler.set_state('stop')


def test_aggregate_stats_valid_json_return():
file_name = 'test_aggregate_stats_json_return.json'
enable_profiler(file_name, True, True, True)
Expand All @@ -246,10 +247,12 @@ def test_aggregate_stats_valid_json_return():
assert 'Memory' in target_dict and 'Time' in target_dict and 'Unit' in target_dict
profiler.set_state('stop')


def test_aggregate_stats_sorting():
sort_by_options = {'total': 'Total', 'avg': 'Avg', 'min': 'Min',\
'max': 'Max', 'count': 'Count'}
ascending_options = [False, True]

def check_ascending(lst, asc):
assert(lst == sorted(lst, reverse = not asc))

Expand All @@ -274,6 +277,7 @@ def check_sorting(debug_str, sort_by, ascending):
check_sorting(debug_str, sb, asc)
profiler.set_state('stop')


def test_aggregate_duplication():
file_name = 'test_aggregate_duplication.json'
enable_profiler(profile_filename=file_name, run=True, continuous_dump=True, \
Expand All @@ -297,7 +301,8 @@ def test_aggregate_duplication():
assert target_dict['Time']['operator']['sqrt']['Count'] == 1
assert target_dict['Time']['operator']['_plus_scalar']['Count'] == 2
profiler.set_state('stop')



def test_custom_operator_profiling(seed=None, file_name=None):
class Sigmoid(mx.operator.CustomOp):
def forward(self, is_train, req, in_data, out_data, aux):
Expand Down Expand Up @@ -354,6 +359,7 @@ def create_operator(self, ctx, in_shapes, in_dtypes):
and 'MySigmoid::_zeros' in target_dict['Time']['Custom Operator']
profiler.set_state('stop')


def check_custom_operator_profiling_multiple_custom_ops_output(debug_str):
target_dict = json.loads(debug_str)
assert 'Time' in target_dict and 'Custom Operator' in target_dict['Time'] \
Expand All @@ -362,6 +368,7 @@ def check_custom_operator_profiling_multiple_custom_ops_output(debug_str):
and 'MyAdd1::_plus_scalar' in target_dict['Time']['Custom Operator'] \
and 'MyAdd2::_plus_scalar' in target_dict['Time']['Custom Operator']


def custom_operator_profiling_multiple_custom_ops(seed, mode, file_name):
class MyAdd(mx.operator.CustomOp):
def forward(self, is_train, req, in_data, out_data, aux):
Expand Down Expand Up @@ -427,15 +434,18 @@ def create_operator(self, ctx, shapes, dtypes):
debug_str = profiler.dumps(format='json')
check_custom_operator_profiling_multiple_custom_ops_output(debug_str)
profiler.set_state('stop')



def test_custom_operator_profiling_multiple_custom_ops_symbolic():
custom_operator_profiling_multiple_custom_ops(None, 'symbolic', \
'test_custom_operator_profiling_multiple_custom_ops_symbolic.json')


def test_custom_operator_profiling_multiple_custom_ops_imperative():
custom_operator_profiling_multiple_custom_ops(None, 'imperative', \
'test_custom_operator_profiling_multiple_custom_ops_imperative.json')


@unittest.skip("Flaky test https://github.com/apache/incubator-mxnet/issues/15406")
def test_custom_operator_profiling_naive_engine():
# run the three tests above using Naive Engine
Expand All @@ -449,6 +459,7 @@ def test_custom_operator_profiling_naive_engine():
{'MXNET_ENGINE_TYPE' : "NaiveEngine"}, 'symbolic', \
'test_custom_operator_profiling_multiple_custom_ops_symbolic_naive.json')


if __name__ == '__main__':
import nose
nose.runmodule()

0 comments on commit 3e3026f

Please sign in to comment.