This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Infer dtype in SymbolBlock import from input symbol #12412

Merged
Changes from 7 commits
71 changes: 65 additions & 6 deletions python/mxnet/gluon/block.py
@@ -26,6 +26,7 @@
import re
from collections import OrderedDict

from ..base import mx_real_t
from .. import symbol, ndarray, initializer
from ..symbol import Symbol
from ..ndarray import NDArray
@@ -1053,13 +1054,20 @@ def __init__(self, outputs, inputs, params=None):
"SymbolBlock doesn't support Parameter '%s' because its storage " \
"type is 'row_sparse'." % j.name

for i in out.list_arguments():
if i not in input_names:
self.params.get(i, allow_deferred_init=True)
        # Infer the dtype of parameters. Without this, every parameter would be
        # created with the default dtype, i.e., fp32
arg_params = out.list_arguments()
aux_params = out.list_auxiliary_states()

for i in out.list_auxiliary_states():
if i not in input_names:
self.params.get(i, grad_req='null', allow_deferred_init=True)
arg_types, aux_types = _infer_param_types(inputs[0], out, arg_params, aux_params)
Member:
this does not handle Grouped Symbol because you are only slicing [0] from the symbol.
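A hedged sketch of one way to handle grouped or multi-input symbols — collecting dtypes from every input variable instead of slicing only inputs[0]; the helper name below is illustrative and not part of this PR:

def _collect_input_dtypes(input_syms):
    # Hypothetical helper: gather the known dtype of each input variable so
    # infer_type sees every typed input, not just the first one.
    dtype_map = {}
    for sym in input_syms:
        inferred = sym.infer_type()[0]  # arg dtypes of the bare variable
        if inferred:
            dtype_map[sym.name] = inferred[0]
    return dtype_map

# out.infer_type(**_collect_input_dtypes(inputs)) could then replace the
# single-input call inside _infer_param_types.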

Member:
also, I think the type inference should occur in cast as well; otherwise it's buggy when the user tries to cast the dtype of a cascaded network with a SymbolBlock inside.
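A sketch of that suggestion (not part of this PR): override cast on SymbolBlock so the cached op built for the old dtype is invalidated before the parameters are re-cast.

def cast(self, dtype):
    # Hypothetical SymbolBlock override: drop the cached executor first
    # (_clear_cached_op is shown further down in this diff), then delegate
    # to the normal Block.cast, which casts all parameters.
    self._clear_cached_op()
    super(SymbolBlock, self).cast(dtype)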


for i, arg in enumerate(arg_params):
if arg not in input_names:
self.params.get(arg, allow_deferred_init=True, dtype=arg_types[i])

for i, aux in enumerate(aux_params):
if aux not in input_names:
self.params.get(aux, grad_req='null', allow_deferred_init=True, dtype=aux_types[i])

self._cached_graph = syms, out
len_prefix = len(_common_prefix(list(self._params.keys())))
@@ -1086,3 +1094,54 @@ def _clear_cached_op(self):

def hybrid_forward(self, F, x, *args, **kwargs):
raise NotImplementedError

def _infer_param_types(in_params, out_params, arg_params, aux_params, default_dtype=mx_real_t):
"""Utility function that helps in inferring DType of args and auxs params
from given input param.

Parameters
----------
in_params: Symbol
Input symbol variable.
out_params: Symbol
Output symbol variable.
    arg_params: list of str
        List of names of argument parameters.
    aux_params: list of str
        List of names of auxiliary parameters.
default_dtype: numpy.dtype or str, default 'float32'
Default data type for arg_params and aux_params, if unable to infer the type.

Returns
-------
    arg_types: list of numpy.dtype
        Dtypes of arg_params, in the same order as arg_params.
        Defaults to 'float32' if the type cannot be inferred.
    aux_types: list of numpy.dtype
        Dtypes of aux_params, in the same order as aux_params.
        Defaults to 'float32' if the type cannot be inferred.
"""
arg_types = None
aux_types = None

# Get Input symbol details. This will be used to infer types of
# other parameters.
input_sym_name = in_params.name
input_sym_arg_type = in_params.infer_type()[0]

# Try to infer types of other parameters.
if input_sym_arg_type and len(input_sym_arg_type) > 0:
params = {input_sym_name:input_sym_arg_type[0]}
arg_types, _, aux_types = out_params.infer_type(**params)

if arg_types is None or len(arg_types) != len(arg_params):
arg_types = []
for _ in arg_params:
arg_types.append(default_dtype)

if aux_types is None or len(aux_types) != len(aux_params):
aux_types = []
for _ in aux_params:
aux_types.append(default_dtype)

return (arg_types, aux_types)
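A minimal usage sketch of the helper above (the graph and names here are illustrative, not from this PR): a float16 input variable propagates float16 to the FullyConnected parameters.

import mxnet as mx

data = mx.sym.var('data', dtype='float16')
out = mx.sym.FullyConnected(data, num_hidden=10, name='fc')
arg_types, aux_types = _infer_param_types(
    data, out, out.list_arguments(), out.list_auxiliary_states())
# arg_types -> [float16, float16, float16] for ('data', 'fc_weight', 'fc_bias');
# aux_types is empty because FullyConnected has no auxiliary states.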
2 changes: 2 additions & 0 deletions python/mxnet/gluon/parameter.py
@@ -727,6 +727,8 @@ def get(self, name, **kwargs):
if matched:
param._shape = tuple(inferred_shape)
continue
elif k == 'dtype' and np.dtype(v) == np.dtype(existing):
continue

assert v is None or v == existing, \
"Cannot retrieve Parameter '%s' because desired attribute " \
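The added check goes through np.dtype so that equivalent spellings of the same dtype compare equal — a quick illustration of why the normalization matters:

import numpy as np

assert np.dtype('float32') == np.dtype(np.float32)  # string vs. type class: equal
assert 'float32' != np.float32                      # a plain comparison would not match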
26 changes: 26 additions & 0 deletions tests/python/gpu/test_gluon_gpu.py
@@ -18,6 +18,7 @@
from __future__ import print_function
import sys
import os
import tempfile
import time
import multiprocessing as mp
import unittest
@@ -202,6 +203,31 @@ def get_num_devices():
_check_batchnorm_result(mx.nd.random.uniform(shape=(4, 1, 4, 4)),
num_devices=ndev, cuda=True)

@with_seed()
def test_symbol_block_fp16():
    # Test case to verify that a SymbolBlock can be initialized from a model
    # whose params have a dtype other than fp32.

# 1. Load a resnet model, cast it to fp16 and export
tmp = tempfile.mkdtemp()
tmpfile = os.path.join(tmp, 'resnet34_fp16')
ctx = mx.gpu(0)

net_fp32 = mx.gluon.model_zoo.vision.resnet34_v2(pretrained=True, ctx=ctx, root=tmp)
net_fp32.cast('float16')
net_fp32.hybridize()
data = mx.nd.zeros((1,3,224,224), dtype='float16', ctx=ctx)
net_fp32.forward(data)
net_fp32.export(tmpfile, 0)

    # 2. Load the saved model and verify that all the params are loaded correctly;
    # pick one of the params and check that its dtype is fp16.
sm = mx.sym.load(tmpfile + '-symbol.json')
inputs = mx.sym.var('data', dtype='float16')
    net_fp16 = mx.gluon.SymbolBlock(sm, inputs)
    net_fp16.collect_params().load(tmpfile + '-0000.params', ctx=ctx)
    assert (net_fp16.params['resnetv20_stage1_conv2_weight'].dtype is np.float16)

if __name__ == '__main__':
import nose
nose.runmodule()
26 changes: 26 additions & 0 deletions tests/python/unittest/test_gluon.py
@@ -15,6 +15,9 @@
# specific language governing permissions and limitations
# under the License.

import os
import tempfile

import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn
@@ -336,6 +339,29 @@ def hybrid_forward(self, F, x):
net.hybridize()
assert isinstance(net(mx.nd.zeros((16, 10))), mx.nd.NDArray)

    # Test case to verify that a SymbolBlock can be initialized from a model
    # whose params have a dtype other than fp32.

# 1. Load a resnet model, cast it to fp64 and export
tmp = tempfile.mkdtemp()
Contributor: should we delete the temporary directory when done with it?

Contributor Author: Temp gets automatically cleaned up.

Contributor (@stu1130, Sep 7, 2018): According to the Python docs, it should be deleted by the user: "The user of mkdtemp() is responsible for deleting the temporary directory and its contents when done with it."

Contributor Author: Right, thanks. I meant that temp gets automatically cleaned up after all the tests (test session).

Contributor Author: @marcoabreu - Can you please confirm if my understanding is correct? If not, I will add code to delete the temp directory created in the tests. Also, I see similar behavior in all other tests, which create a temp dir but assume it will be cleaned up by the system.
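For reference, a hedged sketch of explicit cleanup, if the test were to delete its own temporary directory (the names mirror the test below):

import os
import shutil
import tempfile

tmp = tempfile.mkdtemp()
try:
    tmpfile = os.path.join(tmp, 'resnet34_fp64')
    # ... export the model and load it back ...
finally:
    shutil.rmtree(tmp, ignore_errors=True)  # remove the directory and its contents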

tmpfile = os.path.join(tmp, 'resnet34_fp64')
ctx = mx.cpu(0)

net_fp32 = mx.gluon.model_zoo.vision.resnet34_v2(pretrained=True, ctx=ctx, root=tmp)
Contributor: Is the model name in the model zoo going to be maintained? If there is any change in the name, it would break this unit test. Not sure if we want to keep this dependency.

Contributor Author: resnet34_v2 is a public function exposed through the model_zoo.vision module. I think it is ok because we are not using string-based selection of the model.

net_fp32.cast('float64')
Contributor: Shall we add another test that casts to float16? The original issue reported import from float16 failing, and it might be appropriate to cover it as well.

Contributor Author: Added a test for fp16.

net_fp32.hybridize()
data = mx.nd.zeros((1,3,224,224), dtype='float64', ctx=ctx)
net_fp32.forward(data)
net_fp32.export(tmpfile, 0)

    # 2. Load the saved model and verify that all the params are loaded correctly;
    # pick one of the params and check that its dtype is fp64.
sm = mx.sym.load(tmpfile + '-symbol.json')
inputs = mx.sym.var('data', dtype='float64')
net_fp64 = mx.gluon.SymbolBlock(sm, inputs)
net_fp64.collect_params().load(tmpfile + '-0000.params', ctx=ctx)
assert (net_fp64.params['resnetv20_stage1_conv2_weight'].dtype is np.float64)

@with_seed()
@raises(AssertionError)
def test_sparse_symbol_block():