-
Notifications
You must be signed in to change notification settings - Fork 6.8k
[BUGFIX] enable test_fc_subgraph.py::test_fc_eltwise #20393
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -71,13 +71,18 @@ class CustomNormalInit(mx.init.Initializer): | |
"""Initializes weights with random values sampled from a normal distribution | ||
with a custom mean and standard deviation of `sigma`. | ||
""" | ||
def __init__(self, mean=0, sigma=0.01): | ||
super(CustomNormalInit, self).__init__(mean=mean, sigma=sigma) | ||
def __init__(self, mean=0, sigma=0.01, bounded=False): | ||
super(CustomNormalInit, self).__init__(mean=mean, sigma=sigma, bounded=bounded) | ||
self.mean = mean | ||
self.sigma = sigma | ||
self.bounded = bounded | ||
|
||
def _init_weight(self, _, arr): | ||
mx.np.random.normal(self.mean, self.sigma, arr.shape, dtype=arr.dtype, out=arr) | ||
# import pdb | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. remove |
||
# pdb.set_trace() | ||
if self.bounded: | ||
mx.np.abs(arr, out=arr) | ||
|
||
|
||
def check_qsym_calibrated(qsym, out_type, name='conv'): | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -59,18 +59,19 @@ def forward(self, x): | |
@pytest.mark.parametrize('use_bias', [True, False]) | ||
@pytest.mark.parametrize('flatten', [True, False]) | ||
@pytest.mark.parametrize('alg', fc_post_ops_list) | ||
@pytest.mark.skip("Operator square, square_root, abs, exp cannot be found in numpy mode") | ||
def test_fc_eltwise(data_shape, use_bias, flatten, alg): | ||
# fc + eltwise fusion case | ||
class FCEltwise(nn.HybridBlock): | ||
def __init__(self, use_bias, flatten, alg, **kwargs): | ||
super(FCEltwise, self).__init__(**kwargs) | ||
self.fc = nn.Dense(units=64, use_bias=use_bias, flatten=flatten, | ||
weight_initializer=CustomNormalInit(mean=0.5, sigma=0.1) if alg == 'square_root' else None) | ||
weight_initializer=CustomNormalInit(mean=0.5, sigma=0.1, bounded=True) if alg == 'square_root' else None) | ||
#avoid calculating square root of negative values | ||
self.alg = alg | ||
|
||
def forward(self, x): | ||
if self.alg == 'square_root': | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Please add comment why we need abs here There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Is there any hidden reason besides the fact that square root can take only 0 and positive numbers as argument? If there is not, I believe the comment is unnecessary. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Only that square_root returns nans for negative numbers which fail on assert equals because nan != nan |
||
x = abs(x) | ||
fc_out = self.fc(x) | ||
if self.alg in ['relu', 'sigmoid', 'log_sigmoid', 'mish', 'tanh', 'softrelu']: | ||
out = mx.npx.activation(fc_out, act_type=self.alg) | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
If you change something I think it would be nice to put those checks in pairs e.g.
if (new_node.op() == Op::Get("abs") || new_node.op() == Op::Get("_npi_absolute")) {
or `new_node.op() == Op::Get("exp") || new_node.op() == Op::Get("_npi_exp")`.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Like this?
I prefer it as it is
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yes, I was thinking about something similar to this. That was just a suggestion so if you do not like it, feel free to ignore it :)
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
ok thank you it was worth a try :)