Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

[BUGFIX] enable test_fc_subgraph.py::test_fc_eltwise #20393

Merged
merged 2 commits into from
Jun 30, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 8 additions & 4 deletions src/operator/subgraph/mkldnn/mkldnn_fc-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,13 @@ namespace op {
static inline bool SupportMKLDNNFCEltwiseFusion(const std::string op_name) {
if (op_name == "Activation" ||
op_name == "square" ||
op_name == "_npi_square" ||
op_name == "sqrt" ||
op_name == "_npi_sqrt" ||
op_name == "exp" ||
op_name == "_npi_exp" ||
op_name == "abs" ||
op_name == "_npi_absolute" ||
op_name == "clip" ||
op_name == "LeakyReLU") {
return true;
Expand All @@ -45,13 +49,13 @@ static inline bool SupportMKLDNNFCEltwiseFusion(const std::string op_name) {
}

static inline mkldnn::algorithm GetMKLDNNEltwiseAlgo(const std::string op_name) {
if (op_name == "square")
if (op_name == "square" || op_name == "_npi_square")
return mkldnn::algorithm::eltwise_square;
else if (op_name == "sqrt")
else if (op_name == "sqrt" || op_name == "_npi_sqrt")
return mkldnn::algorithm::eltwise_sqrt;
else if (op_name == "exp")
else if (op_name == "exp" || op_name == "_npi_exp")
return mkldnn::algorithm::eltwise_exp;
else if (op_name == "abs")
else if (op_name == "abs" || op_name == "_npi_absolute")
return mkldnn::algorithm::eltwise_abs;
else
LOG(FATAL) << "Unsupported eltwise fusion op: " << op_name;
Expand Down
10 changes: 7 additions & 3 deletions src/operator/subgraph/mkldnn/mkldnn_fc_property.h
Original file line number Diff line number Diff line change
Expand Up @@ -113,13 +113,17 @@ class SgMKLDNNFCSelector : public SubgraphSelector {
}
}
if (!quantized_ && (new_node.op() == Op::Get("square") ||
new_node.op() == Op::Get("sqrt") ||
new_node.op() == Op::Get("exp"))) {
new_node.op() == Op::Get("_npi_square") ||
new_node.op() == Op::Get("sqrt") ||
new_node.op() == Op::Get("_npi_sqrt") ||
new_node.op() == Op::Get("exp") ||
new_node.op() == Op::Get("_npi_exp"))) {
matched_list_.push_back(&new_node);
status_ = kSuccess;
return true;
}
if (new_node.op() == Op::Get("abs")) {
if (new_node.op() == Op::Get("abs") ||
new_node.op() == Op::Get("_npi_absolute")) {
Copy link
Contributor

@bartekkuncer bartekkuncer Jun 28, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If you change something I think it would be nice to put those checks in pairs e.g. if (new_node.op() == Op::Get("abs") || new_node.op() == Op::Get("_npi_absolute")) { or new_node.op() == Op::Get("exp") || new_node.op() == Op::Get("_npi_exp").

Copy link
Contributor Author

@sfraczek sfraczek Jun 28, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Like this?
image
I prefer it as it is

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, I was thinking about something similar to this. That was just a suggestion so if you do not like it, feel free to ignore it :)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ok thank you it was worth a try :)

matched_list_.push_back(&new_node);
status_ = kSuccess;
return true;
Expand Down
7 changes: 5 additions & 2 deletions tests/python/mkl/subgraphs/subgraph_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,13 +71,16 @@ class CustomNormalInit(mx.init.Initializer):
"""Initializes weights with random values sampled from a normal distribution
with a custom mean and standard deviation of `sigma`.
"""
def __init__(self, mean=0, sigma=0.01):
super(CustomNormalInit, self).__init__(mean=mean, sigma=sigma)
def __init__(self, mean=0, sigma=0.01, bounded=False):
super(CustomNormalInit, self).__init__(mean=mean, sigma=sigma, bounded=bounded)
self.mean = mean
self.sigma = sigma
self.bounded = bounded

def _init_weight(self, _, arr):
mx.np.random.normal(self.mean, self.sigma, arr.shape, dtype=arr.dtype, out=arr)
if self.bounded:
mx.np.abs(arr, out=arr)


def check_qsym_calibrated(qsym, out_type, name='conv'):
Expand Down
5 changes: 3 additions & 2 deletions tests/python/mkl/subgraphs/test_fc_subgraph.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,18 +59,19 @@ def forward(self, x):
@pytest.mark.parametrize('use_bias', [True, False])
@pytest.mark.parametrize('flatten', [True, False])
@pytest.mark.parametrize('alg', fc_post_ops_list)
@pytest.mark.skip("Operator square, square_root, abs, exp cannot be found in numpy mode")
def test_fc_eltwise(data_shape, use_bias, flatten, alg):
# fc + eltwise fusion case
class FCEltwise(nn.HybridBlock):
def __init__(self, use_bias, flatten, alg, **kwargs):
super(FCEltwise, self).__init__(**kwargs)
self.fc = nn.Dense(units=64, use_bias=use_bias, flatten=flatten,
weight_initializer=CustomNormalInit(mean=0.5, sigma=0.1) if alg == 'square_root' else None)
weight_initializer=CustomNormalInit(mean=0.5, sigma=0.1, bounded=True) if alg == 'square_root' else None)
#avoid calculating square root of negative values
self.alg = alg

def forward(self, x):
if self.alg == 'square_root':
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please add comment why we need abs here

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there any hidden reason besides the fact that square root can take only 0 and positive numbers as an argument? If there is not, I believe the comment is unnecessary.

Copy link
Contributor Author

@sfraczek sfraczek Jun 28, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Only that square_root returns nans for negative numbers which fail on assert equals because nan != nan

x = abs(x)
fc_out = self.fc(x)
if self.alg in ['relu', 'sigmoid', 'log_sigmoid', 'mish', 'tanh', 'softrelu']:
out = mx.npx.activation(fc_out, act_type=self.alg)
Expand Down