Skip to content

Commit

Permalink
Revert "Fix broadcasting issue in FQ ref implementation"
Browse files Browse the repository at this point in the history
This reverts commit 68db1c4.
  • Loading branch information
vurusovs committed Apr 14, 2023
1 parent a926b79 commit 934793c
Show file tree
Hide file tree
Showing 2 changed files with 57 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,62 @@ namespace ngraph {
namespace runtime {
namespace reference {
namespace fake_quantize_details {
// Computes, for each dimension of `broadcast_shape`, the linear-memory offset
// to rewind when that dimension is broadcast (i.e. has extent 1) while
// iterating over the full tensor whose row-major strides are `memory_offsets`.
// NOTE(review): this is the reverted (pre-fix) version — the skip list in this
// same commit references issue 104402 ("Incorrect broadcasting in FQ reference
// implementation"), so known-incorrect cases exist; behavior is preserved here.
inline std::vector<size_t> calc_broadcast_index_offset(const std::vector<size_t>& memory_offsets,
                                                       const std::vector<size_t>& broadcast_shape) {
    const size_t rank = broadcast_shape.size();
    std::vector<size_t> rewind_offsets(rank, 0);

    // Every broadcast dimension except the innermost rewinds by its full stride.
    for (size_t dim = 0; dim + 1 < rank; ++dim) {
        if (broadcast_shape[dim] == 1) {
            rewind_offsets[dim] = memory_offsets[dim];
        }
    }

    const bool innermost_is_one = broadcast_shape.back() == 1;
    const bool has_non_one_dim =
        std::any_of(broadcast_shape.begin(), broadcast_shape.end(), [](size_t extent) {
            return extent != 1;
        });

    // A broadcast innermost dimension steps back by one element per iteration,
    // unless the whole shape is ones (nothing to realign against).
    if (innermost_is_one && has_non_one_dim) {
        rewind_offsets.back() = 1;
    }

    // With a broadcast innermost dimension, the nearest non-broadcast dimension
    // compensates for that extra single-element step (stride minus one).
    if (innermost_is_one) {
        for (size_t dim = rank; dim-- > 0;) {
            if (broadcast_shape[dim] != 1) {
                rewind_offsets[dim] = memory_offsets[dim] - 1;
                break;
            }
        }
    }
    return rewind_offsets;
}

// Dot product of the current multi-dimensional index with the per-dimension
// rewind offsets: the total linear offset adjustment for the current position.
inline size_t calc_full_broadcast_offset(const std::vector<size_t>& current_dims, const std::vector<size_t>& offsets) {
    uint64_t total = 0;  // 64-bit accumulator, matching the original inner_product seed
    for (size_t i = 0; i < current_dims.size(); ++i) {
        total += current_dims[i] * offsets[i];
    }
    return total;
}

// Expands `shape` to the rank of `target_shape` by padding with 1s, according
// to the requested broadcast rule. Unknown broadcast types yield an empty Shape.
inline Shape align_shape_sizes(const Shape& shape, const Shape& target_shape, const op::AutoBroadcastSpec& broadcast) {
    Shape aligned;
    if (broadcast.m_type == op::AutoBroadcastType::NONE) {
        // No broadcasting: the shape is used as-is.
        aligned = shape;
    } else if (broadcast.m_type == op::AutoBroadcastType::NUMPY) {
        // NumPy rule: right-align `shape`, pad leading dimensions with 1s.
        aligned = Shape(target_shape.size(), 1);
        std::copy(shape.begin(), shape.end(), aligned.end() - shape.size());
    } else if (broadcast.m_type == op::AutoBroadcastType::PDPD) {
        // PDPD rule: align `shape` starting at `axis` (default: right-aligned).
        const size_t axis =
            broadcast.m_axis == -1 ? target_shape.size() - shape.size() : static_cast<size_t>(broadcast.m_axis);

        aligned = Shape(target_shape.size(), 1);
        const auto dims_to_copy = target_shape.size() - axis;
        // Dimensions of `shape` beyond dims_to_copy are expected to be 1s.
        std::copy(shape.begin(), shape.begin() + dims_to_copy, aligned.begin() + axis);
    }
    return aligned;
}

inline void increment_current_dim(std::vector<size_t>& current_dims, const std::vector<size_t>& shape) {
size_t incremented_dim_number = current_dims.size();
while (incremented_dim_number-- > 0) {
Expand Down Expand Up @@ -54,7 +105,9 @@ class QuantizationBound {
bound = Bound::aligned;
} else {
bound = Bound::broadcast;
row_strides = row_major_strides(arg_shape);
const auto arg_memory_offsets = row_major_strides(arg_shape);
const auto unsqueezed_bound_shape = align_shape_sizes(bound_shape, arg_shape, broadcast_spec);
row_strides = calc_broadcast_index_offset(arg_memory_offsets, unsqueezed_bound_shape);
}
}
T get_value(const std::vector<size_t>& current_dim, size_t idx) const {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,9 @@ std::vector<std::string> disabledTestPatterns() {
R"(smoke_dynamic_BatchSizeOne/RNNSequenceCPUTest.*IS=\(\[1\.\?\.10\]_\[1\.1\.10\]_\[\?\]_\)_TS=\{\(1\.2\.10\)_\(1\.1\.10\)_\(1\)\}_\{\(1\.4\.10\)_\(1\.1\.10\)_\(1\)\}_\{\(1\.8\.10\)_\(1\.1\.10\)_\(1\)\}_seqMode=PURE_SEQ_activations=\(relu\)_clip=0_direction=forward_netPrec=f32__inFmts=ncw\.ntc_outFmts=ncw\.ncw_primitive=ref_any)", // NOLINT
// 98151. Not valid sorting for slices in reference.
R"(.*UniqueLayerTestCPU.*axis.*True.*)",
    // Issue: 104402. Incorrect broadcasting in FQ reference implementation
R"(.*smoke_FakeQuantizeLayerCPUTest_Decompos.*IS=\[4\.5\.6\.6\]_TS=\(\(4\.5\.6\.6\)\)_RS=\(\(1\.1\.6\.6\)\)_\(\(1\.1\.6\.6\)\)_\(\(1\.5\.6\.1\)\)_\(\(1\.5\.1\.6\)\).*)",
R"(.*smoke_FakeQuantizeLayerCPUTest_Decompos.*IS=\[4\.5\.6\.6\]_TS=\(\(4\.5\.6\.6\)\)_RS=\(\(1\.5\.6\.1\)\)_\(\(1\.5\.6\.1\)\)_\(\(1\.5\.6\.1\)\)_\(\(1\.5\.1\.6\)\).*)",
};

#define FIX_62820 0
Expand Down

0 comments on commit 934793c

Please sign in to comment.