Skip to content

Commit

Permalink
[CPU][ARM64] Added Greater JIT emitter (openvinotoolkit#24421)
Browse files Browse the repository at this point in the history
Added support and unit test for Greater jit emitter
  • Loading branch information
jvr0123 committed Jun 16, 2024
1 parent 78fcf9d commit 48ccbd1
Show file tree
Hide file tree
Showing 9 changed files with 93 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,53 @@ std::set<std::vector<element::Type>> jit_divide_emitter::get_supported_precision
return {{element::f32, element::f32}};
}

/// GREATER ///
jit_greater_emitter::jit_greater_emitter(dnnl::impl::cpu::aarch64::jit_generator *host,
                                         dnnl::impl::cpu::aarch64::cpu_isa_t host_isa,
                                         const ov::element::Type exec_prc)
    : jit_emitter(host, host_isa, exec_prc) {
    // Materialize the constant table (the "one" entry) used by emit_isa.
    prepare_table();
}

jit_greater_emitter::jit_greater_emitter(dnnl::impl::cpu::aarch64::jit_generator *host,
                                         dnnl::impl::cpu::aarch64::cpu_isa_t host_isa,
                                         const std::shared_ptr<ov::Node>& node)
    : jit_emitter(host, host_isa, node, get_arithmetic_binary_exec_precision(node)) {
    // Materialize the constant table (the "one" entry) used by emit_isa.
    prepare_table();
}

// Greater is a binary eltwise operation: it consumes exactly two input vectors.
size_t jit_greater_emitter::get_inputs_count() const {
    return 2;
}

// One auxiliary vector register is needed to hold the broadcast 1.0f constant.
size_t jit_greater_emitter::get_aux_vecs_count() const {
    return 1;
}

// One auxiliary GPR is reserved (presumably for addressing the constant table
// via table_val2 — see emit_isa).
size_t jit_greater_emitter::get_aux_gprs_count() const {
    return 1;
}

// Only f32 x f32 input combinations are supported by this emitter.
std::set<std::vector<element::Type>> jit_greater_emitter::get_supported_precisions(
        const std::shared_ptr<ov::Node>& node) {
    return {{element::f32, element::f32}};
}

// Dispatch to the ISA-specialized code generator; ASIMD is the only ISA implemented.
void jit_greater_emitter::emit_impl(const std::vector<size_t> &in_vec_idxs, const std::vector<size_t> &out_vec_idxs) const {
    // Guard clause: reject any ISA other than ASIMD up front.
    if (host_isa_ != dnnl::impl::cpu::aarch64::asimd) {
        OV_CPU_JIT_EMITTER_THROW("Can't create jit eltwise kernel");
    }
    emit_isa<dnnl::impl::cpu::aarch64::asimd>(in_vec_idxs, out_vec_idxs);
}

/// Generates ASIMD code computing dst[i] = (src1[i] > src2[i]) ? 1.0f : 0.0f.
/// Inputs/outputs are f32 vector lanes (asserted below).
template <dnnl::impl::cpu::aarch64::cpu_isa_t isa>
void jit_greater_emitter::emit_isa(const std::vector<size_t> &in_vec_idxs, const std::vector<size_t> &out_vec_idxs) const {
    OV_CPU_JIT_EMITTER_ASSERT(exec_prc_ == ov::element::f32, "unsupported precision: " + exec_prc_.to_string());

    using TReg = typename dnnl::impl::cpu::aarch64::cpu_isa_traits<isa>::TReg;
    TReg src1 = TReg(in_vec_idxs[0]);
    TReg src2 = TReg(in_vec_idxs[1]);
    TReg dst = TReg(out_vec_idxs[0]);
    TReg aux = TReg(aux_vec_idxs[0]);

    // BUGFIX: use FCMGT (floating-point compare greater-than), not CMGT.
    // CMGT compares lanes as *signed integers*; on IEEE-754 bit patterns that
    // inverts the ordering of negative floats (e.g. -1.0f vs -2.0f). FCMGT sets
    // each dst lane to all-ones where src1 > src2, all-zeros otherwise.
    h->fcmgt(dst.s, src1.s, src2.s);

    // Broadcast the table constant "one" (1.0f) into aux, then AND with the
    // all-ones/all-zeros mask so each result lane becomes float 1.0f or 0.0f.
    h->ld1r(aux.s, table_val2("one"));
    h->and_(dst.b16, dst.b16, aux.b16);
}

// Registers constants loaded at runtime by emit_isa.
void jit_greater_emitter::register_table_entries() {
    // 0x3f800000 is the IEEE-754 bit pattern of 1.0f; it is broadcast per-lane
    // (ld1r in emit_isa) and ANDed with the compare mask to yield 1.0f/0.0f.
    push_arg_entry_of("one", 0x3f800000, true);
}


/// EQUAL ///
jit_equal_emitter::jit_equal_emitter(dnnl::impl::cpu::aarch64::jit_generator *host,
dnnl::impl::cpu::aarch64::cpu_isa_t host_isa,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,33 @@ class jit_equal_emitter : public jit_emitter {
void register_table_entries() override;
};

// JIT emitter for the element-wise Greater operation on AArch64:
// dst[i] = (src1[i] > src2[i]) ? 1.0f : 0.0f, f32 only.
class jit_greater_emitter : public jit_emitter {
public:
    // Constructor with an explicit execution precision (defaults to f32).
    jit_greater_emitter(dnnl::impl::cpu::aarch64::jit_generator *host,
                        dnnl::impl::cpu::aarch64::cpu_isa_t host_isa,
                        const ov::element::Type exec_prc = ov::element::f32);

    // Constructor deriving the execution precision from the graph node.
    jit_greater_emitter(dnnl::impl::cpu::aarch64::jit_generator *host,
                        dnnl::impl::cpu::aarch64::cpu_isa_t host_isa,
                        const std::shared_ptr<ov::Node>& node);

    // Binary operation: two inputs.
    size_t get_inputs_count() const override;

    // One auxiliary vector register (holds the broadcast 1.0f constant).
    size_t get_aux_vecs_count() const override;

    // One auxiliary general-purpose register.
    size_t get_aux_gprs_count() const override;

    // Registers the "one" (1.0f) constant table entry.
    void register_table_entries() override;

    // Reports the supported input precision combinations ({f32, f32} only).
    static std::set<std::vector<element::Type>> get_supported_precisions(
        const std::shared_ptr<ov::Node>& node = nullptr);
private:
    // Validates the host ISA and dispatches to the templated code generator.
    void emit_impl(const std::vector<size_t> &in_vec_idxs, const std::vector<size_t> &out_vec_idxs) const override;

    // ISA-specialized code generation for the Greater kernel.
    template <dnnl::impl::cpu::aarch64::cpu_isa_t isa>
    void emit_isa(const std::vector<size_t> &in_vec_idxs, const std::vector<size_t> &out_vec_idxs) const;
};

class jit_exp_emitter : public jit_emitter {
public:
jit_exp_emitter(dnnl::impl::cpu::aarch64::jit_generator* host,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ bool JitEltwiseExecutor::isSupported(
Algorithm::EltwiseFloor,
Algorithm::EltwiseGeluErf,
Algorithm::EltwiseGeluTanh,
Algorithm::EltwiseGreater,
Algorithm::EltwiseHswish,
Algorithm::EltwiseIsInf,
Algorithm::EltwiseMaximum,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -595,6 +595,15 @@ struct EltwiseEmitter<jit_elu_emitter> {
}
};

template<>
struct EltwiseEmitter<jit_greater_emitter> {
    // Factory functor: constructs the Greater emitter from the shared context.
    void operator()(EltwiseEmitterContext& ctx) {
        ctx.emitter = std::make_shared<jit_greater_emitter>(ctx.host, ctx.host_isa, ctx.exec_prc);
    }
};

template<>
struct EltwiseEmitter<jit_clamp_emitter> {
void operator()(EltwiseEmitterContext& ctx) {
Expand Down Expand Up @@ -655,6 +664,7 @@ std::shared_ptr<jit_emitter> jit_uni_eltwise_generic<isa>::create_eltwise_emitte
OV_CASE(Algorithm::EltwiseMish, ov::intel_cpu::aarch64::jit_mish_emitter),
OV_CASE(Algorithm::EltwiseGeluErf, ov::intel_cpu::aarch64::jit_gelu_erf_emitter),
OV_CASE(Algorithm::EltwiseGeluTanh, ov::intel_cpu::aarch64::jit_gelu_tanh_emitter),
OV_CASE(Algorithm::EltwiseGreater, ov::intel_cpu::aarch64::jit_greater_emitter),
OV_CASE(Algorithm::EltwiseMulAdd, ov::intel_cpu::aarch64::jit_mul_add_emitter),
OV_CASE(Algorithm::EltwiseMod, ov::intel_cpu::aarch64::jit_mod_emitter),
OV_CASE(Algorithm::EltwiseMultiply, ov::intel_cpu::aarch64::jit_multiply_emitter),
Expand Down Expand Up @@ -821,6 +831,7 @@ std::set<std::vector<element::Type>> eltwise_precision_helper::get_supported_pre
OV_CASE(Algorithm::EltwiseFloor, jit_floor_emitter),
OV_CASE(Algorithm::EltwiseGeluErf, jit_gelu_erf_emitter),
OV_CASE(Algorithm::EltwiseGeluTanh, jit_gelu_tanh_emitter),
OV_CASE(Algorithm::EltwiseGreater, jit_greater_emitter),
OV_CASE(Algorithm::EltwiseHswish, jit_hswish_emitter),
OV_CASE(Algorithm::EltwiseIsInf, jit_is_inf_emitter),
OV_CASE(Algorithm::EltwiseMaximum, jit_maximum_emitter),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,7 @@ std::string ActivationLayerCPUTest::getPrimitiveType(const utils::ActivationType
(activation_type == utils::ActivationTypes::Mish) ||
(activation_type == utils::ActivationTypes::GeluErf) ||
(activation_type == utils::ActivationTypes::GeluTanh) ||
(activation_type == utils::ActivationTypes::Greater) ||
(activation_type == utils::ActivationTypes::Relu) ||
(activation_type == utils::ActivationTypes::Sigmoid) ||
(activation_type == utils::ActivationTypes::Swish) ||
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes
{ActivationTypes::RoundHalfAwayFromZero, {}},
{ActivationTypes::GeluErf, {}},
{ActivationTypes::GeluTanh, {}},
{ActivationTypes::Greater, {}},
{ActivationTypes::Swish, {{0.4f}}},
{ActivationTypes::IsInf, {}}
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ static std::map<ActivationTypes, std::string> activationNames = {
{ActivationTypes::RoundHalfAwayFromZero, "RoundHalfAwayFromZero"},
{ActivationTypes::GeluErf, "GeluErf"},
{ActivationTypes::GeluTanh, "GeluTanh"},
{ActivationTypes::Greater, "Greater"},
{ActivationTypes::SoftSign, "SoftSign"},
{ActivationTypes::IsInf, "IsInf"},
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ enum ActivationTypes {
RoundHalfAwayFromZero,
GeluErf,
GeluTanh,
Greater,
SoftSign,
IsInf
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
#include "openvino/op/swish.hpp"
#include "openvino/op/tan.hpp"
#include "openvino/op/tanh.hpp"
#include "openvino/op/greater.hpp"

namespace ov {
namespace test {
Expand Down Expand Up @@ -158,6 +159,8 @@ std::shared_ptr<ov::Node> make_activation(const ov::ParameterVector& parameters,
switch (activation_type) {
case ov::test::utils::ActivationTypes::LeakyRelu:
return std::make_shared<ov::op::v0::PRelu>(parameters[0], parameters[1]);
case ov::test::utils::ActivationTypes::Greater:
return std::make_shared<ov::op::v1::Greater>(parameters[0], parameters[1]);
case ov::test::utils::ActivationTypes::HardSigmoid:
return std::make_shared<ov::op::v0::HardSigmoid>(parameters[0], parameters[1], parameters[2]);
case ov::test::utils::ActivationTypes::Selu:
Expand Down

0 comments on commit 48ccbd1

Please sign in to comment.