Skip to content

Commit

Permalink
[ONNX] Added BiasAdd from com.microsoft domain (#28713)
Browse files Browse the repository at this point in the history
Details:
Microsoft Contrib Operator "BiasAdd" for ONNX RT

Tickets:
N/A
  • Loading branch information
vatsalashanubhag authored Feb 18, 2025
1 parent e9f13c9 commit 64e76e8
Show file tree
Hide file tree
Showing 3 changed files with 187 additions and 0 deletions.
80 changes: 80 additions & 0 deletions src/frontends/onnx/frontend/src/op/com.microsoft/bias_add.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
// Copyright (C) 2018-2025 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "core/operator_set.hpp"
#include "exceptions.hpp"
#include "openvino/frontend/exception.hpp"
#include "openvino/op/add.hpp"
#include "openvino/op/constant.hpp"
#include "utils/common.hpp"

using namespace ov::op;

namespace ov {
namespace frontend {
namespace onnx {
namespace com_microsoft {
namespace opset_1 {

ov::OutputVector bias_add(const ov::frontend::onnx::Node& node) {
    // com.microsoft BiasAdd: Y = X + bias + skip, where X and skip are
    // (N, S, C) tensors and bias is a per-channel (C,) vector broadcast
    // over the leading dimensions.
    // https://github.com/microsoft/onnxruntime/blob/main/docs/ContribOperators.md#commicrosoftbiasadd

    common::default_op_checks(node, 3);

    const auto inputs = node.get_ov_inputs();
    const auto& data = inputs[0];      // X
    const auto& bias = inputs[1];      // per-channel bias
    const auto& residual = inputs[2];  // skip connection

    // The contrib-op spec constrains T to FP16/FP32 only.
    const auto dtype = data.get_element_type();
    const bool dtype_supported = (dtype == ov::element::f16) || (dtype == ov::element::f32);
    CHECK_VALID_NODE(node,
                     dtype_supported,
                     "Unsupported input data type for X, expected FP16 or FP32 but got: ",
                     dtype);

    const auto& data_shape = data.get_partial_shape();
    const auto& bias_shape = bias.get_partial_shape();
    const auto& residual_shape = residual.get_partial_shape();

    // True iff the shape's rank is known and equal to expected_rank.
    const auto has_rank = [](const PartialShape& shape, const int64_t expected_rank) {
        return shape.rank().is_static() && shape.rank().get_length() == expected_rank;
    };

    CHECK_VALID_NODE(node, has_rank(data_shape, 3), "Input X must have rank 3 (N, S, C), but got: ", data_shape);
    CHECK_VALID_NODE(node,
                     has_rank(residual_shape, 3),
                     "Input skip must have rank 3 (N, S, C), but got: ",
                     residual_shape);

    CHECK_VALID_NODE(node,
                     data_shape.compatible(residual_shape),
                     "Input X and skip must have the same shape, but got: X=",
                     data_shape,
                     ", skip=",
                     residual_shape);

    CHECK_VALID_NODE(node, has_rank(bias_shape, 1), "Input bias must have rank 1 (C), but got: ", bias_shape);

    // bias length must be broadcast-compatible with the channel (last) axis.
    CHECK_VALID_NODE(node,
                     data_shape[2].compatible(bias_shape[0]),
                     "Input bias shape must match the channel dimension (C) of X and skip, but got: bias=",
                     bias_shape,
                     ", X=",
                     data_shape);

    // Lower to two elementwise additions: (X + bias) + skip; the (C,) bias
    // broadcasts over (N, S, C) via numpy-style autobroadcast.
    const auto biased = std::make_shared<v1::Add>(data, bias);
    return {std::make_shared<v1::Add>(biased, residual)};
}

ONNX_OP("BiasAdd", OPSET_SINCE(1), com_microsoft::opset_1::bias_add, MICROSOFT_DOMAIN);

} // namespace opset_1
} // namespace com_microsoft
} // namespace onnx
} // namespace frontend
} // namespace ov
86 changes: 86 additions & 0 deletions src/frontends/onnx/tests/models/com.microsoft/bias_add.prototxt
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# Minimal ONNX model exercising the com.microsoft BiasAdd contrib op:
# Y = X + bias + skip, with X/skip of shape (2, 4, 3) and bias of shape (3).
# Consumed by the onnx_com_microsoft_bias_add test after prototxt -> onnx
# conversion.
ir_version: 7
producer_name: "OpenVINO ONNX Frontend"
graph {
node {
input: "X"
input: "bias"
input: "skip"
output: "Y"
op_type: "BiasAdd"
domain: "com.microsoft"
}
name: "test_bias_add_float"
input {
name: "X"
type {
tensor_type {
# elem_type 1 = FLOAT (ONNX TensorProto.DataType)
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 4
}
dim {
dim_value: 3
}
}
}
}
}
input {
name: "bias"
type {
tensor_type {
elem_type: 1
shape {
# rank-1 bias; length matches the channel (last) dim of X and skip
dim {
dim_value: 3
}
}
}
}
}
input {
name: "skip"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 4
}
dim {
dim_value: 3
}
}
}
}
}
output {
name: "Y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 2
}
dim {
dim_value: 4
}
dim {
dim_value: 3
}
}
}
}
}
}
opset_import {
version: 1
}
21 changes: 21 additions & 0 deletions src/frontends/onnx/tests/onnx_import_com_microsoft.in.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1682,3 +1682,24 @@ OPENVINO_TEST(${BACKEND_NAME}, onnx_com_microsoft_qlinear_mul) {
test_case.add_expected_output<int8_t>(Shape{2, 2}, expected_output);
test_case.run();
}

OPENVINO_TEST(${BACKEND_NAME}, onnx_com_microsoft_bias_add) {
    // BiasAdd computes Y = X + bias + skip; bias (C,) broadcasts over (N, S, C).
    const auto model = convert_model("com.microsoft/bias_add.onnx");
    auto test_case = ov::test::TestCase(model, s_device);

    // X and skip are (2, 4, 3); bias has one value per channel.
    const std::vector<float> x_data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
                                       13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24};
    const std::vector<float> bias_data = {2, 0, -1};
    const std::vector<float> skip_data = {0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3,
                                          4, 4, 4, 5, 5, 5, 6, 6, 6, 7, 7, 7};

    // Elementwise x_data + skip_data, plus bias_data repeated along the channel axis.
    const std::vector<float> reference = {3, 2, 2, 7, 6, 6, 11, 10, 10, 15, 14, 14,
                                          19, 18, 18, 23, 22, 22, 27, 26, 26, 31, 30, 30};

    // Input order must match the model's graph inputs: X, bias, skip.
    test_case.add_input<float>(Shape{2, 4, 3}, x_data);
    test_case.add_input<float>(Shape{3}, bias_data);
    test_case.add_input<float>(Shape{2, 4, 3}, skip_data);

    test_case.add_expected_output<float>(Shape{2, 4, 3}, reference);

    test_case.run();
}

0 comments on commit 64e76e8

Please sign in to comment.