Skip to content

Commit

Permalink
Rename function and add tests for SoftmaxCrossEntropyLoss
Browse files Browse the repository at this point in the history
Renamed `impl_softmax_cross_entropy` to `onnx_softmax_crossentropy_loss` in `softmax_crossentropy_loss.cpp` and updated all function calls accordingly. Updated `ONNX_OP` macro for `SoftmaxCrossEntropyLoss` in `opset_13` to use `OPSET_SINCE(13)`.

Added new test cases in `onnx_import.in.cpp` for various scenarios of the `SoftmaxCrossEntropyLoss` operation:
- `onnx_model_softmax_crossentropy_loss_mean_weight_ii`
- `onnx_model_softmax_crossentropy_loss_sum`
- `onnx_model_softmax_crossentropy_loss_none`
- `onnx_model_softmax_crossentropy_loss_higher_dim`

Included new ONNX model files in `prototxt` format for the new test cases:
- `softmax_crossentropy_loss_higher_dim.prototxt`
- `softmax_crossentropy_loss_mean_weight_ii.prototxt`
- `softmax_crossentropy_loss_none.prototxt`
- `softmax_crossentropy_loss_sum.prototxt`

These files define the graph structure, input/output tensor shapes, and attributes for the `SoftmaxCrossEntropyLoss` operation with different reduction methods and configurations.
  • Loading branch information
AJThePro99 committed Feb 11, 2025
1 parent 3f9521d commit ac2b473
Show file tree
Hide file tree
Showing 6 changed files with 280 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ namespace ov {
namespace frontend {
namespace onnx {
namespace {
OutputVector impl_softmax_cross_entropy(const Node& node, int64_t axis_default) {
OutputVector onnx_softmax_crossentropy_loss(const Node& node, int64_t axis_default) {
const auto inputs = node.get_ov_inputs();

const auto scores = inputs[0];
Expand Down Expand Up @@ -96,15 +96,16 @@ OutputVector impl_softmax_cross_entropy(const Node& node, int64_t axis_default)
namespace ai_onnx {
namespace opset_12 {
/// ONNX SoftmaxCrossEntropyLoss-12 translator: lowers via the shared helper,
/// using the ONNX default class axis of 1.
OutputVector softmax_cross_entropy_loss(const Node& node) {
    return onnx_softmax_crossentropy_loss(node, 1);
}
ONNX_OP("SoftmaxCrossEntropyLoss", OPSET_IN(12), ai_onnx::opset_12::softmax_cross_entropy_loss);
}  // namespace opset_12
namespace opset_13 {
/// ONNX SoftmaxCrossEntropyLoss-13 translator.
OutputVector softmax_cross_entropy_loss(const Node& node) {
    // The only difference from opset 12 is that opset 13 additionally allows
    // the bfloat16 datatype; the lowering itself is identical.
    return onnx_softmax_crossentropy_loss(node, 1);
}
// OPSET_SINCE(13): this translator serves opset 13 and every later opset.
ONNX_OP("SoftmaxCrossEntropyLoss", OPSET_SINCE(13), ai_onnx::opset_13::softmax_cross_entropy_loss);
}  // namespace opset_13
}  // namespace ai_onnx
} // namespace onnx
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# ONNX model (protobuf text format): a single SoftmaxCrossEntropyLoss node with
# reduction="mean" over 4-D scores x (2, 3, 2, 2) and int64 labels y (2, 2, 2),
# producing a scalar loss z. Class axis is the default axis 1 (3 classes).
ir_version: 7
producer_name: "OpenVINO ONNX Frontend"
graph {
node {
input: "x"
input: "y"
output: "z"
op_type: "SoftmaxCrossEntropyLoss"
attribute {
name: "reduction"
s: "mean"
type: STRING
}
}
name: "test_sce_higher_dim"
input {
name: "x"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {
dim { dim_value: 2 }
dim { dim_value: 3 }
dim { dim_value: 2 }
dim { dim_value: 2 }
}
}
}
}
input {
name: "y"
type {
tensor_type {
elem_type: 7  # INT64
shape {
dim { dim_value: 2 }
dim { dim_value: 2 }
dim { dim_value: 2 }
}
}
}
}
output {
name: "z"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {}  # scalar: "mean" reduction collapses all dimensions
}
}
}
}
opset_import {
version: 13
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
# ONNX model (protobuf text format): SoftmaxCrossEntropyLoss with
# reduction="mean", per-class weights w (3,), and ignore_index=-1, over scores
# x (2, 3, 2) and int64 labels y (2, 2); produces a scalar loss z.
ir_version: 7
producer_name: "OpenVINO ONNX Frontend"
graph {
node {
input: "x"
input: "y"
input: "w"
output: "z"
op_type: "SoftmaxCrossEntropyLoss"
attribute {
name: "reduction"
s: "mean"
type: STRING
}
attribute {
name: "ignore_index"
i: -1  # label entries equal to -1 do not contribute to the loss
type: INT
}
}
name: "test_sce_mean_weight_ii"
input {
name: "x"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {
dim { dim_value: 2 }
dim { dim_value: 3 }
dim { dim_value: 2 }
}
}
}
}
input {
name: "y"
type {
tensor_type {
elem_type: 7  # INT64
shape {
dim { dim_value: 2 }
dim { dim_value: 2 }
}
}
}
}
input {
name: "w"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {
dim { dim_value: 3 }  # one weight per class
}
}
}
}
output {
name: "z"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {}  # scalar: weighted mean over non-ignored entries
}
}
}
}
opset_import {
version: 13
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# ONNX model (protobuf text format): SoftmaxCrossEntropyLoss with
# reduction="none" over scores x (2, 3) and int64 labels y (2,);
# output z keeps the per-sample losses, shape (2,).
ir_version: 7
producer_name: "OpenVINO ONNX Frontend"
graph {
node {
input: "x"
input: "y"
output: "z"
op_type: "SoftmaxCrossEntropyLoss"
attribute {
name: "reduction"
s: "none"
type: STRING
}
}
name: "test_sce_none"
input {
name: "x"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {
dim { dim_value: 2 }
dim { dim_value: 3 }
}
}
}
}
input {
name: "y"
type {
tensor_type {
elem_type: 7  # INT64
shape {
dim { dim_value: 2 }
}
}
}
}
output {
name: "z"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {
dim { dim_value: 2 }  # one loss value per batch element
}
}
}
}
}
opset_import {
version: 13
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
# ONNX model (protobuf text format): SoftmaxCrossEntropyLoss with
# reduction="sum" over scores x (2, 3) and int64 labels y (2,);
# produces a scalar loss z (sum of per-sample losses).
ir_version: 7
producer_name: "OpenVINO ONNX Frontend"
graph {
node {
input: "x"
input: "y"
output: "z"
op_type: "SoftmaxCrossEntropyLoss"
attribute {
name: "reduction"
s: "sum"
type: STRING
}
}
name: "test_sce_sum"
input {
name: "x"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {
dim { dim_value: 2 }
dim { dim_value: 3 }
}
}
}
}
input {
name: "y"
type {
tensor_type {
elem_type: 7  # INT64
shape {
dim { dim_value: 2 }
}
}
}
}
output {
name: "z"
type {
tensor_type {
elem_type: 1  # FLOAT
shape {}  # scalar: "sum" reduction collapses all dimensions
}
}
}
}
opset_import {
version: 13
}
46 changes: 46 additions & 0 deletions src/frontends/onnx/tests/onnx_import.in.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6945,3 +6945,49 @@ OPENVINO_TEST(${BACKEND_NAME}, onnx_float8e4m3fn_constant) {

test_case.run();
}

// SoftmaxCrossEntropyLoss, reduction="mean", per-class weights, ignore_index=-1.
OPENVINO_TEST(${BACKEND_NAME}, onnx_model_softmax_crossentropy_loss_mean_weight_ii) {
    auto model = convert_model("softmax_crossentropy_loss_mean_weight_ii.onnx");

    auto test_case = ov::test::TestCase(model, s_device);
    // Scores x: shape (2, 3, 2) — (batch, class, d1)
    test_case.add_input<float>({1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f, 8.f, 9.f, 10.f, 11.f, 12.f});
    // Labels: shape (2, 2); the -1 entry matches ignore_index and is excluded
    test_case.add_input<int64_t>({0, -1, 2, 1});
    // Per-class weights: shape (3,)
    test_case.add_input<float>({0.5f, 1.0f, 2.0f});
    // Weighted mean over the 3 non-ignored entries:
    //   (0.5 * 4.1429317 + 2.0 * 0.1429317 + 1.0 * 2.1429317) / (0.5 + 2.0 + 1.0)
    //   = 4.5002610 / 3.5 = 1.2857889
    // (previous expected value 1.28642857 did not match this computation)
    test_case.add_expected_output<float>(Shape{}, {1.2857889f});
    test_case.run();
}

// SoftmaxCrossEntropyLoss, reduction="sum": scalar sum of per-sample losses.
OPENVINO_TEST(${BACKEND_NAME}, onnx_model_softmax_crossentropy_loss_sum) {
    auto model = convert_model("softmax_crossentropy_loss_sum.onnx");

    auto test_case = ov::test::TestCase(model, s_device);
    // Scores x: shape (2, 3); labels: shape (2,)
    test_case.add_input<float>({0.5f, 1.5f, 2.5f, 3.5f, 4.5f, 5.5f});
    test_case.add_input<int64_t>({1, 2});
    // Per-sample losses: logsumexp([0.5,1.5,2.5]) - 1.5 = 1.4076061 and
    //                    logsumexp([3.5,4.5,5.5]) - 5.5 = 0.4076061
    // Sum = 1.8152122 (previous expected value 1.8137 was imprecise)
    test_case.add_expected_output<float>(Shape{}, {1.8152122f});
    test_case.run();
}

// SoftmaxCrossEntropyLoss, reduction="none": per-sample losses, shape (2,).
OPENVINO_TEST(${BACKEND_NAME}, onnx_model_softmax_crossentropy_loss_none) {
    auto model = convert_model("softmax_crossentropy_loss_none.onnx");

    auto test_case = ov::test::TestCase(model, s_device);
    // Scores x: shape (2, 3); labels: shape (2,)
    test_case.add_input<float>({1.f, 2.f, 3.f, 4.f, 5.f, 6.f});
    test_case.add_input<int64_t>({2, 1});
    // logsumexp([1,2,3]) - 3 = 0.4076061 and logsumexp([4,5,6]) - 5 = 1.4076061
    // (previous expected values {0.407, 1.4067} were imprecise)
    test_case.add_expected_output<float>(Shape{2}, {0.4076061f, 1.4076061f});
    test_case.run();
}

// SoftmaxCrossEntropyLoss over a 4-D input (2, 3, 2, 2), reduction="mean".
OPENVINO_TEST(${BACKEND_NAME}, onnx_model_softmax_crossentropy_loss_higher_dim) {
    auto model = convert_model("softmax_crossentropy_loss_higher_dim.onnx");

    auto test_case = ov::test::TestCase(model, s_device);
    // Flattened all-ones scores for shape (2, 3, 2, 2)
    test_case.add_input<float>(std::vector<float>(24, 1.0f));
    // Labels, shape (2, 2, 2)
    test_case.add_input<int64_t>({1, 0, 2, 1, 0, 2, 1, 1});
    // With equal logits, softmax over the 3 classes is uniform, so every
    // element's loss is -ln(1/3) = ln(3) = 1.0986123 and so is the mean.
    // (previous expected value 1.2f was a placeholder, not the true result)
    test_case.add_expected_output<float>(Shape{}, {1.0986123f});
    test_case.run();
}

0 comments on commit ac2b473

Please sign in to comment.