1 change: 0 additions & 1 deletion onnxruntime/core/providers/cuda/tensor/grid_sample.cc
@@ -51,7 +51,6 @@ template <typename T, bool IsNHWC>
GridSample<T, IsNHWC>::GridSample(const OpKernelInfo& info) : CudaKernel(info) {
opset_start_version_ = info.node().SinceVersion();

std::string mode_str = info.GetAttrOrDefault<std::string>("mode", "bilinear");
std::string padding_mode_str = info.GetAttrOrDefault<std::string>("padding_mode", "zeros");
align_corners_ = static_cast<bool>(info.GetAttrOrDefault<int64_t>("align_corners", 0));

5 changes: 2 additions & 3 deletions onnxruntime/python/tools/transformers/fusion_skiplayernorm.py
@@ -110,9 +110,8 @@ def fuse(self, node, input_name_to_nodes, output_name_to_node):
)
return
else:
- # Shape inference failed. Use default skip_index=1 (no broadcasting) since both
- # Add inputs have already been verified as non-initializer dynamic tensors above.
- logger.debug("symbolic shape inference failed, using default skip_index for SkipLayerNormalization")
+ logger.debug("skip SkipLayerNormalization fusion since symbolic shape inference failed")
+ return

gather_path = self.model.match_parent_path(add, ["Gather"], [None])
if gather_path is not None and self.model.find_graph_input(gather_path[0].input[1]) is None:
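Note on this change: when symbolic shape inference fails, the fusion pass now returns without creating a SkipLayerNormalization node instead of falling back to a default skip_index. The snippet below is a minimal sketch of that control flow only, not the real FusionSkipLayerNormalization code; get_symbolic_shape and do_fuse are hypothetical helpers standing in for the model utilities the pass actually uses.

import logging

logger = logging.getLogger(__name__)

def try_fuse(add_node, get_symbolic_shape, do_fuse):
    """Illustrative control flow only, not the real fusion implementation."""
    # Look up the (symbolic) shapes of the two Add inputs.
    shapes = [get_symbolic_shape(name) for name in add_node.input[:2]]
    if any(shape is None for shape in shapes):
        # New behavior: bail out rather than assume skip_index = 1 (no broadcasting).
        logger.debug("skip SkipLayerNormalization fusion since symbolic shape inference failed")
        return False
    # Shapes are known, so the pass can safely pick skip_index and fuse.
    do_fuse(add_node, shapes)
    return True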
@@ -37,7 +37,7 @@ TYPED_TEST(GridSampleCustomTest, test_grid_sample_20_4D_linear_zeros_mixed_bound
test.AddAttribute("padding_mode", padding_mode);
test.AddAttribute("align_corners", align_corners);
test.AddOutput<TypeParam>("Y", Y_shape, Y_data);
- RunTests(test, GetExecutionProviders(20));
+ RunTests(test, GetExecutionProviders());
}

TYPED_TEST(GridSampleCustomTest, test_grid_sample_20_4D_linear_zeros_mixed_bounds_left_top) {
@@ -69,6 +69,5 @@ TYPED_TEST(GridSampleCustomTest, test_grid_sample_20_4D_linear_zeros_mixed_bound
test.AddAttribute("padding_mode", padding_mode);
test.AddAttribute("align_corners", align_corners);
test.AddOutput<TypeParam>("Y", Y_shape, Y_data);
- RunTests(test, GetExecutionProviders(20));
+ RunTests(test, GetExecutionProviders());
}

12 changes: 6 additions & 6 deletions onnxruntime/test/python/transformers/test_attention_fusion.py
@@ -395,17 +395,17 @@ def test_qwen3_normalization_fusion(self):
ssln_count = sum(1 for n in nodes if n.op_type == "SkipSimplifiedLayerNormalization")

# 4 RMSNorm patterns: pre-attn, Q-norm, K-norm, post-attn.
- # Post-attn RMSNorm has an Add parent (residual) → fused as SkipLayerNormalization.
- # Remaining 3 stay as SimplifiedLayerNormalization.
+ # Fallback for SkipLayerNormalization is disabled, so post-attn RMSNorm does not fuse.
+ # All 4 stay as SimplifiedLayerNormalization.
self.assertEqual(
sln_count,
- 3,
- f"Expected 3 SimplifiedLayerNormalization (pre-attn + Q-norm + K-norm), got {sln_count}",
+ 4,
+ f"Expected 4 SimplifiedLayerNormalization (pre-attn + Q-norm + K-norm + post-attn), got {sln_count}",
)
self.assertEqual(
ssln_count,
- 1,
- f"Expected 1 SkipSimplifiedLayerNormalization (residual + post-attn RMSNorm), got {ssln_count}",
+ 0,
+ f"Expected 0 SkipSimplifiedLayerNormalization (residual + post-attn RMSNorm failed to fuse), got {ssln_count}",
)


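For context on the updated expectations: the test counts op types in the optimized graph and asserts that, with the fallback disabled, all four RMSNorm patterns stay as SimplifiedLayerNormalization. A standalone version of that kind of count with the onnx Python API looks roughly like the sketch below; the model path is hypothetical, since the real test builds the optimized model in memory.

from collections import Counter

import onnx

# Hypothetical path; the actual test runs the transformer optimizer first.
model = onnx.load("qwen3_optimized.onnx")
op_counts = Counter(node.op_type for node in model.graph.node)

# With the SkipLayerNormalization fallback disabled, no RMSNorm fuses into the
# Skip variant, so all 4 remain as SimplifiedLayerNormalization.
assert op_counts["SimplifiedLayerNormalization"] == 4
assert op_counts["SkipSimplifiedLayerNormalization"] == 0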