Skip to content

Commit 9263a24

Browse files
committed
Add bias check for fused_layer_norm
1 parent e01b0f6 commit 9263a24

File tree

2 files changed

+34
-0
lines changed

2 files changed

+34
-0
lines changed

paddle/phi/infermeta/multiary.cc

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2647,6 +2647,17 @@ void FusedLayerNormInferMeta(const MetaTensor& x,
26472647
x_dims_vec[i],
26482648
residual_dims_vec[i]));
26492649
}
2650+
if (config.is_runtime && bias && normalized_dims != 0) {
2651+
PADDLE_ENFORCE_EQ(bias.numel(),
2652+
normalized_dims,
2653+
common::errors::InvalidArgument(
2654+
"The numel of Input(bias) must be equal to "
2655+
"the normalized_dims of Input(X), but "
2656+
"received numel of Input(bias) is [%d], "
2657+
"received normalized_dims of Input(X) is [%d]",
2658+
bias.numel(),
2659+
normalized_dims));
2660+
}
26502661
}
26512662

26522663
int64_t rows = 1;

test/legacy_test/test_fused_layernorm_op.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1229,5 +1229,28 @@ def setUp(self):
12291229
self.quant_min_bound = -127
12301230

12311231

1232+
@unittest.skipIf(
    not core.is_compiled_with_cuda() and not paddle.is_compiled_with_rocm(),
    "core is not compiled with CUDA or ROCM",
)
class TestFusedLayerNorm_ZeroSize_Error(unittest.TestCase):
    """Verify fused_layer_norm rejects a zero-size ``bias`` tensor.

    Exercises the InferMeta-side numel check: a bias with 0 elements must
    not match ``normalized_dims`` of the input, so the call should fail.
    """

    def test_bias_error(self):
        with paddle.base.dygraph.guard():
            # Well-formed inputs; the empty bias is the only defect.
            x = paddle.randn([16, 256], dtype="float32")
            residual = paddle.rand([16, 256], "float32")
            norm_weight = paddle.randn([256], dtype="float32")
            norm_bias = paddle.randn([256], dtype="float32")
            zero_size_bias = paddle.randn([0], dtype="float32")
            # The bias-numel mismatch should surface as a ValueError.
            with self.assertRaises(ValueError):
                paddle.incubate.nn.functional.fused_layer_norm(
                    x=x,
                    norm_weight=norm_weight,
                    norm_bias=norm_bias,
                    epsilon=1e-06,
                    begin_norm_axis=1,
                    bias=zero_size_bias,
                    residual=residual,
                )
1253+
1254+
12321255
if __name__ == "__main__":
12331256
unittest.main()

0 commit comments

Comments (0)