From fb8cdc94fe3a03dce324d30a8ae6d0ef1043cdf4 Mon Sep 17 00:00:00 2001
From: Zheyu Ye
Date: Tue, 25 Feb 2020 07:34:54 +0000
Subject: [PATCH] fix in_channels of LN

---
 scripts/language_model/model/qa.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/language_model/model/qa.py b/scripts/language_model/model/qa.py
index bcaa5be436..73619efff6 100644
--- a/scripts/language_model/model/qa.py
+++ b/scripts/language_model/model/qa.py
@@ -43,7 +43,7 @@ def __init__(self, units=768, is_eval=False, prefix=None, params=None):
         with self.name_scope():
             self.dense_0 = nn.Dense(units, activation='tanh', flatten=False)
             self.dense_1 = nn.Dense(1, flatten=False)
-            self.layernorm = nn.LayerNorm(epsilon=1e-12, in_channels=768)
+            self.layernorm = nn.LayerNorm(epsilon=1e-12, in_channels=units)

     def __call__(self, hidden_states,
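
Note: the sketch below is not part of the patch. It is a minimal, self-contained illustration (assuming MXNet 1.x Gluon; the class name PoolerSketch and its forward pass are hypothetical stand-ins, not the actual block in qa.py) of why the LayerNorm's in_channels must follow the configured `units`: with a hardcoded in_channels=768, the gamma/beta parameters have shape (768,) and any model whose hidden size differs from 768 fails with a shape mismatch at runtime.

    import mxnet as mx
    from mxnet.gluon import nn


    class PoolerSketch(nn.HybridBlock):
        """Illustrative stand-in for the patched block in qa.py."""

        def __init__(self, units=768, prefix=None, params=None):
            super(PoolerSketch, self).__init__(prefix=prefix, params=params)
            with self.name_scope():
                self.dense_0 = nn.Dense(units, activation='tanh', flatten=False)
                self.dense_1 = nn.Dense(1, flatten=False)
                # After the fix: the LayerNorm parameter size tracks `units`
                # instead of being pinned to 768.
                self.layernorm = nn.LayerNorm(epsilon=1e-12, in_channels=units)

        def hybrid_forward(self, F, hidden_states):
            # Normalize the projected states, then reduce to a single logit.
            out = self.layernorm(self.dense_0(hidden_states))
            return self.dense_1(out)


    # With units != 768 the hardcoded version would raise a shape mismatch;
    # the fixed version initializes and runs cleanly.
    block = PoolerSketch(units=1024)
    block.initialize()
    x = mx.nd.random.normal(shape=(2, 16, 1024))  # (batch, seq_len, units)
    print(block(x).shape)                         # (2, 16, 1)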