MariaFjodorowa dschulmeist committed on
Commit eff02af · verified · 1 Parent(s): c568f0c

Fix AttributeError in _init_weights for LayerNorm (#5)


- Fix AttributeError in _init_weights for LayerNorm (330aefb80be2e1c3cf5c0beeb92001d93d928e98)


Co-authored-by: David S <[email protected]>

Files changed (1)
  1. modeling_ltgbert.py +4 -2
modeling_ltgbert.py CHANGED
@@ -255,8 +255,10 @@ class LtgbertPreTrainedModel(PreTrainedModel):
         elif isinstance(module, nn.Embedding):
             nn.init.trunc_normal_(module.weight.data, mean=0.0, std=std, a=-2*std, b=2*std)
         elif isinstance(module, nn.LayerNorm):
-            module.bias.data.zero_()
-            module.weight.data.fill_(1.0)
+            if module.bias is not None:
+                module.bias.data.zero_()
+            if module.weight is not None:
+                module.weight.data.fill_(1.0)


 class LtgbertModel(LtgbertPreTrainedModel):
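
For context, a minimal sketch of why the guard is needed (the helper name init_layer_norm is hypothetical, not from this repo; it assumes standard PyTorch semantics where nn.LayerNorm(..., elementwise_affine=False) leaves both weight and bias as None, so the unguarded .data.zero_() / .data.fill_(1.0) calls raise AttributeError):

import torch.nn as nn

def init_layer_norm(module: nn.Module) -> None:
    # Mirror of the guarded fix above: only touch parameters that exist.
    if isinstance(module, nn.LayerNorm):
        if module.bias is not None:
            module.bias.data.zero_()
        if module.weight is not None:
            module.weight.data.fill_(1.0)

ln_plain = nn.LayerNorm(16, elementwise_affine=False)  # weight and bias are None
ln_affine = nn.LayerNorm(16)                            # weight and bias are Parameters
for ln in (ln_plain, ln_affine):
    init_layer_norm(ln)  # runs without AttributeError in both cases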