fix embedding layernorm logic and default in config -- corrected
nband committed Apr 25, 2022
1 parent ab90fed commit a399360
Showing 2 changed files with 3 additions and 6 deletions.
npt/configs.py (1 addition, 1 deletion)
@@ -483,7 +483,7 @@ def build_parser():
     # #### Normalization ######################################################
 
     parser.add_argument(
-        '--model_embedding_layer_norm', default=True, type='bool',
+        '--model_embedding_layer_norm', default=False, type='bool',
         help='(Disable) use of layer normalization after in-/out-embedding.')
     parser.add_argument(
         '--model_att_block_layer_norm', default=True, type='bool',
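With the flipped default, the embedding-side LayerNorm is now opt-in rather than opt-out. Note that type='bool' here is a string, not the built-in bool: argparse only accepts it if a converter has been registered under that name elsewhere in the file. A minimal, self-contained sketch of how such a flag behaves with the corrected default (the str2bool helper and the register call are illustrative assumptions, not code from this commit):

    import argparse

    def str2bool(v):
        # Map common string spellings onto booleans; using the built-in
        # bool as type= would make bool('False') truthy, hence a converter.
        if isinstance(v, bool):
            return v
        if v.lower() in ('yes', 'true', 't', 'y', '1'):
            return True
        if v.lower() in ('no', 'false', 'f', 'n', '0'):
            return False
        raise argparse.ArgumentTypeError('Boolean value expected.')

    parser = argparse.ArgumentParser()
    # Registering a converter under the name 'bool' is what makes the
    # string type='bool' in add_argument resolve to str2bool.
    parser.register('type', 'bool', str2bool)
    parser.add_argument(
        '--model_embedding_layer_norm', default=False, type='bool',
        help='(Disable) use of layer normalization after in-/out-embedding.')

    args = parser.parse_args([])  # embedding LayerNorm now defaults to off
    assert args.model_embedding_layer_norm is False
    args = parser.parse_args(['--model_embedding_layer_norm', 'True'])
    assert args.model_embedding_layer_norm is True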
npt/model/npt.py (2 additions, 5 deletions)
@@ -142,11 +142,11 @@ def __init__(self, c, metadata, device=None):
             if c.model_hidden_dropout_prob else None)
 
         # LayerNorm applied after embedding, before dropout
-        if self.c.embedding_layer_norm and device is None:
+        if self.c.model_embedding_layer_norm and device is None:
             print(
                 'Must provide a device in NPT initialization with embedding '
                 'LayerNorm.')
-        elif self.c.embedding_layer_norm:
+        elif self.c.model_embedding_layer_norm:
             # we batch over rows and columns
             # (i.e. just normalize over E)
             layer_norm_dims = [self.dim_hidden]
@@ -155,9 +155,6 @@
         else:
             self.embedding_layer_norm = None
 
-        if True:
-            a = 1
-
         # *** Input In/Out Embeddings ***
         # Don't use for Image Patching - those are handled by the respective
         # init_image_patching
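The rename matters because the config object exposes the flag under the name defined in configs.py, model_embedding_layer_norm; the old spelling embedding_layer_norm would never be found on the config, so the LayerNorm branch was unreachable. The deleted if True: a = 1 block was dead debugging code. The lines that actually build the LayerNorm sit in the collapsed region between the two hunks, so the following is only a sketch of the corrected branch, with a hypothetical helper name and an assumed nn.LayerNorm construction:

    import torch.nn as nn

    def build_embedding_layer_norm(c, dim_hidden, device):
        # Hypothetical helper mirroring the corrected branch in NPT.__init__.
        if c.model_embedding_layer_norm and device is None:
            print('Must provide a device in NPT initialization with embedding '
                  'LayerNorm.')
            return None
        if c.model_embedding_layer_norm:
            # Rows and columns act as batch dimensions, so normalization
            # runs over the hidden dimension E alone.
            return nn.LayerNorm([dim_hidden]).to(device)  # assumed construction
        return None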
