
[Backport r0.8] Fix LAMB optimizer regex parsing (#1555)
* Fix LAMB optimizer regex parsing

* Fix conflict.

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: gabrieldemarmiesse <[email protected]>
3 people authored Apr 3, 2020
1 parent a9e86e8 commit 8c2d84f
Showing 2 changed files with 19 additions and 5 deletions.
10 changes: 5 additions & 5 deletions tensorflow_addons/optimizers/lamb.py
@@ -19,7 +19,7 @@
 """
 
 import re
-from typing import Optional, Union, Callable
+from typing import Optional, Union, Callable, List
 from typeguard import typechecked
 
 import tensorflow as tf
@@ -42,8 +42,8 @@ def __init__(
         beta_2: FloatTensorLike = 0.999,
         epsilon: FloatTensorLike = 1e-6,
         weight_decay_rate: FloatTensorLike = 0.0,
-        exclude_from_weight_decay: Optional[str] = None,
-        exclude_from_layer_adaptation: Optional[str] = None,
+        exclude_from_weight_decay: Optional[List[str]] = None,
+        exclude_from_layer_adaptation: Optional[List[str]] = None,
         name: str = "LAMB",
         **kwargs
     ):
@@ -59,10 +59,10 @@ def __init__(
                 The exponential decay rate for the 2nd moment estimates.
             epsilon: A small constant for numerical stability.
             weight_decay_rate: weight decay rate.
-            exclude_from_weight_decay: comma separated name patterns of
+            exclude_from_weight_decay: List of regex patterns of
                 variables excluded from weight decay. Variables whose name
                 contain a substring matching the pattern will be excluded.
-            exclude_from_layer_adaptation: comma separated name patterns of
+            exclude_from_layer_adaptation: List of regex patterns of
                 variables excluded from layer adaptation. Variables whose name
                 contain a substring matching the pattern will be excluded.
             name: Optional name for the operations created when applying
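
For context on the change above: each entry in exclude_from_weight_decay is now treated as its own regex, matched against a substring of the variable name via re.search. The helper below is a minimal stand-in sketch of that matching logic, not the optimizer's actual internal method:

import re
from typing import List, Optional

def should_apply(param_name: str, exclude_patterns: Optional[List[str]]) -> bool:
    # Apply weight decay / layer adaptation unless some pattern matches
    # a substring of the variable name. re.search (rather than re.match)
    # gives the substring semantics the docstring describes, so "var1"
    # also excludes "var1_weight".
    if exclude_patterns:
        for pattern in exclude_patterns:
            if re.search(pattern, param_name) is not None:
                return False
    return True

assert should_apply("var0", ["var1"])             # no match: decay applied
assert not should_apply("var1", ["var1"])         # match: excluded
assert not should_apply("var1_weight", ["var1"])  # substring match: excluded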
14 changes: 14 additions & 0 deletions tensorflow_addons/optimizers/lamb_test.py
@@ -401,6 +401,20 @@ def test_get_config(self):
         config = opt.get_config()
         self.assertEqual(config["learning_rate"], 1e-4)
 
+    def test_exclude_weight_decay(self):
+        opt = lamb.LAMB(
+            0.01, weight_decay_rate=0.01, exclude_from_weight_decay=["var1"]
+        )
+        assert opt._do_use_weight_decay("var0")
+        assert not opt._do_use_weight_decay("var1")
+        assert not opt._do_use_weight_decay("var1_weight")
+
+    def test_exclude_layer_adaptation(self):
+        opt = lamb.LAMB(0.01, exclude_from_layer_adaptation=["var1"])
+        assert opt._do_layer_adaptation("var0")
+        assert not opt._do_layer_adaptation("var1")
+        assert not opt._do_layer_adaptation("var1_weight")
+
 
 if __name__ == "__main__":
     tf.test.main()
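
Usage note: after this change, the exclusion arguments take a list of regex patterns rather than a comma-separated string. A minimal construction sketch (the pattern names here are illustrative, not from the commit):

from tensorflow_addons.optimizers import LAMB

# Exclude LayerNorm parameters and biases from weight decay, a common
# setup when training BERT-style models with LAMB.
optimizer = LAMB(
    learning_rate=1e-3,
    weight_decay_rate=0.01,
    exclude_from_weight_decay=["layer_norm", "bias"],
)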
