feat: add seeds to dpmodel and fix seeds in tf & pt #3880

Merged · 14 commits · Jun 19, 2024
21 changes: 19 additions & 2 deletions deepmd/dpmodel/descriptor/dpa1.py
@@ -294,6 +294,7 @@ def __init__(
env_protection=env_protection,
trainable_ln=trainable_ln,
ln_eps=ln_eps,
seed=seed,
)
self.use_econf_tebd = use_econf_tebd
self.type_map = type_map
@@ -305,6 +306,7 @@ def __init__(
precision=precision,
use_econf_tebd=use_econf_tebd,
type_map=type_map,
seed=seed + len(neuron) * 2 + attn_layer * 3 if seed is not None else None,
)
self.tebd_dim = tebd_dim
self.concat_output_tebd = concat_output_tebd
@@ -625,6 +627,7 @@ def __init__(
trainable_ln: bool = True,
ln_eps: Optional[float] = 1e-5,
smooth: bool = True,
seed: Optional[int] = None,
) -> None:
self.rcut = rcut
self.rcut_smth = rcut_smth
@@ -674,6 +677,7 @@ def __init__(
self.activation_function,
self.resnet_dt,
self.precision,
seed=seed,
)
if self.tebd_input_mode in ["strip"]:
self.embeddings_strip = NetworkCollection(
@@ -687,6 +691,7 @@ def __init__(
self.activation_function,
self.resnet_dt,
self.precision,
seed=seed + len(self.neuron) if seed is not None else None,
)
else:
self.embeddings_strip = None
@@ -703,6 +708,7 @@ def __init__(
ln_eps=self.ln_eps,
smooth=self.smooth,
precision=self.precision,
seed=seed + len(self.neuron) * 2 if seed is not None else None,
)

wanted_shape = (self.ntypes, self.nnei, 4)
@@ -950,6 +956,7 @@ def __init__(
ln_eps: float = 1e-5,
smooth: bool = True,
precision: str = DEFAULT_PRECISION,
seed: Optional[int] = None,
):
"""Construct a neighbor-wise attention net."""
super().__init__()
@@ -982,8 +989,9 @@ def __init__(
ln_eps=ln_eps,
smooth=smooth,
precision=precision,
seed=seed + ii * 3 if seed is not None else None,
)
for _ in range(layer_num)
for ii in range(layer_num)
]

def call(
@@ -1076,6 +1084,7 @@ def __init__(
ln_eps: float = 1e-5,
smooth: bool = True,
precision: str = DEFAULT_PRECISION,
seed: Optional[int] = None,
):
"""Construct a neighbor-wise attention layer."""
super().__init__()
@@ -1101,9 +1110,14 @@ def __init__(
temperature=temperature,
smooth=smooth,
precision=precision,
seed=seed,
)
self.attn_layer_norm = LayerNorm(
self.embed_dim, eps=ln_eps, trainable=self.trainable_ln, precision=precision
self.embed_dim,
eps=ln_eps,
trainable=self.trainable_ln,
precision=precision,
seed=seed + 2 if seed is not None else None,
)

def call(
@@ -1176,6 +1190,7 @@ def __init__(
bias: bool = True,
smooth: bool = True,
precision: str = DEFAULT_PRECISION,
seed: Optional[int] = None,
):
"""Construct a multi-head neighbor-wise attention net."""
super().__init__()
@@ -1204,13 +1219,15 @@ def __init__(
bias=bias,
use_timestep=False,
precision=precision,
seed=seed,
)
self.out_proj = NativeLayer(
hidden_dim,
embed_dim,
bias=bias,
use_timestep=False,
precision=precision,
seed=seed + 1 if seed is not None else None,
)

def call(self, query, nei_mask, input_r=None, sw=None, attnw_shift=20.0):
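The change in this file follows one idiom throughout: a single top-level `seed` is threaded into every sub-network, and sibling sub-networks get fixed integer offsets (`seed + len(neuron)`, `seed + len(neuron) * 2`, `seed + ii * 3`, ...) so that no two of them draw their initial weights from the same RNG stream, while `seed=None` keeps everything unseeded. A minimal sketch of that idiom is below; the `_child_seed` helper and the `ToyAttentionStack` class are hypothetical illustrations of the pattern, not part of the deepmd API.

```python
from typing import Optional

import numpy as np


def _child_seed(seed: Optional[int], offset: int) -> Optional[int]:
    """Derive a deterministic child seed; keep None unseeded (hypothetical helper)."""
    return None if seed is None else seed + offset


class ToyAttentionStack:
    """Illustration of the offset scheme: each attention layer owns three
    seeded pieces (in_proj, out_proj, layer norm), so sibling layers are
    spaced 3 apart, mirroring ``seed + ii * 3`` in the diff above."""

    def __init__(self, layer_num: int, seed: Optional[int] = None) -> None:
        self.layers = [
            {
                "in_proj": np.random.default_rng(_child_seed(seed, ii * 3)),
                "out_proj": np.random.default_rng(_child_seed(seed, ii * 3 + 1)),
                "layer_norm": np.random.default_rng(_child_seed(seed, ii * 3 + 2)),
            }
            for ii in range(layer_num)
        ]


# Two stacks built with the same seed produce identical initializations.
a = ToyAttentionStack(layer_num=2, seed=20240619)
b = ToyAttentionStack(layer_num=2, seed=20240619)
assert np.allclose(
    a.layers[0]["in_proj"].standard_normal(4),
    b.layers[0]["in_proj"].standard_normal(4),
)
```

The step of 3 per attention layer mirrors the diff above: each `NeighborGatedAttentionLayer` holds the attention projection (`seed`), its output projection (`seed + 1`), and its layer norm (`seed + 2`).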
8 changes: 8 additions & 0 deletions deepmd/dpmodel/descriptor/dpa2.py
@@ -408,6 +408,7 @@ def init_subclass_params(sub_data, sub_class):
resnet_dt=self.repinit_args.resnet_dt,
smooth=smooth,
type_one_side=self.repinit_args.type_one_side,
seed=seed,
)
self.repformers = DescrptBlockRepformers(
self.repformer_args.rcut,
@@ -442,6 +443,7 @@ def init_subclass_params(sub_data, sub_class):
precision=precision,
trainable_ln=self.repformer_args.trainable_ln,
ln_eps=self.repformer_args.ln_eps,
seed=seed + len(self.repinit_args.neuron) * 2 if seed is not None else None,
)
self.use_econf_tebd = use_econf_tebd
self.type_map = type_map
@@ -453,6 +455,12 @@ def init_subclass_params(sub_data, sub_class):
precision=precision,
use_econf_tebd=use_econf_tebd,
type_map=type_map,
seed=seed
+ len(self.repinit_args.neuron) * 2
+ 1
+ self.repformer_args.nlayers * 14
if seed is not None
else None,
)
self.concat_output_tebd = concat_output_tebd
self.precision = precision
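dpa2.py applies the same idiom across whole blocks: `repinit` starts at the base seed, `repformers` start after the `2 * len(neuron)` seeds the repinit embeddings may consume, and the type-embedding net starts a further `1 + 14 * nlayers` later, presumably one slot per seeded sub-network inside each repformer layer. A hedged sketch of that bookkeeping follows; `block_seeds` and `_child_seed` are hypothetical names used only for illustration.

```python
from typing import List, Optional


def _child_seed(seed: Optional[int], offset: int) -> Optional[int]:
    """Derive a deterministic child seed; propagate None unchanged (hypothetical helper)."""
    return None if seed is None else seed + offset


def block_seeds(
    seed: Optional[int], repinit_neuron: List[int], repformer_nlayers: int
) -> dict:
    """Mirror the offset arithmetic in the diff above: repinit uses the base
    seed, repformers start after the ~2*len(neuron) seeds repinit may consume,
    and the type-embedding net starts after one extra slot plus 14 seeds per
    repformer layer."""
    repinit_budget = 2 * len(repinit_neuron)
    return {
        "repinit": seed,
        "repformers": _child_seed(seed, repinit_budget),
        "type_embedding": _child_seed(
            seed, repinit_budget + 1 + 14 * repformer_nlayers
        ),
    }


# Example: with neuron=[25, 50, 100], 6 repformer layers, and seed=1,
# the type-embedding net is seeded with 1 + 6 + 1 + 84 = 92.
print(block_seeds(1, [25, 50, 100], 6))
```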