Skip to content

Commit

Permalink
Make the model backward compatible with serialization version 1; unit tests pass locally.
Browse files Browse the repository at this point in the history
  • Loading branch information
Han Wang committed Apr 7, 2024
1 parent c490fed commit 4b79de9
Show file tree
Hide file tree
Showing 5 changed files with 51 additions and 16 deletions.
37 changes: 28 additions & 9 deletions deepmd/pt/model/atomic_model/base_atomic_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,12 +93,8 @@ def init_out_stat(self):
[self.atomic_output_def()[kk].size for kk in self.bias_keys]
)
self.n_out = len(self.bias_keys)
out_bias_data = torch.zeros(
[self.n_out, ntypes, self.max_out_size], dtype=dtype, device=device
)
out_std_data = torch.ones(
[self.n_out, ntypes, self.max_out_size], dtype=dtype, device=device
)
out_bias_data = self._default_bias()
out_std_data = self._default_std()
self.register_buffer("out_bias", out_bias_data)
self.register_buffer("out_std", out_std_data)

Expand Down Expand Up @@ -272,10 +268,21 @@ def serialize(self) -> dict:
@classmethod
def deserialize(cls, data: dict) -> "BaseAtomicModel":
    """Deserialize an atomic model from a plain ``dict``.

    Parameters
    ----------
    data : dict
        Serialized model data. A version-2 payload carries an
        ``@variables`` entry holding ``out_bias`` and ``out_std`` arrays;
        a version-1 payload omits it, in which case default zero-bias and
        unit-std buffers are installed instead.

    Returns
    -------
    BaseAtomicModel
        The reconstructed atomic model.
    """
    data = copy.deepcopy(data)
    # Version-1 data has no "@variables" key; substitute None placeholders
    # so the per-key defaulting below kicks in.
    variables = data.pop("@variables", None)
    if variables is None:
        variables = {"out_bias": None, "out_std": None}
    obj = cls(**data)
    # Fall back to the default buffers for any statistic the payload did
    # not provide (backward compatibility with version-1 models).
    obj["out_bias"] = (
        to_torch_tensor(variables["out_bias"])
        if variables["out_bias"] is not None
        else obj._default_bias()
    )
    obj["out_std"] = (
        to_torch_tensor(variables["out_std"])
        if variables["out_std"] is not None
        else obj._default_std()
    )
    return obj

def compute_or_load_stat(
Expand Down Expand Up @@ -432,6 +439,18 @@ def model_forward(coord, atype, box, fparam=None, aparam=None):

return model_forward

def _default_bias(self):
    """Return the default (all-zero) output-bias buffer.

    Shape is ``[n_out, ntypes, max_out_size]``; ``dtype``/``device`` are
    the module-level defaults.
    """
    shape = [self.n_out, self.get_ntypes(), self.max_out_size]
    return torch.zeros(shape, dtype=dtype, device=device)

def _default_std(self):
    """Return the default (all-one) output-std buffer.

    Shape is ``[n_out, ntypes, max_out_size]``; ``dtype``/``device`` are
    the module-level defaults.
    """
    shape = [self.n_out, self.get_ntypes(), self.max_out_size]
    return torch.ones(shape, dtype=dtype, device=device)

def _varsize(
self,
shape: List[int],
Expand Down
2 changes: 1 addition & 1 deletion deepmd/pt/model/atomic_model/dp_atomic_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def serialize(self) -> dict:
@classmethod
def deserialize(cls, data) -> "DPAtomicModel":
data = copy.deepcopy(data)
check_version_compatibility(data.pop("@version", 2), 2, 2)
check_version_compatibility(data.pop("@version", 2), 2, 1)
data.pop("@class", None)
data.pop("type", None)
descriptor_obj = BaseDescriptor.deserialize(data.pop("descriptor"))
Expand Down
4 changes: 2 additions & 2 deletions deepmd/pt/model/atomic_model/linear_atomic_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,7 @@ def serialize(self) -> dict:
@classmethod
def deserialize(cls, data: dict) -> "LinearEnergyAtomicModel":
data = copy.deepcopy(data)
check_version_compatibility(data.get("@version", 2), 2, 2)
check_version_compatibility(data.get("@version", 2), 2, 1)
data.pop("@class", None)
data.pop("type", None)
models = [
Expand Down Expand Up @@ -433,7 +433,7 @@ def serialize(self) -> dict:
@classmethod
def deserialize(cls, data) -> "DPZBLLinearEnergyAtomicModel":
data = copy.deepcopy(data)
check_version_compatibility(data.pop("@version", 2), 2, 2)
check_version_compatibility(data.pop("@version", 2), 2, 1)
models = [
BaseAtomicModel.get_class_by_type(model["type"]).deserialize(model)
for model in data["models"]
Expand Down
2 changes: 1 addition & 1 deletion deepmd/pt/model/atomic_model/pairtab_atomic_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ def serialize(self) -> dict:
@classmethod
def deserialize(cls, data) -> "PairTabAtomicModel":
data = copy.deepcopy(data)
check_version_compatibility(data.pop("@version", 1), 2, 2)
check_version_compatibility(data.pop("@version", 1), 2, 1)
tab = PairTab.deserialize(data.pop("tab"))
data.pop("@class", None)
data.pop("type", None)
Expand Down
22 changes: 19 additions & 3 deletions deepmd/tf/model/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
Union,
)

import numpy as np

from deepmd.common import (
j_get_type,
)
Expand Down Expand Up @@ -785,11 +787,16 @@ def deserialize(cls, data: dict, suffix: str = "") -> "Descriptor":
The deserialized descriptor
"""
data = copy.deepcopy(data)
check_version_compatibility(data.pop("@version", 1), 1, 1)
check_version_compatibility(data.pop("@version", 2), 2, 1)
descriptor = Descriptor.deserialize(data.pop("descriptor"), suffix=suffix)
fitting = Fitting.deserialize(data.pop("fitting"), suffix=suffix)
# BEGIN not supported keys
data.pop("atom_exclude_types")
data.pop("pair_exclude_types")
data.pop("rcond", None)
data.pop("preset_out_bias", None)
data.pop("@variables", None)
# END not supported keys
return cls(
descriptor=descriptor,
fitting_net=fitting,
Expand All @@ -813,14 +820,23 @@ def serialize(self, suffix: str = "") -> dict:
raise NotImplementedError("type embedding is not supported")
if self.spin is not None:
raise NotImplementedError("spin is not supported")

ntypes = len(self.get_type_map())
dict_fit = self.fitting.serialize(suffix=suffix)
return {
"@class": "Model",
"type": "standard",
"@version": 1,
"@version": 2,
"type_map": self.type_map,
"descriptor": self.descrpt.serialize(suffix=suffix),
"fitting": self.fitting.serialize(suffix=suffix),
"fitting": dict_fit,
# not supported yet
"atom_exclude_types": [],
"pair_exclude_types": [],
"rcond": None,
"preset_out_bias": None,
"@variables": {
"out_bias": np.zeros([1, ntypes, dict_fit["dim_out"]]),
"out_std": np.ones([1, ntypes, dict_fit["dim_out"]]),
},
}

0 comments on commit 4b79de9

Please sign in to comment.