Skip to content

Commit

Permalink
fix bug in tf tensor loss
Browse files Browse the repository at this point in the history
  • Loading branch information
ChiahsinChu committed Jan 6, 2025
1 parent 8d4c27b commit e214ee9
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 2 deletions.
4 changes: 2 additions & 2 deletions deepmd/tf/loss/tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ def label_requirement(self) -> list[DataRequirementItem]:
# data required
data_requirements.append(
DataRequirementItem(
"atom_" + self.label_name,
"atomic_" + self.label_name,
self.tensor_size,
atomic=True,
must=False,
Expand All @@ -176,7 +176,7 @@ def label_requirement(self) -> list[DataRequirementItem]:
if self.enable_atomic_weight:
data_requirements.append(
DataRequirementItem(
"atom_weight",
"atomic_weight",
1,
atomic=True,
must=False,
Expand Down
7 changes: 7 additions & 0 deletions deepmd/tf/train/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,6 +281,13 @@ def _build_network(self, data, suffix="") -> None:
tf.int32, [None], name="t_mesh"
)
self.place_holders["is_training"] = tf.placeholder(tf.bool)
# rename keys in self.place_holders: replace the "atomic_" prefix with "atom_"
for kk in list(self.place_holders.keys()):
if "atomic_" in kk:
self.place_holders[kk.replace("atomic_", "atom_")] = (
self.place_holders.pop(kk)
)

self.model_pred = self.model.build(
self.place_holders["coord"],
self.place_holders["type"],
Expand Down

0 comments on commit e214ee9

Please sign in to comment.