Skip to content

Commit

Permalink
remove the dependency on inputs from inputs_zero (#1417)
Browse files Browse the repository at this point in the history
* remove the dependency on `inputs` from `inputs_zero`

When profiling on multiple CPU threads, I noticed that `inputs_zero` always runs after the descriptor is computed. However, some threads have nothing to do during ProdEnvMatA.

(cherry picked from commit e704b4a)

* fix UT

* revert changes to the interface of the EnerFitting

(cherry picked from commit 4f95d01)

* Revert "fix UT"

This reverts commit c43ac3a.

* fix typo

(cherry picked from commit a4c32c7)

* initialize input_dict

* init input_dict
  • Loading branch information
njzjz authored Jan 15, 2022
1 parent d7795ca commit b88c1da
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 8 deletions.
16 changes: 10 additions & 6 deletions deepmd/fit/ener.py
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,7 @@ def _build_lower(
def build (self,
inputs : tf.Tensor,
natoms : tf.Tensor,
input_dict : dict = {},
input_dict : dict = None,
reuse : bool = None,
suffix : str = '',
) -> tf.Tensor:
Expand Down Expand Up @@ -362,6 +362,8 @@ def build (self,
ener
The system energy
"""
if input_dict is None:
input_dict = {}
bias_atom_e = self.bias_atom_e
if self.numb_fparam > 0 and ( self.fparam_avg is None or self.fparam_inv_std is None ):
raise RuntimeError('No data stat result. one should do data statisitic, before build')
Expand Down Expand Up @@ -401,7 +403,12 @@ def build (self,
inputs = tf.reshape(inputs, [-1, self.dim_descrpt * natoms[0]])
if len(self.atom_ener):
# only for atom_ener
inputs_zero = tf.zeros_like(inputs, dtype=self.fitting_precision)
nframes = input_dict.get('nframes')
if nframes is not None:
# like inputs, but we don't want to add a dependency on inputs
inputs_zero = tf.zeros((nframes, self.dim_descrpt * natoms[0]), dtype=self.fitting_precision)
else:
inputs_zero = tf.zeros_like(inputs, dtype=self.fitting_precision)


if bias_atom_e is not None :
Expand All @@ -419,10 +426,7 @@ def build (self,
aparam = (aparam - t_aparam_avg) * t_aparam_istd
aparam = tf.reshape(aparam, [-1, self.numb_aparam * natoms[0]])

if input_dict is not None:
type_embedding = input_dict.get('type_embedding', None)
else:
type_embedding = None
type_embedding = input_dict.get('type_embedding', None)
if type_embedding is not None:
atype_embed = embed_atom_type(self.ntypes, natoms, type_embedding)
atype_embed = tf.tile(atype_embed,[tf.shape(inputs)[0],1])
Expand Down
7 changes: 5 additions & 2 deletions deepmd/model/ener.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,9 @@ def build (self,
frz_model = None,
suffix = '',
reuse = None):


if input_dict is None:
input_dict = {}
with tf.variable_scope('model_attr' + suffix, reuse = reuse) :
t_tmap = tf.constant(' '.join(self.type_map),
name = 'tmap',
Expand All @@ -144,6 +146,7 @@ def build (self,

coord = tf.reshape (coord_, [-1, natoms[1] * 3])
atype = tf.reshape (atype_, [-1, natoms[1]])
input_dict['nframes'] = tf.shape(coord)[0]

# type embedding if any
if self.typeebd is not None:
Expand Down Expand Up @@ -270,4 +273,4 @@ def build (self,

def _import_graph_def_from_frz_model(self, frz_model, feed_dict, return_elements):
graph, graph_def = load_graph_def(frz_model)
return tf.import_graph_def(graph_def, input_map = feed_dict, return_elements = return_elements, name = "")
return tf.import_graph_def(graph_def, input_map = feed_dict, return_elements = return_elements, name = "")

0 comments on commit b88c1da

Please sign in to comment.