Commit

Merge pull request #16 from donatasrep/master
Exclude AdamWeightDecayOptimizer internal variables from being restored
thomwolf authored Nov 13, 2018
2 parents 278fd28 + 20d07b3 commit 5cd8d7a
Showing 1 changed file with 9 additions and 3 deletions.

convert_tf_checkpoint_to_pytorch.py (12 changes: 9 additions & 3 deletions; file mode 100644 → 100755)
@@ -68,11 +68,17 @@ def convert():
         arrays.append(array)
 
     for name, array in zip(names, arrays):
-        name = name[5:]  # skip "bert/"
+        if not name.startswith("bert"):
+            print("Skipping {}".format(name))
+            continue
+        else:
+            name = name.replace("bert/", "")  # skip the "bert/" prefix
         print("Loading {}".format(name))
         name = name.split('/')
-        if name[0] in ['redictions', 'eq_relationship']:
-            print("Skipping")
+        # adam_v and adam_m are variables used by AdamWeightDecayOptimizer to
+        # calculate m and v; they are not required when using the pretrained model
+        if name[0] in ['redictions', 'eq_relationship'] or name[-1] == "adam_v" or name[-1] == "adam_m":
+            print("Skipping {}".format("/".join(name)))
             continue
         pointer = model
         for m_name in name:
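
For context: adam_m and adam_v are Adam's first- and second-moment slot variables, which AdamWeightDecayOptimizer stores alongside each weight in the TF checkpoint; they are only needed to resume training, not for inference. Below is a minimal standalone sketch of the same filter, not code from this repository, assuming TensorFlow is installed and using a hypothetical checkpoint prefix bert_model.ckpt:

import tensorflow as tf

tf_path = "bert_model.ckpt"  # hypothetical TF checkpoint prefix

for name, shape in tf.train.list_variables(tf_path):
    # Optimizer slot variables end in "adam_m" / "adam_v"; skip them, since
    # only the model weights themselves are needed for inference.
    if name.split("/")[-1] in ("adam_m", "adam_v"):
        print("Skipping optimizer slot {}".format(name))
        continue
    array = tf.train.load_variable(tf_path, name)  # returns a numpy array
    print("Loading {} with shape {}".format(name, shape))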
