
Commit

Support BERTSQuAD and BERTNER (intel-analytics#1401)
* add bert squad code

* modify grad clip

* revert

* fix style

* init ner

* update examples

* minor update

* remove examples

* revert adamweightdecay

* update

* add ut
hkvision authored Jun 11, 2019
1 parent 5a97e76 commit 5a49ee0
Showing 1 changed file with 6 additions and 3 deletions.
9 changes: 6 additions & 3 deletions pyspark/bigdl/dllib/inference/net/tf_optimizer.py
@@ -224,8 +224,9 @@ def _get_arguments_from_loss(loss, optim_method, session, val_outputs, val_label
     variables = []
     grads = []
     for (grad, var) in grads_vars:
-        variables.append(var)
-        grads.append(grad)
+        if grad is not None:
+            variables.append(var)
+            grads.append(grad)
 
     all_required_inputs = _find_placeholders([loss])
     dataset = tf.get_collection(all_required_inputs[0].name)[0]
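
The first hunk skips (grad, var) pairs whose gradient is None before handing the lists to the BigDL optimizer. Below is a minimal sketch, assuming TensorFlow 1.x (as BigDL used at the time) and illustrative variable names, of why this filter is needed: compute_gradients returns a pair for every trainable variable, and the gradient is None for variables that do not feed into the loss.

# Minimal sketch (assumes TensorFlow 1.x; variable names are illustrative).
# compute_gradients() returns (grad, var) pairs for all trainable variables,
# and grad is None for variables that do not contribute to the loss.
import tensorflow as tf

used = tf.Variable(1.0, name="used")
unused = tf.Variable(2.0, name="unused")   # never touches the loss
loss = tf.square(used)

grads_vars = tf.train.GradientDescentOptimizer(0.1).compute_gradients(loss)

variables, grads = [], []
for grad, var in grads_vars:
    if grad is not None:                   # same filter as the patched code
        variables.append(var)
        grads.append(grad)

print([v.op.name for v in variables])      # only 'used' survives the filter
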
@@ -327,7 +328,9 @@ def to_bigdl_optim_method(koptim_method):
     if isinstance(koptim_method, TFOptimizer):
         koptim_method = koptim_method.optimizer
 
-    if isinstance(koptim_method, koptimizers.Optimizer):
+    if isinstance(koptim_method, boptimizer.OptimMethod):
+        return koptim_method
+    elif isinstance(koptim_method, koptimizers.Optimizer):
         lr = float(K.eval(koptim_method.lr))
         decay = float(K.eval(koptim_method.decay))
         if isinstance(koptim_method, koptimizers.Adagrad):
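
The second hunk lets to_bigdl_optim_method pass an existing BigDL OptimMethod straight through instead of only converting Keras optimizers, which is useful when callers supply their own optimizer. A rough sketch of that dispatch pattern, using hypothetical stand-in classes rather than the real bigdl/keras imports:

# Rough sketch; OptimMethod and KerasOptimizer are hypothetical stand-ins for
# boptimizer.OptimMethod and keras.optimizers.Optimizer.
class OptimMethod:
    """Stand-in for a BigDL optim method (e.g. a custom AdamWeightDecay)."""

class KerasOptimizer:
    """Stand-in for a Keras optimizer that still needs conversion."""
    lr, decay = 0.001, 0.0

def to_bigdl_optim_method(koptim_method):
    if isinstance(koptim_method, OptimMethod):
        return koptim_method                      # already BigDL: return as-is
    elif isinstance(koptim_method, KerasOptimizer):
        # the real code maps lr/decay onto the matching BigDL OptimMethod here
        raise NotImplementedError("conversion branch elided in this sketch")
    raise TypeError("unsupported optimizer type: %s" % type(koptim_method))

custom_optim = OptimMethod()
assert to_bigdl_optim_method(custom_optim) is custom_optim
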
