Commit 0255ad6

Merge 8cff4ec into b555a5d
dakshvar22 authored Dec 22, 2020
2 parents: b555a5d + 8cff4ec
Showing 1 changed file with 27 additions and 4 deletions.

rasa/utils/tensorflow/layers.py — 31 changes: 27 additions & 4 deletions
@@ -853,17 +853,32 @@ def _loss_softmax(
     ) -> tf.Tensor:
         """Define softmax loss."""
 
-        logits = tf.concat(
+        softmax_logits = tf.concat([sim_pos, sim_neg_il, sim_neg_li], axis=-1)
+
+        sigmoid_logits = tf.concat(
             [sim_pos, sim_neg_il, sim_neg_ll, sim_neg_ii, sim_neg_li], axis=-1
         )
 
         # create label_ids for softmax
-        label_ids = tf.zeros_like(logits[..., 0], tf.int32)
+        softmax_label_ids = tf.zeros_like(softmax_logits[..., 0], tf.int32)
+
+        sigmoid_labels = tf.concat(
+            [
+                tf.expand_dims(tf.ones_like(sigmoid_logits[..., 0], tf.float32), -1),
+                tf.zeros_like(sigmoid_logits[..., 1:], tf.float32),
+            ],
+            axis=-1,
+        )
 
-        loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
-            labels=label_ids, logits=logits
+        softmax_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
+            labels=softmax_label_ids, logits=softmax_logits
         )
+        sigmoid_loss = tf.nn.sigmoid_cross_entropy_with_logits(
+            labels=sigmoid_labels, logits=sigmoid_logits
+        )
+
+        loss = softmax_loss + tf.reduce_mean(sigmoid_loss, axis=-1)
 
         if self.scale_loss:
             # in case of cross entropy log_likelihood = -loss
             loss *= _scale_loss(-loss)
@@ -878,6 +893,14 @@ def _loss_softmax(
         else:
             loss = tf.reduce_mean(loss, axis=-1)
 
+        tf.print(
+            tf.reduce_mean(sim_pos),
+            tf.reduce_mean(sim_neg_ii),
+            tf.reduce_mean(sim_neg_il),
+            tf.reduce_mean(sim_neg_ll),
+            tf.reduce_mean(sim_neg_li),
+        )
+
         # average the loss over the batch
         return tf.reduce_mean(loss)
 
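For context, below is a minimal eager-mode sketch of the loss computation this diff introduces, run on random dummy similarity tensors. The shapes, batch_size, num_neg, and the semantic comments on each sim_* tensor are assumptions for illustration only; in the real code these tensors are produced by the surrounding loss layer in layers.py, and loss scaling (self.scale_loss) is omitted here.

# Hedged sketch of the combined softmax + sigmoid loss from the diff above.
# batch_size and num_neg are illustrative assumptions, not values from Rasa.
import tensorflow as tf

batch_size, num_neg = 8, 5
sim_pos = tf.random.normal((batch_size, 1))           # similarity to the positive label (assumed meaning)
sim_neg_il = tf.random.normal((batch_size, num_neg))  # input vs. negative labels (assumed meaning)
sim_neg_ll = tf.random.normal((batch_size, num_neg))  # positive label vs. negative labels (assumed meaning)
sim_neg_ii = tf.random.normal((batch_size, num_neg))  # input vs. negative inputs (assumed meaning)
sim_neg_li = tf.random.normal((batch_size, num_neg))  # positive label vs. negative inputs (assumed meaning)

# Softmax term: rank the positive (index 0) above the cross negatives only.
softmax_logits = tf.concat([sim_pos, sim_neg_il, sim_neg_li], axis=-1)
softmax_label_ids = tf.zeros_like(softmax_logits[..., 0], tf.int32)
softmax_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
    labels=softmax_label_ids, logits=softmax_logits
)

# Sigmoid term: binary target 1 for the positive pair, 0 for every negative,
# over all five similarity blocks.
sigmoid_logits = tf.concat(
    [sim_pos, sim_neg_il, sim_neg_ll, sim_neg_ii, sim_neg_li], axis=-1
)
sigmoid_labels = tf.concat(
    [
        tf.expand_dims(tf.ones_like(sigmoid_logits[..., 0], tf.float32), -1),
        tf.zeros_like(sigmoid_logits[..., 1:], tf.float32),
    ],
    axis=-1,
)
sigmoid_loss = tf.nn.sigmoid_cross_entropy_with_logits(
    labels=sigmoid_labels, logits=sigmoid_logits
)

# Per-example loss, then batch average, mirroring the diff (scaling omitted).
loss = softmax_loss + tf.reduce_mean(sigmoid_loss, axis=-1)
print(float(tf.reduce_mean(loss)))

The tf.print of the mean similarities added in the second hunk reads like temporary debugging output; unlike Python's print, tf.print also executes inside compiled tf.function graphs, which is presumably why it was chosen.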

