Commit 1bb3795

[cost] sequence_loss_by_example in cross_entropy_seq for TF 1.0
1 parent f795fd7 commit 1bb3795

File tree

1 file changed (+4, -4 lines changed)

tensorlayer/cost.py

Lines changed: 4 additions & 4 deletions
@@ -216,12 +216,12 @@ def cross_entropy_seq(logits, target_seqs, batch_size=1, num_steps=None):
     >>> targets = tf.placeholder(tf.int32, [batch_size, num_steps])
     >>> cost = tf.cost.cross_entropy_seq(network.outputs, targets, batch_size, num_steps)
     """
-    try:
-        sequence_loss_by_example = tf.nn.seq2seq.sequence_loss_by_example
+    try: # TF 1.0
+        sequence_loss_by_example_fn = tf.contrib.legacy_seq2seq.sequence_loss_by_example
     except:
-        sequence_loss_by_example = tf.nn.seq2seq.sequence_loss_by_example
+        sequence_loss_by_example_fn = tf.nn.seq2seq.sequence_loss_by_example
 
-    loss = sequence_loss_by_example(
+    loss = sequence_loss_by_example_fn(
         [logits],
         [tf.reshape(target_seqs, [-1])],
         [tf.ones([batch_size * num_steps])])
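
A minimal sketch of how the patched dispatch reads in context, assuming TensorFlow 1.x with tf.contrib available. The docstring is abbreviated, the except clause is narrowed to AttributeError for clarity (the commit itself uses a bare except), and the final batch-averaging step is an assumption, since the rest of cost.py is not shown in this diff:

    import tensorflow as tf

    def cross_entropy_seq(logits, target_seqs, batch_size=1, num_steps=None):
        """Cross entropy for a batch of fixed-length sequences (sketch)."""
        # Resolve the helper at its TF 1.0 location first, then fall back to
        # the pre-1.0 name, following the pattern introduced by this commit.
        try:  # TF 1.0
            sequence_loss_by_example_fn = tf.contrib.legacy_seq2seq.sequence_loss_by_example
        except AttributeError:  # older TF releases
            sequence_loss_by_example_fn = tf.nn.seq2seq.sequence_loss_by_example

        # Per-timestep cross entropy over the flattened targets, uniform weights.
        loss = sequence_loss_by_example_fn(
            [logits],
            [tf.reshape(target_seqs, [-1])],
            [tf.ones([batch_size * num_steps])])
        # Average over the batch (assumed; not part of this diff).
        return tf.reduce_sum(loss) / batch_size

Because only the lookup of the helper changes, callers of cross_entropy_seq (as in the docstring example above) are unaffected by this commit.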
