diff --git a/_modules/onmt/utils/loss.html b/_modules/onmt/utils/loss.html
index cb411fa8..003c46ea 100644
--- a/_modules/onmt/utils/loss.html
+++ b/_modules/onmt/utils/loss.html
@@ -357,19 +357,19 @@
             batch_stats.update(stats)
         return None, batch_stats
 
-    def _stats(self, loss, scores, target):
+    def _stats(self, loss, scores, labels):
         """
         Args:
             loss (:obj:`FloatTensor`): the loss computed by the loss criterion.
             scores (:obj:`FloatTensor`): a score for each possible output
-            target (:obj:`FloatTensor`): true targets
+            labels (:obj:`LongTensor`): true labels
 
         Returns:
             :obj:`onmt.utils.Statistics` : statistics for this batch.
         """
         pred = scores.max(1)[1]
-        non_padding = target.ne(self.padding_idx)
-        num_correct = pred.eq(target).masked_select(non_padding).sum().item()
+        non_padding = labels.ne(self.padding_idx)
+        num_correct = pred.eq(labels).masked_select(non_padding).sum().item()
         num_non_padding = non_padding.sum().item()
         return onmt.utils.Statistics(loss.item(), num_non_padding, num_correct)
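
For context on what the renamed _stats computes, here is a minimal standalone sketch of the same accuracy bookkeeping; padding_idx and the example tensors are illustrative values for this note, not part of the patch:

import torch

padding_idx = 0  # illustrative; in OpenNMT this comes from the target vocab

scores = torch.tensor([[0.1, 2.0, 0.3],   # argmax -> class 1
                       [0.0, 0.1, 3.0],   # argmax -> class 2
                       [1.5, 0.2, 0.1]])  # argmax -> class 0
labels = torch.tensor([1, 1, padding_idx])  # last position is padding

pred = scores.max(1)[1]               # predicted class per position
non_padding = labels.ne(padding_idx)  # True where the label is a real token
num_correct = pred.eq(labels).masked_select(non_padding).sum().item()
num_non_padding = non_padding.sum().item()
# num_correct == 1, num_non_padding == 2: the padded position is excluded
# from both counts, even though pred and label happen to agree there.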
 
@@ -399,14 +399,14 @@ 
 
         self.confidence = 1.0 - label_smoothing
 
-    def forward(self, output, target):
+    def forward(self, output, labels):
         """
         output (FloatTensor): batch_size x n_classes
-        target (LongTensor): batch_size
+        labels (LongTensor): batch_size
         """
-        model_prob = self.one_hot.repeat(target.size(0), 1)
-        model_prob.scatter_(1, target.unsqueeze(1), self.confidence)
-        model_prob.masked_fill_((target == self.ignore_index).unsqueeze(1), 0)
+        model_prob = self.one_hot.repeat(labels.size(0), 1)
+        model_prob.scatter_(1, labels.unsqueeze(1), self.confidence)
+        model_prob.masked_fill_((labels == self.ignore_index).unsqueeze(1), 0)
 
         return F.kl_div(output, model_prob, reduction='sum')
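
And a hedged end-to-end sketch of the renamed forward; n_classes, label_smoothing, and ignore_index are illustrative values, and the one_hot buffer is rebuilt inline on the assumption that the class's __init__ (not shown in this hunk) spreads label_smoothing / (n_classes - 2) over every slot except the true label and the padding index:

import torch
import torch.nn.functional as F

n_classes, label_smoothing, ignore_index = 5, 0.1, 0  # illustrative values
confidence = 1.0 - label_smoothing

# Assumed to mirror the buffer built in __init__: uniform smoothing mass
# on all classes, with the padding slot zeroed out.
smoothing_value = label_smoothing / (n_classes - 2)
one_hot = torch.full((n_classes,), smoothing_value)
one_hot[ignore_index] = 0

labels = torch.tensor([3, ignore_index])  # second example is padding
output = F.log_softmax(torch.randn(2, n_classes), dim=1)  # model log-probs

model_prob = one_hot.repeat(labels.size(0), 1)           # batch_size x n_classes
model_prob.scatter_(1, labels.unsqueeze(1), confidence)  # confidence on the true label
model_prob.masked_fill_((labels == ignore_index).unsqueeze(1), 0)  # zero padded rows

loss = F.kl_div(output, model_prob, reduction='sum')
# Rows whose target distribution is all zeros (padding) contribute nothing,
# since kl_div returns 0 wherever the target probability is 0.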