Commit 26ce726
Minor refactoring.
cschaefer26 committed Jul 8, 2020
1 parent 844e9b9 commit 26ce726
Showing 2 changed files with 1 addition and 11 deletions.
trainer/common.py (0 additions, 9 deletions)

```diff
@@ -72,15 +72,6 @@ def forward(self, x, target, lens):
         return loss / mask.sum()
 
 
-class LogL1(torch.nn.Module):
-
-    def forward(self, x, target):
-        target.requires_grad = False
-        target = torch.log(target + 1.0)
-        x = torch.log(x + 1.0)
-        return F.l1_loss(x, target)
-
-
 # Adapted from https://gist.github.com/jihunchoi/f1434a77df9db1bb337417854b398df1
 def pad_mask(lens, max_len):
     batch_size = lens.size(0)
```
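For reference, the removed LogL1 criterion is an L1 loss computed in log space: both prediction and target are compressed with log(1 + t) before the mean absolute error is taken, which de-emphasizes errors on large-magnitude values. A minimal standalone sketch of the same computation (the name log_l1_loss is ours, not the repository's):

```python
import torch
import torch.nn.functional as F

def log_l1_loss(x: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
    # Mirror the removed LogL1 module: detach the target (the module set
    # target.requires_grad = False), compress both tensors with log(1 + t),
    # then take the mean absolute error.
    target = target.detach()
    return F.l1_loss(torch.log(x + 1.0), torch.log(target + 1.0))
```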
trainer/forward_trainer.py (1 addition, 2 deletions)

```diff
@@ -9,7 +9,7 @@
 from torch.utils.tensorboard import SummaryWriter
 
 from models.forward_tacotron import ForwardTacotron
-from trainer.common import Averager, TTSSession, MaskedL1, LogL1
+from trainer.common import Averager, TTSSession, MaskedL1
 from utils import hparams as hp
 from utils.checkpoints import save_checkpoint
 from utils.dataset import get_tts_datasets
@@ -25,7 +25,6 @@ def __init__(self, paths: Paths) -> None:
         self.paths = paths
         self.writer = SummaryWriter(log_dir=paths.forward_log, comment='v1')
         self.l1_loss = MaskedL1()
-        self.log_l1_loss = LogL1()
 
     def train(self, model: ForwardTacotron, optimizer: Optimizer) -> None:
         for i, session_params in enumerate(hp.forward_schedule, 1):
```
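With LogL1 gone, the trainer's only spectrogram criterion is MaskedL1. The diffs above show just its final line (return loss / mask.sum()) and the signature of the pad_mask helper it builds on, so the following is a hedged reconstruction rather than the repository's verbatim code; it assumes predictions of shape (batch, channels, time) and per-item sequence lengths lens in frames:

```python
import torch
import torch.nn.functional as F

def pad_mask(lens: torch.Tensor, max_len: int) -> torch.Tensor:
    # 1.0 for valid time steps, 0.0 for padding (shape: batch x max_len).
    batch_size = lens.size(0)
    seq_range = torch.arange(0, max_len, device=lens.device)
    seq_range = seq_range.unsqueeze(0).expand(batch_size, max_len)
    return (seq_range < lens.unsqueeze(1)).float()

class MaskedL1(torch.nn.Module):
    def forward(self, x, target, lens):
        # Assumed shapes: x, target = (batch, channels, time).
        target = target.detach()
        mask = pad_mask(lens, target.size(-1))   # (batch, time)
        mask = mask.unsqueeze(1).expand_as(x)    # (batch, channels, time)
        loss = F.l1_loss(x * mask, target * mask, reduction='sum')
        return loss / mask.sum()                 # line shown in the diff above
```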
