AMP training level no more than O1
Signed-off-by: begeekmyfriend <[email protected]>
begeekmyfriend committed Feb 25, 2020
1 parent bbdd596 commit 57dddb1
Showing 3 changed files with 2 additions and 3 deletions.
1 change: 0 additions & 1 deletion hparams.py
@@ -12,7 +12,6 @@
# set this to True if you are only interested in WaveRNN
ignore_tts = True
amp = True
-amp_level = 'O2'


# DSP --------------------------------------------------------------------------------------------------------------#
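Note: this hunk drops the amp_level hyperparameter entirely; the opt_level is instead hardcoded to 'O1' in train_wavernn.py (see below). For reference, a minimal sketch of the opt_level presets documented by NVIDIA Apex, not part of this commit; the Linear model here is a placeholder and assumes Apex and a CUDA device are available:

import torch
from apex import amp  # NVIDIA Apex, https://github.com/NVIDIA/apex

model = torch.nn.Linear(4, 4).cuda()            # placeholder model
optimizer = torch.optim.Adam(model.parameters())

# opt_level presets documented by Apex:
#   'O0' -- pure FP32 training (AMP effectively off)
#   'O1' -- patch Torch ops to run in FP16 where numerically safe (this commit's choice)
#   'O2' -- "almost FP16": FP16 model weights with FP32 master weights in the optimizer
#   'O3' -- pure FP16, mainly useful as a speed baseline
model, optimizer = amp.initialize(model, optimizer, opt_level='O1')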
2 changes: 1 addition & 1 deletion models/fatchord_version.py
@@ -229,7 +229,7 @@ def generate(self, mels, save_path, batched, target, overlap, mu_law):
output = output[0]

end = time.time()
-print(f'{end - start}')
+print(f'Elapsed {end - start} seconds')
return save_wav(output[:wave_len], save_path)


2 changes: 1 addition & 1 deletion train_wavernn.py
@@ -33,7 +33,7 @@ def voc_train_loop(model, loss_func, optimizer, train_set, test_set, init_lr, fi
epochs = int((total_steps - model.get_step()) // total_iters + 1)

if hp.amp:
-    model, optimizer = amp.initialize(model, optimizer, opt_level=hp.amp_level)
+    model, optimizer = amp.initialize(model, optimizer, opt_level='O1')

torch.backends.cudnn.benchmark = True

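amp.initialize only rewraps the model and optimizer; under O1 the backward pass also has to go through Apex's loss scaling, which the rest of voc_train_loop is expected to do. A minimal sketch of one training step using that documented Apex pattern (model, optimizer, and loss_func come from the signature above; the tensors x and y are placeholders, not taken from this diff):

from apex import amp

# One illustrative training step under Apex O1 (x, y are placeholder tensors).
y_hat = model(x)
loss = loss_func(y_hat, y)

optimizer.zero_grad()
# Scale the loss so FP16 gradients do not underflow; Apex unscales the
# gradients again before optimizer.step() sees them.
with amp.scale_loss(loss, optimizer) as scaled_loss:
    scaled_loss.backward()
optimizer.step()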
