# MIT License
#
# Copyright (c) 2021 Soohwan Kim and Sangchun Ha and Soyoung Cho
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import torch

from openspeech.optim.scheduler.reduce_lr_on_plateau_scheduler import ReduceLROnPlateauScheduler
from openspeech.optim.scheduler.warmup_reduce_lr_on_plateau_scheduler import WarmupReduceLROnPlateauScheduler


class Optimizer(object):
    """
    Wrapper class of torch.optim.Optimizer that adds learning rate scheduling
    and gradient norm clipping on top of the wrapped optimizer.

    Args:
        optim (torch.optim.Optimizer): optimizer object; the parameters to be optimized
            should be given when instantiating the object, e.g. torch.optim.Adam, torch.optim.SGD
        scheduler (openspeech.optim.scheduler, optional): learning rate scheduler
        scheduler_period (int, optional): number of optimization steps during which
            the scheduler stays active
        max_grad_norm (int, optional): maximum norm used for gradient norm clipping;
            0 disables clipping
    """

    def __init__(self, optim, scheduler=None, scheduler_period=None, max_grad_norm=0):
        self.optimizer = optim
        self.scheduler = scheduler
        self.scheduler_period = scheduler_period
        self.max_grad_norm = max_grad_norm
        self.count = 0

    def step(self, model):
        # Clip gradients before the parameter update, then advance the
        # scheduler; once it has run for `scheduler_period` steps, detach it.
        if self.max_grad_norm > 0:
            torch.nn.utils.clip_grad_norm_(model.parameters(), self.max_grad_norm)
        self.optimizer.step()

        if self.scheduler is not None:
            self.update()
            self.count += 1

            if self.scheduler_period == self.count:
                self.scheduler = None
                self.scheduler_period = 0
                self.count = 0

    def set_scheduler(self, scheduler, scheduler_period):
        self.scheduler = scheduler
        self.scheduler_period = scheduler_period
        self.count = 0

    def update(self, val_loss=None):
        # Plateau-based schedulers step on the validation loss; all other
        # schedulers step unconditionally.
        if isinstance(self.scheduler, (ReduceLROnPlateauScheduler, WarmupReduceLROnPlateauScheduler)):
            self.scheduler.step(val_loss)
        else:
            self.scheduler.step()

    def zero_grad(self):
        self.optimizer.zero_grad()

    def get_lr(self):
        # Returns the learning rate of the first parameter group.
        for g in self.optimizer.param_groups:
            return g['lr']

    def set_lr(self, lr):
        for g in self.optimizer.param_groups:
            g['lr'] = lr
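

if __name__ == "__main__":
    # Minimal usage sketch (illustrative only, not part of the original
    # module): wrap torch.optim.Adam around a toy linear model and run a
    # single optimization step with gradient clipping. Scheduler
    # construction is omitted here because constructor signatures differ
    # across the openspeech schedulers.
    model = torch.nn.Linear(10, 2)
    optimizer = Optimizer(
        optim=torch.optim.Adam(model.parameters(), lr=1e-3),
        max_grad_norm=5.0,
    )

    loss = model(torch.randn(4, 10)).sum()
    optimizer.zero_grad()
    loss.backward()
    optimizer.step(model)
    print(f"lr after one step: {optimizer.get_lr()}")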