There was an error while loading. Please reload this page.
1 parent a93a1f3 · commit f899e9d — Copy full SHA for f899e9d
torchocr/optimizer/__init__.py
@@ -8,7 +8,8 @@
8
def build_optimizer(optim_config, lr_scheduler_config, epochs, step_each_epoch, model):
9
from . import lr
10
config = copy.deepcopy(optim_config)
11
- optim = getattr(torch.optim, config.pop('name'))(params=model.parameters(), **config)
+ train_params = filter(lambda p: p.requires_grad, model.parameters())
12
+ optim = getattr(torch.optim, config.pop('name'))(params=train_params, **config)
13
14
lr_config = copy.deepcopy(lr_scheduler_config)
15
lr_config.update({'epochs': epochs, 'step_each_epoch': step_each_epoch})
0 commit comments