# configs/_base_/schedules/schedule_adam_step_5e.py
# optimizer: Adam with an initial learning rate of 1e-3; gradient clipping disabled
optimizer = dict(type='Adam', lr=1e-3)
optimizer_config = dict(grad_clip=None)
# learning policy: step decay, scaling the learning rate by the default
# gamma (0.1) after epochs 3 and 4
lr_config = dict(policy='step', step=[3, 4])
# running settings: epoch-based training for 5 epochs, saving a checkpoint
# after every epoch
runner = dict(type='EpochBasedRunner', max_epochs=5)
checkpoint_config = dict(interval=1)
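
# Usage sketch (an assumption, not part of this file): downstream MMOCR configs
# typically pull this schedule in through MMCV's `_base_` inheritance, e.g. in a
# hypothetical configs/textrecog/my_model.py:
#
#     _base_ = [
#         '../_base_/schedules/schedule_adam_step_5e.py',
#         '../_base_/default_runtime.py',  # assumed companion runtime base
#     ]
#
# Any key defined here (optimizer, lr_config, runner, ...) can then be
# overridden in the inheriting config.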