# Optimizer: plain SGD with momentum and weight decay.
# NOTE(review): the original comment claimed the LR was doubled for a batch-size
# change (4 -> 8), but 0.01 is also a common SGD default for segmentation —
# confirm against the base config / actual total batch size.
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0005)
# Automatic mixed-precision (AMP) training via AmpOptimWrapper with a dynamic
# loss scale; gradient clipping is explicitly disabled (clip_grad=None).
optim_wrapper = dict(type='AmpOptimWrapper', optimizer=optimizer, clip_grad=None, loss_scale='dynamic')

# learning policy - 基于epoch的学习率调度
param_scheduler = [
    dict(
        type='PolyLR',
        eta_min=1e-4,
        power=0.9,
        begin=0,
        end=300,  # 设置为300个epoch，与max_epochs保持一致
        by_epoch=True)  # 改为基于epoch
]

# Training schedule: epoch-based loop, 300 epochs total.
# NOTE(review): the original comment said "validate every 20 epochs to reduce
# validation overhead", but val_interval=1 runs validation after EVERY epoch —
# confirm which behavior was intended (comment and value disagree).
train_cfg = dict(type='EpochBasedTrainLoop', max_epochs=300, val_interval=1)
val_cfg = dict(type='ValLoop')
test_cfg = dict(type='TestLoop')

# Default runtime hooks. Several original comments contradicted the configured
# values; the contradictions are flagged inline below rather than silently
# changing either side.
default_hooks = dict(
    # Measures per-iteration time for ETA reporting.
    timer=dict(type='IterTimerHook'),
    # NOTE(review): comment said "log every 10 batches to reduce overhead",
    # but interval=1 logs every iteration (metrics aggregated per epoch via
    # log_metric_by_epoch=True) — confirm intended frequency.
    logger=dict(type='LoggerHook', interval=1, log_metric_by_epoch=True),
    # Steps the LR scheduler(s) defined in param_scheduler.
    param_scheduler=dict(type='ParamSchedulerHook'),
    # NOTE(review): comment said "save every 20 epochs, keep only last 3",
    # but interval=1 saves after every epoch and no max_keep_ckpts is set,
    # so ALL checkpoints are retained — confirm before long runs (disk usage).
    checkpoint=dict(type='CheckpointHook', by_epoch=True, interval=1),
    # Reseeds the distributed sampler each epoch for proper shuffling.
    sampler_seed=dict(type='DistSamplerSeedHook'),
    # NOTE(review): comment said "reduced visualization frequency to save
    # time", but draw=True with interval=1 draws at every step — confirm.
    visualization=dict(type='SegVisualizationHook', draw=True, interval=1))