'''upernet_beitbase_ShanCe_small: UPerNet + BEiT-base config for the ShanCe clip dataset (derived from the ade20k base config).'''
import os
from .base_cfg import *


# modify dataset config
# Dataset: switch the base config over to the ShanCe clip data.
# Train/test augmentation pipelines are inherited unchanged from base_cfg;
# custom aug_opts / repeat_times overrides were removed as dead commented-out code.
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG['type'] = 'shance'
DATASET_CFG['rootdir'] = '/data01/SegDataset/ShanCe_ClipData'
# modify dataloader config
# Dataloader: raise the training batch size for this experiment.
# NOTE: dict.copy() is shallow, so the nested 'train' dict must also be
# copied before mutation — otherwise .update() would write through to the
# shared base_cfg dict and leak batch_size=32 into every sibling config.
DATALOADER_CFG = DATALOADER_CFG.copy()
DATALOADER_CFG['train'] = DATALOADER_CFG['train'].copy()
DATALOADER_CFG['train'].update({
    'batch_size': 32
})
# modify optimizer config
# Optimizer: lower the learning rate for this dataset.
# Dict-unpacking merge makes a fresh dict with the overridden key,
# equivalent to copy()-then-update().
OPTIMIZER_CFG = {**OPTIMIZER_CFG, 'lr': 0.0002}
# modify scheduler config
# Scheduler: train for 1000 epochs (fresh dict, base config untouched).
SCHEDULER_CFG = {**SCHEDULER_CFG, 'max_epochs': 1000}
# modify losses config
# Losses: inherit the base config unchanged; copy so later edits
# in this module could not alias back into base_cfg.
LOSSES_CFG = dict(LOSSES_CFG)
# Segmentor: 30 output classes for the ShanCe label set.
SEGMENTOR_CFG = dict(SEGMENTOR_CFG)
SEGMENTOR_CFG['num_classes'] = 30

# modify inference config
# modify inference config
# Inference settings are inherited from base_cfg unchanged.
INFERENCE_CFG = INFERENCE_CFG.copy()
# modify common config
# Output locations and checkpoint cadence for this run.
# (The original file copied INFERENCE_CFG/COMMON_CFG twice — a copy-paste
# duplicate with no effect; the redundant second copy was removed.)
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['work_dir'] = 'upernet_beitbase_ShanCe_small'
COMMON_CFG['logfilepath'] = 'upernet_beitbase_ShanCe_small/upernet_beitbase_ShanCe_small.log'
COMMON_CFG['resultsavepath'] = 'upernet_beitbase_ShanCe_small/upernet_beitbase_ShanCe_small_results.pkl'
# checkpoint every 10 epochs
COMMON_CFG['save_interval_epochs'] = 10