
### Model
# Point-cloud part-segmentation model: predicts one of `num_parts` part
# labels per point across `num_classes` object categories.
model = dict(
    type='InterpCNN2_partseg',
    num_parts=50,     # number of part labels (ShapeNetPart has 50)
    num_classes=16,   # number of object categories
    loss=dict(type='CrossEntropy'),
)

### Dataset
dataset_type = 'ShapeNetPartSeg'
data_root = '../data/shapenetcore_partanno_segmentation_benchmark_v0_normal'

# Per-sample preprocessing: sample a fixed number of points, then transpose
# the point array to channels-first layout (order=[1, 0]).
train_pipeline = [
    dict(type='PointSample', num_point=2048, uniform=True, replace=True),
    dict(type='Transpose', keys=['points'], order=[1, 0]),
]
# Fix: was misspelled `val_pipline`; renamed so the config reads correctly.
val_pipeline = [
    dict(type='PointSample', num_point=2048, uniform=True, replace=True),
    dict(type='Transpose', keys=['points'], order=[1, 0]),
]
data = dict(
    samples_per_gpu=32,  # batch size per GPU
    workers_per_gpu=4,   # dataloader worker processes per GPU
    train=dict(
        type=dataset_type,
        split='train',
        root=data_root,
        pipeline=train_pipeline,
    ),
    # NOTE: validation runs on the 'test' split — there is no separate
    # val split configured here.
    val=dict(
        type=dataset_type,
        split='test',
        root=data_root,
        pipeline=val_pipeline,
    ),
    test=dict(
        type=dataset_type,
        split='test',
        root=data_root,
        pipeline=val_pipeline,
    ),
)

### evaluation
# Keep the checkpoint that achieves the best instance-averaged IoU.
evaluation = dict(save_best='ins_avg_iou')

### Schedule
# Adam optimizer with gradient clipping; step LR schedule that multiplies
# the learning rate by 0.2 at epochs 100 and 150, training for 200 epochs.
optimizer = dict(type='Adam', lr=0.001, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=1))
lr_config = dict(policy='step', step=[100, 150], gamma=0.2)
runner = dict(type='EpochBasedRunner', max_epochs=200)

### Runtime
# Save a checkpoint every epoch but keep only the most recent one.
checkpoint_config = dict(interval=1, max_keep_ckpts=1)
# Text logging every 50 iterations.
log_config = dict(
    interval=50,
    hooks=[dict(type='TextLoggerHook')],
)
custom_hooks = [
    # Wires the model's `num_parts` attribute to the dataset's `CLASSES`
    # attribute — presumably a consistency check between the two; confirm
    # against the NumClassCheckHook implementation.
    dict(type='NumClassCheckHook', module_attr='num_parts', dataset_attr='CLASSES'),
]

dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
seed = 0