### Model
# Classification head size (40) matches the ModelNet40 dataset configured below.
model = dict(type="LightNet", num_class=40)

### Dataset
dataset_type = 'ModelNet40_ply'
data_root = '/Data/datasets/pointcloud/modelnet40_ply_hdf5_2048/'

# Shared preprocessing pipeline: transpose each sample's point array from
# (num_points, channels) to (channels, num_points).
# NOTE(review): presumably LightNet expects channels-first input — confirm.
point_pipeline = [dict(type='Transpose', keys=['points'], order=[1, 0])]

data = dict(
    samples_per_gpu=32,   # per-GPU batch size
    workers_per_gpu=8,    # dataloader worker processes per GPU
    train=dict(
        type=dataset_type,
        split='train',
        root=data_root,
        pipeline=point_pipeline,
        shuffle=True,
    ),
    val=dict(
        type=dataset_type,
        split='test',  # ModelNet40 has no separate val split; evaluate on test
        root=data_root,
        pipeline=point_pipeline,
        # Fix: evaluation loaders should iterate deterministically; shuffling
        # val/test data serves no purpose and makes per-sample logs unstable.
        shuffle=False,
    ),
    test=dict(
        type=dataset_type,
        split='test',
        root=data_root,
        pipeline=point_pipeline,
        shuffle=False,  # fix: deterministic test-set iteration
    ),
)

### Schedule
# Plain SGD. NOTE(review): no momentum is configured — confirm that is intentional.
optimizer = dict(type='SGD', lr=0.1, weight_decay=0.0001)
# Clip gradients to a maximum L2 norm of 1 to stabilise training.
optimizer_config = dict(grad_clip=dict(max_norm=1))
# Step decay: scale the learning rate by gamma (0.1) at epochs 140 and 180.
lr_config = dict(policy='step', step=[140, 180], gamma=0.1)
# Epoch-based training loop, 200 epochs total.
runner = dict(type='EpochBasedRunner', max_epochs=200)

### Runtime
# Save a checkpoint every epoch, but keep only the most recent one on disk.
checkpoint_config = dict(interval=1, max_keep_ckpts=1)
# Emit text logs every 100 iterations.
log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')])
# Cross-check the dataset's class count against the model head at runtime.
custom_hooks = [dict(type='NumClassCheckHook')]

# Distributed training backend (NCCL for multi-GPU).
dist_params = dict(backend='nccl')
log_level = 'INFO'
# Neither warm-starting from a checkpoint nor resuming an interrupted run.
load_from = None
resume_from = None
# Single-stage workflow: one training epoch per cycle, no interleaved val epoch.
workflow = [('train', 1)]
