{
    "framework": "pytorch",
    "task": "ocr-recognition",
    "pipeline": {
        "type": "convnextTiny-ocr-recognition"
    },
    "model": {
        "type": "OCRRecognition",
        "recognizer": "ConvNextViT",
        "inference_kwargs": {
            "img_height": 32,
            "img_width": 804,
            "do_chunking": true
        }
    },
    "preprocessor": {
        "type": "ocr-recognition"
    },
    "train": {
        "max_epochs": 30,
        "work_dir": "./work_dir",
        "dataloader": {
            "batch_size_per_gpu": 64,
            "workers_per_gpu": 0
        },
        "optimizer": {
            "type": "AdamW",
            "weight_decay": 0.01,
            "lr": 0.001,
            "options": {
                "grad_clip": {
                    "max_norm": 20
                }
            }
        },
        "lr_scheduler": {
            "type": "MultiStepLR",
            "milestones": [
                10,
                20
            ],
            "gamma": 0.1
        },
        "hooks": [
            {
                "type": "IterTimerHook"
            }
        ],
        "checkpoint": {
            "period": {
                "interval": 1,
                "save_dir": "./work_dir"
            }
        },
        "logging": {
            "interval": 1000,
            "out_dir": "./work_dir"
        }
    },
    "evaluation": {
        "dataloader": {
            "batch_size_per_gpu": 32,
            "workers_per_gpu": 0,
            "shuffle": false
        },
        "metrics": "ocr-recognition-metric",
        "period": {
            "interval": 1
        }
    }
}
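
The "pipeline" and "model" sections above drive inference: input line images are resized to a fixed 32x804 canvas, and with "do_chunking" enabled, wide text lines are split into overlapping chunks that the ConvNextViT recognizer decodes. A minimal inference sketch, assuming the modelscope Python library and a hosted model such as 'damo/cv_convnextTiny_ocr-recognition-general_damo' (the model ID is an assumption, not stated in this file):

from modelscope.pipelines import pipeline
from modelscope.utils.constant import Tasks

# pipeline() reads this configuration.json from the model directory and
# builds the "convnextTiny-ocr-recognition" pipeline declared above.
ocr_recognition = pipeline(
    Tasks.ocr_recognition,
    model='damo/cv_convnextTiny_ocr-recognition-general_damo',  # assumed model ID
)

result = ocr_recognition('text_line.jpg')  # path or URL to a cropped text-line image
print(result)  # e.g. {'text': '...'}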
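
The "train" and "evaluation" sections configure fine-tuning: 30 epochs with AdamW (lr=0.001, weight_decay=0.01, gradient clipping at max_norm=20), a MultiStepLR schedule that decays the learning rate by 10x at epochs 10 and 20, and per-epoch checkpointing and evaluation. A minimal training sketch, assuming the modelscope trainer API; the dataset name and model ID below are placeholders, not taken from this file:

from modelscope.msdatasets import MsDataset
from modelscope.trainers import build_trainer

# Hypothetical dataset name; substitute a real OCR line-recognition dataset.
train_ds = MsDataset.load('my-ocr-lines', split='train')
eval_ds = MsDataset.load('my-ocr-lines', split='test')

trainer = build_trainer(default_args=dict(
    model='damo/cv_convnextTiny_ocr-recognition-general_damo',  # assumed model ID
    train_dataset=train_ds,
    eval_dataset=eval_ds,
    work_dir='./work_dir',  # same work_dir as in the "train" section above
))
trainer.train()  # runs the optimizer, scheduler, and hooks defined in the config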