Barth371 committed on
Commit abdaa84
1 Parent(s): 4aaa687

Training in progress, step 200

config.json CHANGED
@@ -8,169 +8,19 @@
   "hidden_dropout_prob": 0.0,
   "hidden_size": 768,
   "id2label": {
- "0": "FA 970 VW",
- "1": "EX-268-XQ",
- "2": "EK-744-NX",
- "3": "GF 784 CM",
- "4": "DW-843-EZ",
- "5": "FF-421-YR",
- "6": "FR-285-QM",
- "7": "GK-917-RY",
- "8": "EK-011-NX",
- "9": "ER 471 NZ",
- "10": "FA 671 ZC",
- "11": "FP 096 BS",
- "12": "FW-674-FZ",
- "13": "FQ-298-JK",
- "14": "FS-503-GB",
- "15": "EY-991-DR",
- "16": "EZ-453-VY",
- "17": "FR-392-XR",
- "18": "FV 168 AE",
- "19": "86098/GB587GT",
- "20": "GB-620-BY",
- "21": "<EMPTY>",
- "22": "FJ-939-TY",
- "23": "ET 579 FR",
- "24": "FM-231-YY",
- "25": "GB-927-WD",
- "26": "FE 590NF",
- "27": "FE-867-QA",
- "28": "EN 202 ML",
- "29": "EL-223-XF",
- "30": "FY-915-LM",
- "31": "FK-499-QK",
- "32": "FV-286-QK",
- "33": "FR-578-DC",
- "34": "GB-157-QG",
- "35": "FR-570-DG",
- "36": "FT688CZ",
- "37": "FE-834-BC",
- "38": "FZ 929 YF",
- "39": "FT-785-HW",
- "40": "FE-293-HZ",
- "41": "FT-594-AC",
- "42": "FL-937-AH",
- "43": "FJ-290-NN",
- "44": "FW474AH",
- "45": "FN-356-QX",
- "46": "FV-917-LV",
- "47": "GJ-053-HN",
- "48": "FJ-990-ZS",
- "49": "FH-738-TV",
- "50": "FR-322-QM",
- "51": "FK-484-LV",
- "52": "FS-127-LS",
- "53": "GA 127-AM",
- "54": "FF099KK",
- "55": "FE-238-CX",
- "56": "FS-402-BH",
- "57": "FH-542-QA",
- "58": "FE-271-HZ",
- "59": "EZ-177-RT",
- "60": "EQ-418-FA",
- "61": "GC 457 TV",
- "62": "FR 661 CG",
- "63": "FC-208-GD",
- "64": "FL-193-AJ",
- "65": "GA 296 FW",
- "66": "GF-551-DJ",
- "67": "FH-036-WY",
- "68": "GG157PV",
- "69": "FS-081-TF",
- "70": "FD-140-BP",
- "71": "FL-460-JN",
- "72": "FR-363-WL",
- "73": "GB 059 GR",
- "74": "GA-445-TA",
- "75": "GB-776-EE",
- "76": "FV-355-XK",
- "77": "FA 772 LB",
- "78": "FB-898-WZ"
+ "0": 0,
+ "1": 1,
+ "2": 2,
+ "3": 3
   },
   "image_size": 384,
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
- "86098/GB587GT": 19,
- "<EMPTY>": 21,
- "DW-843-EZ": 4,
- "EK-011-NX": 8,
- "EK-744-NX": 2,
- "EL-223-XF": 29,
- "EN 202 ML": 28,
- "EQ-418-FA": 60,
- "ER 471 NZ": 9,
- "ET 579 FR": 23,
- "EX-268-XQ": 1,
- "EY-991-DR": 15,
- "EZ-177-RT": 59,
- "EZ-453-VY": 16,
- "FA 671 ZC": 10,
- "FA 772 LB": 77,
- "FA 970 VW": 0,
- "FB-898-WZ": 78,
- "FC-208-GD": 63,
- "FD-140-BP": 70,
- "FE 590NF": 26,
- "FE-238-CX": 55,
- "FE-271-HZ": 58,
- "FE-293-HZ": 40,
- "FE-834-BC": 37,
- "FE-867-QA": 27,
- "FF-421-YR": 5,
- "FF099KK": 54,
- "FH-036-WY": 67,
- "FH-542-QA": 57,
- "FH-738-TV": 49,
- "FJ-290-NN": 43,
- "FJ-939-TY": 22,
- "FJ-990-ZS": 48,
- "FK-484-LV": 51,
- "FK-499-QK": 31,
- "FL-193-AJ": 64,
- "FL-460-JN": 71,
- "FL-937-AH": 42,
- "FM-231-YY": 24,
- "FN-356-QX": 45,
- "FP 096 BS": 11,
- "FQ-298-JK": 13,
- "FR 661 CG": 62,
- "FR-285-QM": 6,
- "FR-322-QM": 50,
- "FR-363-WL": 72,
- "FR-392-XR": 17,
- "FR-570-DG": 35,
- "FR-578-DC": 33,
- "FS-081-TF": 69,
- "FS-127-LS": 52,
- "FS-402-BH": 56,
- "FS-503-GB": 14,
- "FT-594-AC": 41,
- "FT-785-HW": 39,
- "FT688CZ": 36,
- "FV 168 AE": 18,
- "FV-286-QK": 32,
- "FV-355-XK": 76,
- "FV-917-LV": 46,
- "FW-674-FZ": 12,
- "FW474AH": 44,
- "FY-915-LM": 30,
- "FZ 929 YF": 38,
- "GA 127-AM": 53,
- "GA 296 FW": 65,
- "GA-445-TA": 74,
- "GB 059 GR": 73,
- "GB-157-QG": 34,
- "GB-620-BY": 20,
- "GB-776-EE": 75,
- "GB-927-WD": 25,
- "GC 457 TV": 61,
- "GF 784 CM": 3,
- "GF-551-DJ": 66,
- "GG157PV": 68,
- "GJ-053-HN": 47,
- "GK-917-RY": 7
+ "0": 0,
+ "1": 1,
+ "2": 2,
+ "3": 3
   },
   "layer_norm_eps": 1e-12,
   "max_image_length": -1,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a7d7aab1f2a3a0db8bb8d7fb54cfffd91d8812a1a43810418ab1509791d653c2
- size 451627564
+ oid sha256:bbb02025220fa8996986784414dd55e6a8b1b72070dd524ad9c5c02c21e9f8cd
+ size 451166464
runs/Feb09_14-24-45_32db55c39261/events.out.tfevents.1707488713.32db55c39261.5152.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d672705b74ac01430fec045b55fcde798f11e27e223a1a36869b091d62328e22
+ size 5033
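The added runs/... file is a TensorBoard event log written during training. A small sketch, assuming the file has been downloaded locally and contains standard scalar tags (the tag name "train/loss" is an assumption, not something visible in this commit), of reading it back with TensorBoard's EventAccumulator:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Hypothetical local directory holding the downloaded events.out.tfevents.* file.
acc = EventAccumulator("runs/Feb09_14-24-45_32db55c39261")
acc.Reload()

print(acc.Tags()["scalars"])              # list the scalar tags actually present
for event in acc.Scalars("train/loss"):   # "train/loss" is an assumed tag name
    print(event.step, event.value)
```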
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ac920be2c1b0a148f2182156e8a2dfd396b4d91a5e22cb36844ed540e228ac1d
+ oid sha256:1f6eed0152237b8ee1a28fc6a4c81d5f30ca766a2f93e27288bac4c75b6df6a9
  size 4600
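model.safetensors, training_args.bin and the event log are tracked with Git LFS, so the diffs above only swap the pointer files (oid sha256 and size). A generic sketch for checking that a locally downloaded artifact matches its pointer; the expected digest and size below are copied from the model.safetensors pointer in this commit, and the local filename is assumed:

```python
import hashlib

def lfs_fingerprint(path, chunk_size=1 << 20):
    """Stream a file and return (sha256 hex digest, byte size), the two fields of an LFS pointer."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest(), size

oid, size = lfs_fingerprint("model.safetensors")
assert oid == "bbb02025220fa8996986784414dd55e6a8b1b72070dd524ad9c5c02c21e9f8cd"
assert size == 451166464
```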