system (HF staff) committed on
Commit d5df9ad
1 Parent(s): e61814a

Commit From AutoTrain

.gitattributes CHANGED
@@ -29,3 +29,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *.bin.* filter=lfs diff=lfs merge=lfs -text
+ *.tar.gz filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,56 @@
+ ---
+ tags:
+ - autotrain
+ - text-classification
+ language:
+ - zh
+ widget:
+ - text: "I love AutoTrain 🤗"
+ datasets:
+ - yuan1729/autotrain-data-laws_1
+ co2_eq_emissions:
+   emissions: 8.667918502534315
+ ---
+
+ # Model Trained Using AutoTrain
+
+ - Problem type: Multi-class Classification
+ - Model ID: 1256348072
+ - CO2 Emissions (in grams): 8.6679
+
+ ## Validation Metrics
+
+ - Loss: 0.065
+ - Accuracy: 0.986
+ - Macro F1: 0.972
+ - Micro F1: 0.986
+ - Weighted F1: 0.986
+ - Macro Precision: 0.973
+ - Micro Precision: 0.986
+ - Weighted Precision: 0.986
+ - Macro Recall: 0.971
+ - Micro Recall: 0.986
+ - Weighted Recall: 0.986
+
+ ## Usage
+
+ You can use cURL to access this model:
+
+ ```
+ $ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoTrain"}' https://api-inference.huggingface.co/models/yuan1729/autotrain-laws_1-1256348072
+ ```
+
+ Or Python API:
+
+ ```
+ from transformers import AutoModelForSequenceClassification, AutoTokenizer
+
+ model = AutoModelForSequenceClassification.from_pretrained("yuan1729/autotrain-laws_1-1256348072", use_auth_token=True)
+
+ tokenizer = AutoTokenizer.from_pretrained("yuan1729/autotrain-laws_1-1256348072", use_auth_token=True)
+
+ inputs = tokenizer("I love AutoTrain", return_tensors="pt")
+
+ outputs = model(**inputs)
+ ```
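
The README's Python snippet stops at the raw `outputs`. Below is a minimal follow-up sketch (assuming PyTorch and the `model`, `tokenizer`, and `outputs` objects from that snippet) for mapping the logits to one of the 49 label names stored in the model config:

```
import torch

# Highest-scoring class index -> human-readable label from config.json's id2label.
predicted_id = outputs.logits.argmax(dim=-1).item()
predicted_label = model.config.id2label[predicted_id]
confidence = torch.softmax(outputs.logits, dim=-1)[0, predicted_id].item()
print(predicted_label, round(confidence, 3))
```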
config.json ADDED
@@ -0,0 +1,138 @@
+ {
+   "_name_or_path": "AutoTrain",
+   "_num_labels": 49,
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "directionality": "bidi",
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "\u4e2d\u83ef\u6c11\u570b\u4e5d\u5341\u516d\u5e74\u7f6a\u72af\u6e1b\u5211\u689d\u4f8b",
+     "1": "\u4e2d\u83ef\u6c11\u570b\u61b2\u6cd5",
+     "2": "\u500b\u4eba\u8cc7\u6599\u4fdd\u8b77\u6cd5",
+     "3": "\u5152\u7ae5\u53ca\u5c11\u5e74\u6027\u525d\u524a\u9632\u5236\u689d\u4f8b",
+     "4": "\u5152\u7ae5\u53ca\u5c11\u5e74\u798f\u5229\u8207\u6b0a\u76ca\u4fdd\u969c\u6cd5",
+     "5": "\u5165\u51fa\u570b\u53ca\u79fb\u6c11\u6cd5",
+     "6": "\u516c\u53f8\u6cd5",
+     "7": "\u516c\u8077\u4eba\u54e1\u9078\u8209\u7f77\u514d\u6cd5",
+     "8": "\u516c\u8a2d\u8faf\u8b77\u4eba\u689d\u4f8b",
+     "9": "\u5340\u57df\u8a08\u756b\u6cd5",
+     "10": "\u5546\u696d\u6703\u8a08\u6cd5",
+     "11": "\u5546\u6a19\u6cd5",
+     "12": "\u59a8\u5bb3\u5175\u5f79\u6cbb\u7f6a\u689d\u4f8b",
+     "13": "\u5bb6\u5ead\u66b4\u529b\u9632\u6cbb\u6cd5",
+     "14": "\u5c31\u696d\u670d\u52d9\u6cd5",
+     "15": "\u5ee2\u68c4\u7269\u6e05\u7406\u6cd5",
+     "16": "\u5f8b\u5e2b\u6cd5",
+     "17": "\u6027\u4fb5\u5bb3\u72af\u7f6a\u9632\u6cbb\u6cd5",
+     "18": "\u6027\u9a37\u64fe\u9632\u6cbb\u6cd5",
+     "19": "\u61f2\u6cbb\u8d70\u79c1\u689d\u4f8b",
+     "20": "\u6236\u7c4d\u6cd5",
+     "21": "\u653f\u5e9c\u63a1\u8cfc\u6cd5",
+     "22": "\u68ee\u6797\u6cd5",
+     "23": "\u69cd\u7832\u5f48\u85e5\u5200\u68b0\u7ba1\u5236\u689d\u4f8b",
+     "24": "\u6bd2\u54c1\u5371\u5bb3\u9632\u5236\u689d\u4f8b",
+     "25": "\u6c11\u6cd5",
+     "26": "\u6c34\u571f\u4fdd\u6301\u6cd5",
+     "27": "\u6d17\u9322\u9632\u5236\u6cd5",
+     "28": "\u7a05\u6350\u7a3d\u5fb5\u6cd5",
+     "29": "\u7aca\u76dc\u72af\u8d13\u7269\u72af\u4fdd\u5b89\u8655\u5206\u689d\u4f8b",
+     "30": "\u7ba1\u5236\u85e5\u54c1\u7ba1\u7406\u689d\u4f8b",
+     "31": "\u7d44\u7e54\u72af\u7f6a\u9632\u5236\u689d\u4f8b",
+     "32": "\u7f70\u91d1\u7f70\u9370\u63d0\u9ad8\u6a19\u6e96\u689d\u4f8b",
+     "33": "\u8077\u696d\u5b89\u5168\u885b\u751f\u6cd5",
+     "34": "\u81fa\u7063\u5730\u5340\u8207\u5927\u9678\u5730\u5340\u4eba\u6c11\u95dc\u4fc2\u689d\u4f8b",
+     "35": "\u8457\u4f5c\u6b0a\u6cd5",
+     "36": "\u85e5\u4e8b\u6cd5",
+     "37": "\u8b49\u5238\u4ea4\u6613\u6cd5",
+     "38": "\u8caa\u6c61\u6cbb\u7f6a\u689d\u4f8b",
+     "39": "\u8ecd\u4e8b\u5be9\u5224\u6cd5",
+     "40": "\u8f49\u8b93\u6bd2\u54c1\u52a0\u91cd\u5176\u5211\u4e4b\u6578\u91cf\u6a19\u6e96",
+     "41": "\u901a\u8a0a\u4fdd\u969c\u53ca\u76e3\u5bdf\u6cd5",
+     "42": "\u9053\u8def\u4ea4\u901a\u5b89\u5168\u898f\u5247",
+     "43": "\u9053\u8def\u4ea4\u901a\u6a19\u8a8c\u6a19\u7dda\u865f\u8a8c\u8a2d\u7f6e\u898f\u5247",
+     "44": "\u9053\u8def\u4ea4\u901a\u7ba1\u7406\u8655\u7f70\u689d\u4f8b",
+     "45": "\u9280\u884c\u6cd5",
+     "46": "\u9678\u6d77\u7a7a\u8ecd\u5211\u6cd5",
+     "47": "\u96fb\u4fe1\u6cd5",
+     "48": "\u96fb\u5b50\u904a\u6232\u5834\u696d\u7ba1\u7406\u689d\u4f8b"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "\u4e2d\u83ef\u6c11\u570b\u4e5d\u5341\u516d\u5e74\u7f6a\u72af\u6e1b\u5211\u689d\u4f8b": 0,
+     "\u4e2d\u83ef\u6c11\u570b\u61b2\u6cd5": 1,
+     "\u500b\u4eba\u8cc7\u6599\u4fdd\u8b77\u6cd5": 2,
+     "\u5152\u7ae5\u53ca\u5c11\u5e74\u6027\u525d\u524a\u9632\u5236\u689d\u4f8b": 3,
+     "\u5152\u7ae5\u53ca\u5c11\u5e74\u798f\u5229\u8207\u6b0a\u76ca\u4fdd\u969c\u6cd5": 4,
+     "\u5165\u51fa\u570b\u53ca\u79fb\u6c11\u6cd5": 5,
+     "\u516c\u53f8\u6cd5": 6,
+     "\u516c\u8077\u4eba\u54e1\u9078\u8209\u7f77\u514d\u6cd5": 7,
+     "\u516c\u8a2d\u8faf\u8b77\u4eba\u689d\u4f8b": 8,
+     "\u5340\u57df\u8a08\u756b\u6cd5": 9,
+     "\u5546\u696d\u6703\u8a08\u6cd5": 10,
+     "\u5546\u6a19\u6cd5": 11,
+     "\u59a8\u5bb3\u5175\u5f79\u6cbb\u7f6a\u689d\u4f8b": 12,
+     "\u5bb6\u5ead\u66b4\u529b\u9632\u6cbb\u6cd5": 13,
+     "\u5c31\u696d\u670d\u52d9\u6cd5": 14,
+     "\u5ee2\u68c4\u7269\u6e05\u7406\u6cd5": 15,
+     "\u5f8b\u5e2b\u6cd5": 16,
+     "\u6027\u4fb5\u5bb3\u72af\u7f6a\u9632\u6cbb\u6cd5": 17,
+     "\u6027\u9a37\u64fe\u9632\u6cbb\u6cd5": 18,
+     "\u61f2\u6cbb\u8d70\u79c1\u689d\u4f8b": 19,
+     "\u6236\u7c4d\u6cd5": 20,
+     "\u653f\u5e9c\u63a1\u8cfc\u6cd5": 21,
+     "\u68ee\u6797\u6cd5": 22,
+     "\u69cd\u7832\u5f48\u85e5\u5200\u68b0\u7ba1\u5236\u689d\u4f8b": 23,
+     "\u6bd2\u54c1\u5371\u5bb3\u9632\u5236\u689d\u4f8b": 24,
+     "\u6c11\u6cd5": 25,
+     "\u6c34\u571f\u4fdd\u6301\u6cd5": 26,
+     "\u6d17\u9322\u9632\u5236\u6cd5": 27,
+     "\u7a05\u6350\u7a3d\u5fb5\u6cd5": 28,
+     "\u7aca\u76dc\u72af\u8d13\u7269\u72af\u4fdd\u5b89\u8655\u5206\u689d\u4f8b": 29,
+     "\u7ba1\u5236\u85e5\u54c1\u7ba1\u7406\u689d\u4f8b": 30,
+     "\u7d44\u7e54\u72af\u7f6a\u9632\u5236\u689d\u4f8b": 31,
+     "\u7f70\u91d1\u7f70\u9370\u63d0\u9ad8\u6a19\u6e96\u689d\u4f8b": 32,
+     "\u8077\u696d\u5b89\u5168\u885b\u751f\u6cd5": 33,
+     "\u81fa\u7063\u5730\u5340\u8207\u5927\u9678\u5730\u5340\u4eba\u6c11\u95dc\u4fc2\u689d\u4f8b": 34,
+     "\u8457\u4f5c\u6b0a\u6cd5": 35,
+     "\u85e5\u4e8b\u6cd5": 36,
+     "\u8b49\u5238\u4ea4\u6613\u6cd5": 37,
+     "\u8caa\u6c61\u6cbb\u7f6a\u689d\u4f8b": 38,
+     "\u8ecd\u4e8b\u5be9\u5224\u6cd5": 39,
+     "\u8f49\u8b93\u6bd2\u54c1\u52a0\u91cd\u5176\u5211\u4e4b\u6578\u91cf\u6a19\u6e96": 40,
+     "\u901a\u8a0a\u4fdd\u969c\u53ca\u76e3\u5bdf\u6cd5": 41,
+     "\u9053\u8def\u4ea4\u901a\u5b89\u5168\u898f\u5247": 42,
+     "\u9053\u8def\u4ea4\u901a\u6a19\u8a8c\u6a19\u7dda\u865f\u8a8c\u8a2d\u7f6e\u898f\u5247": 43,
+     "\u9053\u8def\u4ea4\u901a\u7ba1\u7406\u8655\u7f70\u689d\u4f8b": 44,
+     "\u9280\u884c\u6cd5": 45,
+     "\u9678\u6d77\u7a7a\u8ecd\u5211\u6cd5": 46,
+     "\u96fb\u4fe1\u6cd5": 47,
+     "\u96fb\u5b50\u904a\u6232\u5834\u696d\u7ba1\u7406\u689d\u4f8b": 48
+   },
+   "layer_norm_eps": 1e-12,
+   "max_length": 192,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 0,
+   "padding": "max_length",
+   "pooler_fc_size": 768,
+   "pooler_num_attention_heads": 12,
+   "pooler_num_fc_layers": 3,
+   "pooler_size_per_head": 128,
+   "pooler_type": "first_token_transform",
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.20.0",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 21128
+ }
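
The `id2label` block above stores the 49 statute names as `\u`-escaped JSON, which is hard to read in the raw diff. A small sketch (reusing the `use_auth_token` access pattern from the README) that loads this config and prints the decoded names:

```
from transformers import AutoConfig

# Loading the config decodes the \u escapes into the actual label strings.
config = AutoConfig.from_pretrained("yuan1729/autotrain-laws_1-1256348072", use_auth_token=True)
for idx in sorted(config.id2label):
    print(idx, config.id2label[idx])
```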
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:888f1ba84e64ecb616bcc73233cd1fe55a022f4738ed93528fcefa6a0903f968
+ size 409292333
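
The three lines added for `pytorch_model.bin` are a Git LFS pointer (spec URL, object hash, byte size) rather than the roughly 409 MB of weights themselves. A small illustrative sketch, with a hypothetical `parse_lfs_pointer` helper that is not part of this repo, showing how such a pointer breaks down:

```
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its space-separated key/value fields."""
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:888f1ba84e64ecb616bcc73233cd1fe55a022f4738ed93528fcefa6a0903f968
size 409292333"""
fields = parse_lfs_pointer(pointer)
print(fields["oid"], int(fields["size"]))  # sha256:888f1ba8... 409292333
```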
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "cls_token": "[CLS]",
+   "do_lower_case": true,
+   "mask_token": "[MASK]",
+   "name_or_path": "AutoTrain",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "special_tokens_map_file": "/app/.cache/huggingface/transformers/8183382ed1d3dbb68e827ee934b131f7a2fded38f6b5d6e94cf29c9e363e7cde.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
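
`tokenizer_config.json` declares a `BertTokenizer` with `do_lower_case` and `tokenize_chinese_chars` enabled, so CJK input is split into roughly one token per character before WordPiece. A short sketch (again assuming `use_auth_token` access) using one of the label names from the config as sample text:

```
from transformers import AutoTokenizer

# tokenize_chinese_chars=true isolates each CJK character before WordPiece runs.
tokenizer = AutoTokenizer.from_pretrained("yuan1729/autotrain-laws_1-1256348072", use_auth_token=True)
print(tokenizer.tokenize("道路交通管理處罰條例"))
```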
vocab.txt ADDED
The diff for this file is too large to render. See raw diff