elisachen committed
Commit b301595
Parent: 30089dc

Upload LlamaForSequenceClassification

Files changed (2):
  1. config.json (+39, -1)
  2. model.safetensors (+2, -2)
config.json CHANGED
@@ -9,19 +9,57 @@
   "eos_token_id": 2,
   "hidden_act": "silu",
   "hidden_size": 2048,
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2",
+    "3": "LABEL_3",
+    "4": "LABEL_4",
+    "5": "LABEL_5",
+    "6": "LABEL_6",
+    "7": "LABEL_7",
+    "8": "LABEL_8",
+    "9": "LABEL_9",
+    "10": "LABEL_10"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 5632,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_10": 10,
+    "LABEL_2": 2,
+    "LABEL_3": 3,
+    "LABEL_4": 4,
+    "LABEL_5": 5,
+    "LABEL_6": 6,
+    "LABEL_7": 7,
+    "LABEL_8": 8,
+    "LABEL_9": 9
+  },
   "max_position_embeddings": 2048,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 22,
   "num_key_value_heads": 4,
   "pretraining_tp": 1,
+  "quantization_config": {
+    "backend": "autoawq",
+    "bits": 4,
+    "do_fuse": false,
+    "fuse_max_seq_len": null,
+    "group_size": 128,
+    "modules_to_fuse": null,
+    "modules_to_not_convert": null,
+    "quant_method": "awq",
+    "version": "gemm",
+    "zero_point": true
+  },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "float16",
   "transformers_version": "4.38.2",
   "use_cache": true,
   "vocab_size": 32000
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:0668a5b7cef598eadeaf2ad2ab19189f7cc165ed4d5eca0007e521b654485afa
-  size 4138088880
+  oid sha256:bc26e079cb762ea92d2cb1018edfb1bc31556af142ac7c81307d4722a3a38476
+  size 634723464
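The shrink from ~4.14 GB to ~635 MB is consistent with the float32 → 4-bit AWQ change in config.json. A rough back-of-envelope check, assuming nearly all parameters are quantized:

    old_bytes, new_bytes = 4_138_088_880, 634_723_464
    ratio = old_bytes / new_bytes
    print(f"{ratio:.2f}x smaller")                   # ~6.52x
    # A pure float32 -> 4-bit conversion would give exactly 8x; the gap is the
    # per-group scales/zero points (group_size=128) plus tensors typically kept
    # in float16, such as embeddings, norms, and the classification head.
    print(f"{32 / ratio:.1f} effective bits/weight")  # ~4.9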