Poonnnnnnnn committed

Commit: e18283b
Parent(s): 302bc3b

Upload model

Files changed:
- adapter_config.json  +14 -0
- adapter_model.bin    +3 -0
adapter_config.json
ADDED
@@ -0,0 +1,14 @@
+{
+  "base_model_name_or_path": "pythainlp/wangchanglm-7.5B-sft-en-sharded",
+  "inference_mode": true,
+  "num_attention_heads": 24,
+  "num_layers": 32,
+  "num_transformer_submodules": 1,
+  "num_virtual_tokens": 10,
+  "peft_type": "PROMPT_TUNING",
+  "prompt_tuning_init": "TEXT",
+  "prompt_tuning_init_text": "\u0e08\u0e07\u0e41\u0e15\u0e48\u0e07\u0e19\u0e34\u0e22\u0e32\u0e22\u0e15\u0e48\u0e2d\u0e08\u0e32\u0e01\u0e40\u0e23\u0e37\u0e48\u0e2d\u0e07\u0e22\u0e48\u0e2d\u0e15\u0e48\u0e2d\u0e44\u0e1b\u0e19\u0e35\u0e49:",
+  "task_type": "CAUSAL_LM",
+  "token_dim": 4096,
+  "tokenizer_name_or_path": "pythainlp/wangchanglm-7.5B-sft-en-sharded"
+}
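This config describes a PEFT prompt-tuning adapter for the wangchanglm-7.5B causal LM: 10 trainable virtual-token embeddings of dimension 4096 are prepended to every input, initialized from tokenized text ("prompt_tuning_init": "TEXT"). The escaped prompt_tuning_init_text decodes to the Thai instruction "จงแต่งนิยายต่อจากเรื่องย่อต่อไปนี้:" ("Compose a continuation of the story from the following synopsis:"). Below is a minimal loading sketch using the standard transformers and peft APIs; the adapter repo id is not shown in this diff, so a placeholder path is used.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "pythainlp/wangchanglm-7.5B-sft-en-sharded"  # base_model_name_or_path above
adapter_id = "path/to/this-adapter-repo"  # hypothetical: the repo this commit was pushed to

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype=torch.float16)

# PeftModel reads adapter_config.json, sees peft_type "PROMPT_TUNING", and
# prepends the 10 learned virtual-token embeddings (token_dim 4096) from
# adapter_model.bin to every input; the 7.5B base model itself stays frozen.
model = PeftModel.from_pretrained(base, adapter_id)

synopsis = "..."  # a story synopsis, matching the task the init text describes
inputs = tokenizer(synopsis, return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=True))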
adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a09942a18da812033bd319a4ef0a09e41aa63c592c32f05af5fc694dac47955e
+size 164605
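The .bin payload itself lives in Git LFS, so the diff records only the pointer: the SHA-256 of the weights file and its size in bytes. The 164,605-byte size is consistent with a prompt-tuning adapter of this shape, assuming fp32 weights: 10 virtual tokens × 4096 token_dim × 4 bytes = 163,840 bytes of embeddings, plus a few hundred bytes of torch serialization overhead.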