HomoLiang committed on
Commit 9ab20e4
1 Parent(s): 370b148

Training in progress, step 100

adapter_config.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "auto_mapping": null,
+   "base_model_name_or_path": "yentinglin/Taiwan-LLM-7B-v2.0-chat",
+   "encoder_dropout": 0.0,
+   "encoder_hidden_size": 128,
+   "encoder_num_layers": 2,
+   "encoder_reparameterization_type": "MLP",
+   "inference_mode": true,
+   "num_attention_heads": 32,
+   "num_layers": 32,
+   "num_transformer_submodules": 1,
+   "num_virtual_tokens": 20,
+   "peft_type": "P_TUNING",
+   "revision": null,
+   "task_type": "CAUSAL_LM",
+   "token_dim": 4096
+ }
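This config describes a PEFT P-Tuning adapter (a 2-layer MLP prompt encoder producing 20 virtual tokens of dimension 4096) for the Taiwan-LLM-7B-v2.0-chat base model. Below is a minimal sketch of loading it for inference with the peft library, assuming the files from this commit have been downloaded locally; the "./checkpoint-100" path is a hypothetical placeholder, not part of this repo.

from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "yentinglin/Taiwan-LLM-7B-v2.0-chat"   # from base_model_name_or_path above
base_model = AutoModelForCausalLM.from_pretrained(base_id)
tokenizer = AutoTokenizer.from_pretrained(base_id)

# "./checkpoint-100" is an assumed local directory containing adapter_config.json
# and adapter_model.safetensors from this commit.
model = PeftModel.from_pretrained(base_model, "./checkpoint-100")
model.eval()  # the 20 learned virtual tokens are prepended to every prompt at inference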
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:869a8364ea2b9500e864e5b6785d033877e5cc17b5949d45bbd2ccd11284df43
+ size 327800
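The weights are stored via Git LFS, so only the pointer (oid and size) appears in the diff. For a P_TUNING adapter in inference mode the file typically holds just the prompt-embedding table of shape (num_virtual_tokens, token_dim) = (20, 4096), which at fp32 is 20 * 4096 * 4 = 327,680 bytes and is consistent with the 327,800-byte size above. A small sketch for inspecting the tensors after downloading the file (local filename assumed):

from safetensors.torch import load_file

# path after `git lfs pull` or a hub download
state = load_file("adapter_model.safetensors")
for name, tensor in state.items():
    print(name, tuple(tensor.shape), tensor.dtype)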
runs/Nov26_05-04-36_cf585e61886c/events.out.tfevents.1700975078.cf585e61886c.26.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d193234695d8906251a54aac8c0ab069936096f966d37651ad5daccfd9cb8c65
+ size 20159
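The runs/... file is the TensorBoard event log written during training, also stored via LFS. A hedged sketch of dumping the logged scalars with the tensorboard package, using the path from this commit (the tag names depend on what the Trainer actually logged):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

path = "runs/Nov26_05-04-36_cf585e61886c/events.out.tfevents.1700975078.cf585e61886c.26.0"
acc = EventAccumulator(path)
acc.Reload()
for tag in acc.Tags()["scalars"]:      # e.g. train/loss, train/learning_rate
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)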
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07180df518c3a990e14da91c971ddc1d558cd34aec0d4315e87e98d196bfc9c7
+ size 4219
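training_args.bin is the pickled transformers.TrainingArguments object that Trainer saves alongside checkpoints. A minimal sketch of inspecting it; weights_only=False is needed on recent PyTorch versions because the file is a pickled object rather than a plain tensor checkpoint:

import torch

args = torch.load("training_args.bin", weights_only=False)
print(args)  # learning rate, batch size, logging/save steps, ...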