HomoLiang committed on
Commit
e0fbaa0
1 Parent(s): 9ab20e4

Training in progress, step 100

Browse files
adapter_config.json CHANGED
@@ -2,14 +2,14 @@
2
  "auto_mapping": null,
3
  "base_model_name_or_path": "yentinglin/Taiwan-LLM-7B-v2.0-chat",
4
  "encoder_dropout": 0.0,
5
- "encoder_hidden_size": 128,
6
  "encoder_num_layers": 2,
7
  "encoder_reparameterization_type": "MLP",
8
  "inference_mode": true,
9
  "num_attention_heads": 32,
10
  "num_layers": 32,
11
  "num_transformer_submodules": 1,
12
- "num_virtual_tokens": 20,
13
  "peft_type": "P_TUNING",
14
  "revision": null,
15
  "task_type": "CAUSAL_LM",
 
2
  "auto_mapping": null,
3
  "base_model_name_or_path": "yentinglin/Taiwan-LLM-7B-v2.0-chat",
4
  "encoder_dropout": 0.0,
5
+ "encoder_hidden_size": 256,
6
  "encoder_num_layers": 2,
7
  "encoder_reparameterization_type": "MLP",
8
  "inference_mode": true,
9
  "num_attention_heads": 32,
10
  "num_layers": 32,
11
  "num_transformer_submodules": 1,
12
+ "num_virtual_tokens": 10,
13
  "peft_type": "P_TUNING",
14
  "revision": null,
15
  "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:869a8364ea2b9500e864e5b6785d033877e5cc17b5949d45bbd2ccd11284df43
3
- size 327800
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f603adae877f1eca419613125a59b860804d8d90a743546fb2878183836ab9fd
3
+ size 163960
runs/Nov26_06-17-38_26da521fff1e/events.out.tfevents.1700979460.26da521fff1e.27.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d69a64ccc00d0c79e70efe4a1dd3144dd59a6a6938c86f82c2bd48d989d17b18
3
+ size 20162
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:07180df518c3a990e14da91c971ddc1d558cd34aec0d4315e87e98d196bfc9c7
3
  size 4219
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a8a298aea41145aa25baa36e26c30cb67ca5f56e10588c9d9d8d62d0b2e465d2
3
  size 4219