SachinKaushik committed on
Commit 18bebdb
1 Parent(s): 61e98e4

mathsLlama
adapter_config.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "base_model_name_or_path": "openlm-research/open_llama_7b",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "lora_alpha": 16,
+   "lora_dropout": 0.05,
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 8,
+   "target_modules": [
+     "q_proj",
+     "v_proj"
+   ],
+   "task_type": "CAUSAL_LM"
+ }
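
The hunk above adds a PEFT LoRA configuration: rank-8 adapters with alpha 16 and dropout 0.05 on the q_proj and v_proj attention projections of OpenLLaMA-7B, saved in inference mode. A minimal sketch of loading the adapter with the peft library follows; the repo id "SachinKaushik/mathsLlama" is inferred from the commit metadata and is an assumption, as is the example prompt.

```python
# Minimal sketch: attach this commit's LoRA adapter to the OpenLLaMA-7B base model.
# Assumption: the adapter lives at "SachinKaushik/mathsLlama" (inferred from the
# commit author and message, not confirmed by the diff).
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE = "openlm-research/open_llama_7b"  # matches base_model_name_or_path above
ADAPTER = "SachinKaushik/mathsLlama"    # assumed repo id

# use_fast=False: the OpenLLaMA authors recommend the slow tokenizer for this model.
tokenizer = AutoTokenizer.from_pretrained(BASE, use_fast=False)
base_model = AutoModelForCausalLM.from_pretrained(BASE)

# Wraps the base model with the r=8, lora_alpha=16 q_proj/v_proj adapters
# described in adapter_config.json; inference_mode=true keeps them frozen.
model = PeftModel.from_pretrained(base_model, ADAPTER)
model.eval()

inputs = tokenizer("Q: What is 12 * 7?\nA:", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```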
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:685a66fbf3625735571cc2329d8e973c842f8737e0c6e780e23c72104d3b7abc
+ size 16822989
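
The adapter_model.bin entry is a Git LFS pointer: the ~16 MB payload holds only the LoRA deltas, not the 7B base weights. Once the file has been pulled through LFS, a quick way to confirm its contents is to inspect the state dict; a sketch, assuming a local checkout:

```python
# Sketch: inspect the downloaded LoRA weights (assumes `git lfs pull` has
# replaced the pointer file with the real ~16 MB binary in a local checkout).
import torch

state_dict = torch.load("adapter_model.bin", map_location="cpu")

# Expect lora_A/lora_B matrices for every targeted q_proj/v_proj module.
for name, tensor in list(state_dict.items())[:6]:
    print(name, tuple(tensor.shape))
print(f"{len(state_dict)} tensors total")
```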
runs/Jul14_05-36-59_457f26273382/events.out.tfevents.1689313022.457f26273382.29.1 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:60cdfcee7f00ba552ba56f42108f3ed7095c3de401e4406e019cfbc95ad60775
- size 4985
+ oid sha256:e2e638a5c6ceb1936b50cf1564be066205a317648da9125d73615a429e1e2c6b
+ size 5339
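
The final hunk swaps the LFS pointer for the Jul 14 TensorBoard event file, tracking a slightly larger log (5339 bytes vs. 4985). A sketch for reading the logged scalars locally with TensorBoard's event reader; the "train/loss" tag is an assumption about what the trainer logged, so the code lists the available tags first:

```python
# Sketch: dump scalars from the updated event file. The run directory comes
# from the diff; the "train/loss" tag is assumed, hence the Tags() listing.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = "runs/Jul14_05-36-59_457f26273382"
acc = EventAccumulator(run_dir)
acc.Reload()

print(acc.Tags())  # shows which scalar tags were actually logged
for event in acc.Scalars("train/loss"):  # assumed tag name
    print(event.step, event.value)
```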