Undi95 committed
Commit be50e3f (1 parent: 7992cfb)

Upload leyleylora.yml

Files changed (1): leyleylora.yml +78 -0
leyleylora.yml ADDED
@@ -0,0 +1,78 @@
+ base_model: ./NeverSleep_Noromaid-13b-v0.1.1/
+ model_type: LlamaForCausalLM
+ tokenizer_type: LlamaTokenizer
+ is_llama_derived_model: true
+
+ load_in_8bit: true
+ load_in_4bit: false
+ strict: false
+
+ datasets:
+   - path: lol/LRP.jsonl
+     type: completion
+ dataset_prepared_path:
+ val_set_size: 0.05
+ output_dir: ./lora-out5
+
+ sequence_len: 4096
+ sample_packing: false
+ pad_to_sequence_len: false
+
+ adapter: lora
+ lora_model_dir:
+ lora_r: 256
+ lora_alpha: 512
+ lora_dropout: 0.05
+ lora_target_linear: true
+ lora_fan_in_fan_out:
+ lora_target_modules:
+   - gate_proj
+   - down_proj
+   - up_proj
+   - q_proj
+   - v_proj
+   - k_proj
+   - o_proj
+
+ wandb_project: leyleytest4-noro
+ wandb_entity:
+ wandb_watch:
+ wandb_run_id:
+ wandb_log_model:
+
+ gradient_accumulation_steps: 2
+ micro_batch_size: 2
+ num_epochs: 50
+ optimizer: adamw_bnb_8bit
+ lr_scheduler: constant
+ learning_rate: 0.00000025
+
+ train_on_inputs: true
+ group_by_length: false
+ bf16: true
+ fp16: false
+ tf32: true
+
+ gradient_checkpointing: true
+ early_stopping_patience:
+ resume_from_checkpoint:
+ local_rank:
+ logging_steps: 1
+ xformers_attention:
+ flash_attention: true
+
+ warmup_steps: 10
+ eval_steps: 0.05
+ eval_table_size:
+ eval_table_max_new_tokens: 128
+ eval_sample_packing: false
+ save_steps: 65
+ debug:
+ deepspeed:
+ weight_decay: 0.0
+ fsdp:
+ fsdp_config:
+ special_tokens:
+   bos_token: "<s>"
+   eos_token: "</s>"
+   unk_token: "<unk>"
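
For a sense of scale: lora_r: 256 with lora_alpha: 512 over every linear projection is an unusually heavy adapter for a 13B model. A rough back-of-the-envelope sketch, not part of the commit, assuming the standard Llama-13B shapes (hidden_size 5120, intermediate_size 13824, 40 layers):

    # Sketch only: LoRA adds r * (d_in + d_out) trainable parameters per adapted
    # linear layer. Model dimensions below are assumed Llama-13B values, not
    # anything read from this config.
    hidden, inter, layers = 5120, 13824, 40
    r, alpha = 256, 512

    attn = 4 * r * (hidden + hidden)                        # q_proj, k_proj, v_proj, o_proj
    mlp = 2 * r * (hidden + inter) + r * (inter + hidden)   # gate_proj, up_proj; down_proj
    total = layers * (attn + mlp)

    print(f"LoRA scaling alpha/r = {alpha / r}")            # 2.0
    print(f"~{total / 1e6:.0f}M trainable parameters")      # ~1001M

Assuming axolotl is installed and the lol/LRP.jsonl dataset exists, a config like this is normally launched with accelerate launch -m axolotl.cli.train leyleylora.yml. The very low constant learning rate (2.5e-7) paired with num_epochs: 50 trades a fast schedule for many small passes over the data.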