DatPySci committed on
Commit 1e12c13 · verified · 1 parent: 681ba7c

Delete models/LoRA-EvoLM-1B-160BT-CPT-Ep1-Omega-GRPO-step300

models/LoRA-EvoLM-1B-160BT-CPT-Ep1-Omega-GRPO-step300/adapter_config.json DELETED
@@ -1,49 +0,0 @@
- {
-   "task_type": "CAUSAL_LM",
-   "peft_type": "LORA",
-   "auto_mapping": null,
-   "peft_version": "0.18.1",
-   "base_model_name_or_path": "/dev/shm/verl-cache/f94a13b34a9873ada45a6fd30683b0f9/evolm-1B-160BT-MixedFW8FM42-Ep1",
-   "revision": null,
-   "inference_mode": false,
-   "r": 32,
-   "target_modules": [
-     "o_proj",
-     "gate_proj",
-     "k_proj",
-     "v_proj",
-     "q_proj",
-     "down_proj",
-     "up_proj"
-   ],
-   "exclude_modules": null,
-   "lora_alpha": 64,
-   "lora_dropout": 0.0,
-   "fan_in_fan_out": false,
-   "bias": "none",
-   "use_rslora": false,
-   "modules_to_save": null,
-   "init_lora_weights": true,
-   "layers_to_transform": null,
-   "layers_pattern": null,
-   "rank_pattern": {},
-   "alpha_pattern": {},
-   "megatron_config": null,
-   "megatron_core": "megatron.core",
-   "trainable_token_indices": null,
-   "loftq_config": {},
-   "eva_config": null,
-   "corda_config": null,
-   "use_dora": false,
-   "alora_invocation_tokens": null,
-   "use_qalora": false,
-   "qalora_group_size": 16,
-   "layer_replication": null,
-   "runtime_config": {
-     "ephemeral_gpu_offload": false
-   },
-   "lora_bias": false,
-   "target_parameters": null,
-   "arrow_config": null,
-   "ensure_weight_tying": false
- }
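For reference, the deleted adapter_config.json maps directly onto a PEFT `LoraConfig`: rank-32 LoRA with alpha 64 (scale alpha/r = 2.0) applied to every attention and MLP projection. Below is a minimal sketch of the equivalent config object in Python; it is an illustration reconstructed from the JSON fields above, not the training script actually used in this repository.

```python
# Minimal sketch: a peft.LoraConfig mirroring the deleted adapter_config.json.
# All values are copied from the JSON above; this is illustrative only.
from peft import LoraConfig

lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    r=32,                   # LoRA rank
    lora_alpha=64,          # scaling factor: alpha / r = 2.0
    lora_dropout=0.0,
    bias="none",
    target_modules=[        # all attention and MLP projections
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
    inference_mode=False,
)
```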
models/LoRA-EvoLM-1B-160BT-CPT-Ep1-Omega-GRPO-step300/adapter_model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:09a8110fd2b1c11d8341437b6feb4033df57a12164d8c12c6b1c1b47ea33b8d5
- size 94748560
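The weights file itself was stored via Git LFS, so the repository only held the pointer above: the sha256 oid of the blob and its size, 94748560 bytes (roughly 90 MiB). A minimal sketch for checking a downloaded copy of the adapter against this pointer, assuming a hypothetical local file path:

```python
# Minimal sketch: verify a local adapter_model.safetensors against the
# Git LFS pointer fields above. The local path is a hypothetical example.
import hashlib
import os

EXPECTED_OID = "09a8110fd2b1c11d8341437b6feb4033df57a12164d8c12c6b1c1b47ea33b8d5"
EXPECTED_SIZE = 94748560  # bytes, from the pointer's "size" line

path = "adapter_model.safetensors"  # hypothetical local copy

# Cheap check first: the byte size recorded in the pointer.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

# Then the sha256 oid, hashed in 1 MiB chunks to bound memory use.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("local file matches the LFS pointer")
```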