mfirth committed · verified
Commit 1039580 · 1 Parent(s): e50a50c

Upload folder using huggingface_hub

Files changed (1)
config.json +7 -0
config.json CHANGED
@@ -46,6 +46,13 @@
     "original_max_position_embeddings": 4096,
     "type": "yarn"
   },
+  "slora_adapters": {
+    "7": {
+      "reft_rank": 32,
+      "lora_rank": 32,
+      "capacity": 0.25
+    }
+  },
   "rope_theta": 10000,
   "routed_scaling_factor": 1.0,
   "scoring_func": "softmax",