Commit cd31277
Parent(s): 6c217d9

Update hybrid_config.json
hybrid_config.json  CHANGED  (+0 -4)

@@ -27,14 +27,10 @@
     "kv_lora_rank": 160,
     "q_lora_rank": 2048,
     "use_lora_layer_norm": false,
-    "use_fixed_rank_for_first_and_last_block": true,
     "use_full_kv_head": false,
-    "layer_rank_list": {},
     "qk_rope_head_dim": 64,
     "v_head_dim": 128,
     "qk_nope_head_dim": 64,
-    "q_energy_ratio": null,
-    "kv_energy_ratio": null,
     "qkv_rank_divisor": 8,
     "max_position_embeddings": 131072,
     "rope_theta": 500000.0,
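The commit only removes four keys (use_fixed_rank_for_first_and_last_block, layer_rank_list, q_energy_ratio, kv_energy_ratio); the remaining fields are unchanged. As a minimal sketch of how downstream code could stay compatible with both the old and the new config, the loader below backfills the removed keys with defaults. The helper name load_hybrid_config and the fallback values are assumptions for illustration, not taken from the repository.

import json

# Defaults for the keys dropped from hybrid_config.json in this commit.
# NOTE: these fallback values are assumptions for illustration, not
# values taken from the repository.
_REMOVED_KEY_DEFAULTS = {
    "use_fixed_rank_for_first_and_last_block": False,
    "layer_rank_list": {},
    "q_energy_ratio": None,
    "kv_energy_ratio": None,
}

def load_hybrid_config(path="hybrid_config.json"):
    """Load the config and backfill keys that older code may still read."""
    with open(path) as f:
        config = json.load(f)
    for key, default in _REMOVED_KEY_DEFAULTS.items():
        config.setdefault(key, default)
    return config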