pigas committed
Commit fbde423
1 Parent(s): e94bb99

Upload PhiForCausalLM

Files changed (2):
  1. config.json +24 -0
  2. model.safetensors +2 -2
config.json CHANGED
@@ -19,6 +19,30 @@
   "num_key_value_heads": 32,
   "partial_rotary_factor": 0.4,
   "qk_layernorm": false,
+  "quantization_config": {
+    "batch_size": 1,
+    "bits": 4,
+    "block_name_to_quantize": null,
+    "cache_block_outputs": true,
+    "damp_percent": 0.1,
+    "dataset": "c4",
+    "desc_act": false,
+    "exllama_config": {
+      "version": 1
+    },
+    "group_size": 128,
+    "max_input_length": null,
+    "model_seqlen": null,
+    "module_name_preceding_first_block": null,
+    "modules_in_block_to_quantize": null,
+    "pad_token_id": null,
+    "quant_method": "gptq",
+    "sym": true,
+    "tokenizer": null,
+    "true_sequential": true,
+    "use_cuda_fp16": false,
+    "use_exllama": true
+  },
   "resid_pdrop": 0.1,
   "rope_scaling": null,
   "rope_theta": 10000.0,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:2c42e73d1e5c308f32604ad0e601d10e74003f007c17cea8126f92b6bd4f9012
- size 524735712
+ oid sha256:6bc9a7ab5c39038a0e3af61847a6ecce0c2a30ba28518fde79043b51811db6d7
+ size 1836707464
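
The added quantization_config marks this checkpoint as a 4-bit GPTQ export (group size 128, symmetric quantization, ExLlama v1 kernels enabled), which transformers reads automatically from config.json at load time. Below is a minimal sketch of loading such a checkpoint, assuming the optimum and auto-gptq (or a compatible GPTQ backend) packages are installed; the repository id used here is a placeholder, not the actual repo path.

# Minimal sketch: loading a GPTQ-quantized PhiForCausalLM checkpoint.
# Placeholder repo id -- substitute the real repository path.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "pigas/phi-gptq-4bit"  # hypothetical repo id for illustration

# The quantization_config embedded in config.json (quant_method "gptq",
# bits 4, group_size 128, use_exllama true) is picked up automatically,
# so no extra quantization arguments are needed at load time.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

# Quick smoke test of the quantized model.
inputs = tokenizer("Hello, world!", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))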