LoneStriker committed
Commit
903f88b
1 Parent(s): c4de592

Upload 3 files

Files changed (3):
  1. README.md +19 -0
  2. adapter_config.json +32 -0
  3. config.json +17 -4
README.md ADDED
@@ -0,0 +1,19 @@
+ ---
+ inference: false
+ language:
+ - en
+ library_name: transformers
+ pipeline_tag: text-generation
+ tags:
+ - mixtral
+ license: apache-2.0
+ datasets:
+ - jondurbin/airoboros-3.2
+ ---
+
+ # Air-Striker-Mixtral-8x7B-ZLoss
+
+ Experimental model, trained using the config and [Transformers/Axolotl](https://github.com/DocShotgun/axolotl) forks provided by [Doctor-Shotgun](https://huggingface.co/Doctor-Shotgun).
+
+ The model was fine-tuned from [Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) on the airoboros-3.2 dataset for 4 epochs, using the ChatML prompt format at an 8K context length.
+
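Since the README calls out the ChatML prompt format, a short usage sketch may help. This is not part of the commit: the repo id and generation settings below are assumptions, and only standard `transformers` calls are used.

```python
# Hedged usage sketch: the repo id is an assumed Hugging Face path,
# and the prompt/generation settings are illustrative only.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "LoneStriker/Air-Striker-Mixtral-8x7B-ZLoss"  # assumption

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

# ChatML format, as stated in the README: system / user / assistant turns
# delimited by <|im_start|> and <|im_end|> tokens.
prompt = (
    "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n"
    "<|im_start|>user\nExplain mixture-of-experts routing in one sentence.<|im_end|>\n"
    "<|im_start|>assistant\n"
)

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=128)
# Decode only the newly generated tokens.
print(tokenizer.decode(output_ids[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```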
adapter_config.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "/users/ubuntu/models/Mixtral-8x7B-v0.1",
+   "bias": "none",
+   "fan_in_fan_out": null,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 16,
+   "lora_dropout": 0.07,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 64,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "q_proj",
+     "w1",
+     "v_proj",
+     "gate",
+     "o_proj",
+     "w3",
+     "k_proj",
+     "w2"
+   ],
+   "task_type": "CAUSAL_LM"
+ }
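For readers reconstructing the training setup, the adapter file above maps onto roughly this PEFT `LoraConfig`. This is a sketch derived from the JSON, not code shipped in the commit:

```python
# Sketch reconstructed from adapter_config.json; assumes a recent peft release.
from peft import LoraConfig

lora_config = LoraConfig(
    r=64,                  # LoRA rank
    lora_alpha=16,         # scaling factor (alpha/r = 0.25)
    lora_dropout=0.07,
    bias="none",
    task_type="CAUSAL_LM",
    # Attention projections plus the Mixtral expert MLPs (w1/w2/w3)
    # and the per-layer router gate.
    target_modules=[
        "q_proj", "k_proj", "v_proj", "o_proj",
        "w1", "w2", "w3", "gate",
    ],
)
```

Targeting the expert MLPs and the router `gate` alongside the attention projections adapts the MoE-specific weights too, rather than leaving the experts frozen.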
config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "/home/hien/models/Mixtral-8x7B-v0.1",
+ "_name_or_path": "/users/ubuntu/models/Mixtral-8x7B-v0.1",
  "architectures": [
    "MixtralForCausalLM"
  ],
@@ -17,14 +17,27 @@
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "num_local_experts": 8,
- "output_router_logits": false,
+ "output_router_logits": true,
+ "quantization_config": {
+   "bnb_4bit_compute_dtype": "bfloat16",
+   "bnb_4bit_quant_type": "nf4",
+   "bnb_4bit_use_double_quant": true,
+   "llm_int8_enable_fp32_cpu_offload": false,
+   "llm_int8_has_fp16_weight": false,
+   "llm_int8_skip_modules": null,
+   "llm_int8_threshold": 6.0,
+   "load_in_4bit": true,
+   "load_in_8bit": false,
+   "quant_method": "bitsandbytes"
+ },
  "rms_norm_eps": 1e-05,
  "rope_theta": 1000000.0,
  "router_aux_loss_coef": 0.02,
+ "router_z_loss_coef": 0.001,
  "sliding_window": null,
  "tie_word_embeddings": false,
- "torch_dtype": "float16",
+ "torch_dtype": "bfloat16",
  "transformers_version": "4.37.0.dev0",
- "use_cache": true,
+ "use_cache": false,
  "vocab_size": 32000
  }
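Two of the new config entries are worth unpacking. The `quantization_config` block records a QLoRA-style 4-bit NF4 load of the base model; the sketch below shows an equivalent load using only standard `transformers`/`bitsandbytes` options (how the author actually loaded the checkpoint is an assumption):

```python
# Sketch matching the bitsandbytes settings recorded in config.json.
# The base-model id comes from the README; the loading code itself is assumed.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # "load_in_4bit": true
    bnb_4bit_quant_type="nf4",              # "bnb_4bit_quant_type": "nf4"
    bnb_4bit_use_double_quant=True,         # "bnb_4bit_use_double_quant": true
    bnb_4bit_compute_dtype=torch.bfloat16,  # "bnb_4bit_compute_dtype": "bfloat16"
)

base_model = AutoModelForCausalLM.from_pretrained(
    "mistralai/Mixtral-8x7B-v0.1",
    quantization_config=bnb_config,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
```

Second, `router_z_loss_coef: 0.001` is presumably the "ZLoss" in the model name: the router z-loss from ST-MoE, $L_z = \frac{1}{B}\sum_{i=1}^{B}\big(\log\sum_{j=1}^{N} e^{x_{ij}}\big)^2$ over router logits $x$, which penalizes large logits and stabilizes MoE training. Flipping `output_router_logits` to `true` and `use_cache` to `false` is consistent with computing auxiliary router losses during fine-tuning.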