hafidhsoekma committed on
Commit 4c7fbbf
1 Parent(s): f2cd5f6

Delete config.json

Files changed (1)
  1. config.json +0 -49
config.json DELETED
@@ -1,49 +0,0 @@
- {
-   "base_model_name_or_path": "nferroukhi/WizardLM-Uncensored-Falcon-7b-sharded-bf16",
-   "peft_model_id": "azale-ai/DukunLM-Uncensored-7B",
-   "_name_or_path": "nferroukhi/WizardLM-Uncensored-Falcon-7b-sharded-bf16",
-   "alibi": false,
-   "apply_residual_connection_post_layernorm": false,
-   "architectures": [
-     "RWForCausalLM"
-   ],
-   "attention_dropout": 0.0,
-   "auto_map": {
-     "AutoConfig": "configuration_RW.RWConfig",
-     "AutoModelForCausalLM": "modelling_RW.RWForCausalLM"
-   },
-   "bias": false,
-   "bos_token_id": 1,
-   "eos_token_id": 2,
-   "hidden_dropout": 0.0,
-   "hidden_size": 4544,
-   "initializer_range": 0.02,
-   "layer_norm_epsilon": 1e-05,
-   "model_type": "RefinedWebModel",
-   "multi_query": true,
-   "n_head": 71,
-   "n_layer": 32,
-   "parallel_attn": true,
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.31.0.dev0",
-   "use_cache": true,
-   "vocab_size": 65025,
-   "fan_in_fan_out": false,
-   "inference_mode": true,
-   "init_lora_weights": true,
-   "layers_pattern": null,
-   "layers_to_transform": null,
-   "lora_alpha": 16.0,
-   "lora_dropout": 0.1,
-   "modules_to_save": null,
-   "peft_type": "LORA",
-   "r": 64,
-   "revision": null,
-   "target_modules": [
-     "dense",
-     "dense_4h_to_h",
-     "dense_h_to_4h",
-     "query_key_value"
-   ],
-   "task_type": "CAUSAL_LM"
- }
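
The deleted file mixed a Falcon-7B model config with LoRA adapter settings (peft_type LORA, r=64, lora_alpha=16, targeting dense, dense_4h_to_h, dense_h_to_4h, and query_key_value). As a minimal sketch only, assuming the base_model_name_or_path and peft_model_id repositories recorded in that config are still available on the Hub, loading the adapter this config described would look roughly like this; it is illustrative and not part of this repository:

# Minimal sketch, not the repository's official loading code.
# Assumes the Hub IDs recorded in the deleted config are still valid.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "nferroukhi/WizardLM-Uncensored-Falcon-7b-sharded-bf16"
adapter_id = "azale-ai/DukunLM-Uncensored-7B"

# The config's auto_map points to custom RWForCausalLM code,
# so trust_remote_code=True is required to load the base model.
base_model = AutoModelForCausalLM.from_pretrained(
    base_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(base_id, trust_remote_code=True)

# Attach the LoRA adapter described by the deleted config.
model = PeftModel.from_pretrained(base_model, adapter_id)
model.eval()

After that, text generation follows the usual transformers flow (tokenize a prompt, call model.generate, decode the output).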