Abhaykoul committed
Commit ecced96
1 Parent(s): dc37558

Update config.json

Files changed (1): config.json +25 -84
config.json CHANGED
@@ -1,86 +1,27 @@
 {
-  "vocab_size": 32000,
-  "max_position_embeddings": 2048,
-  "hidden_size": 2048,
-  "intermediate_size": 5632,
-  "num_hidden_layers": 22,
-  "num_attention_heads": 32,
-  "num_key_value_heads": 4,
-  "hidden_act": "silu",
-  "initializer_range": 0.02,
-  "rms_norm_eps": 1e-05,
-  "pretraining_tp": 1,
-  "use_cache": true,
-  "rope_theta": 10000.0,
-  "rope_scaling": null,
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "return_dict": true,
-  "output_hidden_states": false,
-  "output_attentions": false,
-  "torchscript": false,
-  "torch_dtype": null,
-  "use_bfloat16": false,
-  "tf_legacy_loss": false,
-  "pruned_heads": {},
-  "tie_word_embeddings": false,
-  "chunk_size_feed_forward": 0,
-  "is_encoder_decoder": false,
-  "is_decoder": false,
-  "cross_attention_hidden_size": null,
-  "add_cross_attention": false,
-  "tie_encoder_decoder": false,
-  "max_length": 20,
-  "min_length": 0,
-  "do_sample": false,
-  "early_stopping": false,
-  "num_beams": 1,
-  "num_beam_groups": 1,
-  "diversity_penalty": 0.0,
-  "temperature": 1.0,
-  "top_k": 50,
-  "top_p": 1.0,
-  "typical_p": 1.0,
-  "repetition_penalty": 1.0,
-  "length_penalty": 1.0,
-  "no_repeat_ngram_size": 0,
-  "encoder_no_repeat_ngram_size": 0,
-  "bad_words_ids": null,
-  "num_return_sequences": 1,
-  "output_scores": false,
-  "return_dict_in_generate": false,
-  "forced_bos_token_id": null,
-  "forced_eos_token_id": null,
-  "remove_invalid_values": false,
-  "exponential_decay_length_penalty": null,
-  "suppress_tokens": null,
-  "begin_suppress_tokens": null,
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "finetuning_task": null,
-  "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1"
-  },
-  "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1
-  },
-  "tokenizer_class": null,
-  "prefix": null,
-  "bos_token_id": 1,
-  "pad_token_id": null,
-  "eos_token_id": 2,
-  "sep_token_id": null,
-  "decoder_start_token_id": null,
-  "task_specific_params": null,
-  "problem_type": null,
-  "_name_or_path": "/root/.cache/huggingface/hub/models--UnfilteredAI--Mia-1B/snapshots/47a9fce2da11fe00cded2973391272c5aee5bd00",
-  "transformers_version": "4.38.2",
-  "model_type": "llama",
-  "quantization": {
-    "group_size": 64,
-    "bits": 4
-  }
+  "_name_or_path": "UnfilteredAI/Mia-1B",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 5632,
+  "max_position_embeddings": 2048,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 22,
+  "num_key_value_heads": 4,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "transformers_version": "4.38.2",
+  "use_cache": true,
+  "vocab_size": 32000
 }
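
For context, below is a minimal sketch (not part of this commit) of sanity-checking the slimmed-down config with the transformers library. The script name is hypothetical; it assumes network access to the Hub and that the repo id matches the "_name_or_path" written above.

# sanity_check_config.py -- illustrative only, not the committer's tooling
from transformers import AutoConfig

# Fetch only config.json from the Hub; no model weights are downloaded.
config = AutoConfig.from_pretrained("UnfilteredAI/Mia-1B")

# Core Llama hyperparameters retained by this commit.
assert config.model_type == "llama"
assert config.vocab_size == 32000
assert config.hidden_size == 2048
assert config.num_hidden_layers == 22
assert config.num_attention_heads == 32
assert config.num_key_value_heads == 4  # grouped-query attention

# The "quantization" block (group_size/bits) was removed, so it should
# no longer appear as an attribute on the loaded config.
assert getattr(config, "quantization", None) is None
print("config.json matches the committed values")

The commit itself drops the generation defaults (temperature, top_k, num_beams, ...) and the quantization block, replaces the local snapshot path in "_name_or_path" with the Hub repo id, and keeps only the core model hyperparameters; the script above simply confirms those retained values round-trip through a standard load.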