{
	"_name_or_path": "/workspace/models/Noromaid-20b-v0.1.1",
	"architectures": [
		"LlamaForCausalLM"
	],
	"attention_bias": false,
	"attention_dropout": 0.0,
	"bos_token_id": 1,
	"eos_token_id": 2,
	"hidden_act": "silu",
	"hidden_size": 5120,
	"initializer_range": 0.02,
	"intermediate_size": 13824,
	"max_position_embeddings": 4096,
	"model_type": "llama",
	"num_attention_heads": 40,
	"num_hidden_layers": 62,
	"num_key_value_heads": 40,
	"pad_token_id": 0,
	"pretraining_tp": 1,
	"quantization_config": {
		"bits": 8,
		"damp_percent": 0.01,
		"desc_act": true,
		"group_size": 128,
		"is_marlin_format": false,
		"model_file_base_name": null,
		"model_name_or_path": null,
		"quant_method": "gptq",
		"static_groups": false,
		"sym": true,
		"true_sequential": true
	},
	"rms_norm_eps": 1e-05,
	"rope_scaling": {
		"type": "dynamic",
		"factor": 2.0
	},
	"rope_theta": 26177,
	"tie_word_embeddings": false,
	"torch_dtype": "float16",
	"transformers_version": "4.38.2",
	"use_cache": false,
	"vocab_size": 32000
}
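
A minimal sketch of how a checkpoint carrying this config might be loaded with Hugging Face transformers. The model path below is hypothetical (not taken from this file), and a GPTQ backend such as auto-gptq plus optimum is assumed to be installed so that the "gptq" quantization_config above is honored; the dynamic rope_scaling and float16 dtype are picked up from the config automatically.

# Hedged sketch: load the GPTQ-quantized Llama checkpoint described by this config.
# The path is illustrative; auto-gptq (or gptqmodel) and optimum are assumed installed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "/workspace/models/Noromaid-20b-v0.1.1-GPTQ"  # hypothetical location
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    device_map="auto",          # place the 62-layer, 8-bit GPTQ weights across available GPUs
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" above
)

# Quick smoke test: generate a few tokens.
inputs = tokenizer("Hello,", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0]))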