Jeethu committed
Commit cd4138a
1 Parent(s): 0016a03

Add config.json

Files changed (1)
  1. config.json +90 -0
config.json ADDED
@@ -0,0 +1,90 @@
+ {
+   "version": "0.1.0",
+   "model_type": "llama",
+   "quantization": "w4a16g128",
+   "model_config": {
+     "hidden_size": 3072,
+     "intermediate_size": 8192,
+     "num_attention_heads": 24,
+     "num_hidden_layers": 28,
+     "rms_norm_eps": 1e-05,
+     "vocab_size": 128256,
+     "tie_word_embeddings": true,
+     "position_embedding_base": 500000.0,
+     "rope_scaling": {
+       "factor": 32.0,
+       "high_freq_factor": 4.0,
+       "low_freq_factor": 1.0,
+       "original_max_position_embeddings": 8192,
+       "rope_type": "llama3"
+     },
+     "context_window_size": 8192,
+     "prefill_chunk_size": 128,
+     "num_key_value_heads": 8,
+     "head_dim": 128,
+     "tensor_parallel_shards": 1,
+     "pipeline_parallel_stages": 1,
+     "max_batch_size": 128
+   },
+   "vocab_size": 128256,
+   "context_window_size": 8192,
+   "sliding_window_size": -1,
+   "prefill_chunk_size": 128,
+   "attention_sink_size": -1,
+   "tensor_parallel_shards": 1,
+   "pipeline_parallel_stages": 1,
+   "temperature": 0.6,
+   "presence_penalty": 0.0,
+   "frequency_penalty": 0.0,
+   "repetition_penalty": 1.0,
+   "top_p": 0.9,
+   "tokenizer_files": [
+     "tokenizer.json",
+     "tokenizer_config.json"
+   ],
+   "tokenizer_info": {
+     "token_postproc_method": "byte_level",
+     "prepend_space_in_encode": false,
+     "strip_space_in_decode": false
+   },
+   "conv_template": {
+     "name": "llama-3_1",
+     "system_template": "<|start_header_id|>system<|end_header_id|>\n\n{system_message}<|eot_id|>",
+     "system_message": "You are a helpful, respectful and honest assistant.",
+     "system_prefix_token_ids": [
+       128000
+     ],
+     "add_role_after_system_message": true,
+     "roles": {
+       "user": "<|start_header_id|>user",
+       "assistant": "<|start_header_id|>assistant",
+       "tool": "<|start_header_id|>ipython"
+     },
+     "role_templates": {
+       "user": "{user_message}",
+       "assistant": "{assistant_message}",
+       "tool": "{tool_message}"
+     },
+     "messages": [],
+     "seps": [
+       "<|eot_id|>"
+     ],
+     "role_content_sep": "<|end_header_id|>\n\n",
+     "role_empty_sep": "<|end_header_id|>\n\n",
+     "stop_str": [],
+     "stop_token_ids": [
+       128001,
+       128008,
+       128009
+     ],
+     "function_string": "",
+     "use_function_calling": false
+   },
+   "pad_token_id": 0,
+   "bos_token_id": 128000,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ]
+ }
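
The model_config shapes (hidden_size 3072, 28 layers, 24 attention heads over 8 KV heads, tied embeddings) match Llama 3.2 3B. A back-of-the-envelope check of the implied parameter count and weight footprint, assuming the standard Llama projection layout and reading "w4a16g128" as 4-bit weights, 16-bit activations, group size 128 (an interpretation, not confirmed by this commit):

```python
# Sanity check (not part of this commit): parameter count implied by
# model_config, assuming the usual Llama projection layout with tied
# embeddings (norm weights omitted), plus the weight footprint under
# our reading of "w4a16g128": 4-bit weights, fp16 activations, and
# one fp16 scale per group of 128 weights.
hidden, inter, layers = 3072, 8192, 28
heads, kv_heads, head_dim = 24, 8, 128
vocab = 128256

embed = vocab * hidden                          # tied input/output embedding
attn = (hidden * heads * head_dim * 2           # q_proj and o_proj
        + hidden * kv_heads * head_dim * 2)     # k_proj and v_proj (GQA)
mlp = 3 * hidden * inter                        # gate, up, down projections
params = embed + layers * (attn + mlp)
print(f"{params / 1e9:.2f}B parameters")        # ~3.21B: Llama 3.2 3B shapes

weight_bytes = params * 4 / 8                   # packed 4-bit weights
scale_bytes = params / 128 * 2                  # fp16 scale per 128-group
print(f"~{(weight_bytes + scale_bytes) / 2**30:.2f} GiB of weights")
```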
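
The rope_scaling block selects the "llama3" rescaling of the RoPE inverse frequencies. The sketch below mirrors the published Llama 3.1 rule with the values from this config; that this runtime applies the equivalent transform is an assumption. Note that context_window_size stays at 8192 here even though factor 32.0 is the value Llama 3.2 uses to extend toward 128K positions.

```python
import math

# Sketch of the "llama3" RoPE rescaling named in rope_scaling above
# (factor=32.0, low_freq_factor=1.0, high_freq_factor=4.0,
# original_max_position_embeddings=8192). Mirrors the published
# Llama 3.1 rule; assumed, not confirmed, for this runtime.
def llama3_scale(inv_freqs, factor=32.0, low=1.0, high=4.0, orig_max=8192):
    low_wavelen = orig_max / low        # waves longer than this: fully rescaled
    high_wavelen = orig_max / high      # waves shorter than this: untouched
    out = []
    for f in inv_freqs:
        wavelen = 2 * math.pi / f
        if wavelen > low_wavelen:       # low-frequency band
            out.append(f / factor)
        elif wavelen < high_wavelen:    # high-frequency band
            out.append(f)
        else:                           # mid band: smooth interpolation
            smooth = (orig_max / wavelen - low) / (high - low)
            out.append((1 - smooth) * f / factor + smooth * f)
    return out

# Base inverse frequencies from position_embedding_base (500000.0) and
# head_dim (128), as in standard RoPE.
base, head_dim = 500000.0, 128
inv_freqs = [base ** (-2 * i / head_dim) for i in range(head_dim // 2)]
scaled = llama3_scale(inv_freqs)
```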
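
The conv_template encodes the Llama 3.1 chat format. A minimal sketch (stdlib only, not this runtime's actual prompt builder) of how those fields compose into a single-turn prompt:

```python
import json

# Sketch (not part of this commit): compose a single-turn Llama 3.1
# prompt from the conv_template fields in this config.
with open("config.json") as f:
    tpl = json.load(f)["conv_template"]

def build_prompt(user_message: str) -> str:
    sep = tpl["seps"][0]  # "<|eot_id|>" closes each completed turn
    parts = [
        # System turn: the template already embeds role header and sep.
        tpl["system_template"].format(system_message=tpl["system_message"]),
        # User turn: role tag + role/content separator + message + sep.
        tpl["roles"]["user"]
        + tpl["role_content_sep"]
        + tpl["role_templates"]["user"].format(user_message=user_message)
        + sep,
        # Open the assistant turn so generation starts after the header.
        tpl["roles"]["assistant"] + tpl["role_empty_sep"],
    ]
    return "".join(parts)

print(build_prompt("Hello!"))
# At the token level, system_prefix_token_ids ([128000], <|begin_of_text|>)
# is prepended, and decoding halts on stop_token_ids 128001/128008/128009
# (<|end_of_text|>, <|eom_id|>, <|eot_id|>).
```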