aixk committed on
Commit c6550c2 · 1 Parent(s): 5f383db

Upload folder using huggingface_hub

README.md ADDED
@@ -0,0 +1,19 @@
+ # pai (Transformers)
+
+ ## Purpose
+ - Daily conversation model
+ - Non-coding assistant behavior
+
+ ## Architecture
+ - Custom architecture: StaiForCausalLM
+
+ - hidden_size: 1024
+ - intermediate_size: 4096
+ - layers: 16
+ - heads: 16
+ - kv_heads: 8
+ - max_seq_len: 160
+ - rope_theta: 10000
+ - qk_norm: True
+ - local_mixer: True
+ - neftune_alpha: 2.0
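
The README names a custom `StaiForCausalLM` architecture, but the `config.json` below registers the checkpoint as `LlamaForCausalLM`, so the standard `transformers` auto classes should resolve it without custom code. A minimal loading sketch, assuming a hypothetical repo id `aixk/pai`:

```python
# Minimal loading sketch. The repo id is an assumption; config.json
# registers the model as LlamaForCausalLM, so no trust_remote_code needed.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "aixk/pai"  # hypothetical; substitute the actual repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)  # float32, ~2.2 GB
print(model.config.model_type)  # "llama"
```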
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "dtype": "float32",
+   "eos_token_id": 2,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_dropout": 0.0,
+   "hidden_size": 1280,
+   "initializer_range": 0.02,
+   "intermediate_size": 5120,
+   "max_position_embeddings": 128,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 20,
+   "num_hidden_layers": 20,
+   "num_key_value_heads": 5,
+   "pad_token_id": 0,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-06,
+   "rope_parameters": {
+     "rope_theta": 10000.0,
+     "rope_type": "default"
+   },
+   "tie_word_embeddings": false,
+   "transformers_version": "5.0.0",
+   "use_cache": false,
+   "vocab_size": 32000
+ }
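
The attention geometry in this config is internally consistent; a quick sanity check using only values copied from the file:

```python
# Sanity-check of config.json geometry (all values copied from the file above).
hidden_size = 1280
num_attention_heads = 20
num_key_value_heads = 5
head_dim = 64

assert hidden_size // num_attention_heads == head_dim  # 1280 / 20 = 64
# Grouped-query attention: each KV head serves a fixed group of query heads.
assert num_attention_heads % num_key_value_heads == 0
print(num_attention_heads // num_key_value_heads)  # 4 query heads per KV head
```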
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 0,
+   "transformers_version": "5.0.0",
+   "use_cache": false
+ }
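
This generation config pins only the special-token ids and `use_cache: false`; it sets no sampling parameters, so `generate()` defaults to greedy decoding unless `do_sample=True` is passed. A short usage sketch, reusing `model` and `tokenizer` from the loading sketch above:

```python
# Greedy decoding follows from the absence of sampling parameters above;
# use_cache can be overridden per call for faster decoding.
out = model.generate(
    **tokenizer("Hello!", return_tensors="pt"),
    max_new_tokens=32,
    use_cache=True,  # overrides the use_cache=False default from the config
)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```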
merge_info.json ADDED
@@ -0,0 +1 @@
+ {"workers": ["worker-2d111c58", "worker-3342997f", "worker-3df5e737", "worker-56830549", "worker-5be16d56", "worker-6b20109f", "worker-941beca4", "worker-aa180a5e", "worker-e44d5af5", "worker-200c78ad"], "weights": [20308.0, 20308.0, 20308.0, 20308.0, 20308.0, 20308.0, 20308.0, 20308.0, 20308.0, 20308.0], "merged_count": 10, "updated_at": 1775114918}
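
`merge_info.json` lists ten workers, each with the same weight (20308.0), which suggests the published weights are an equally weighted average of worker checkpoints. A sketch of that kind of merge follows; the procedure itself is an assumption, as only the worker list and weights come from the file:

```python
# Weighted state-dict average, as merge_info.json appears to describe.
# With all weights equal (20308.0 x 10) this reduces to a plain mean.
import torch

def merge_state_dicts(
    state_dicts: list[dict[str, torch.Tensor]],
    weights: list[float],
) -> dict[str, torch.Tensor]:
    total = float(sum(weights))
    merged = {}
    for key in state_dicts[0]:
        merged[key] = sum(
            (w / total) * sd[key].float() for sd, w in zip(state_dicts, weights)
        )
    return merged
```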
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f1d2d916e462f907494557202d6f653135f6172596d956b74bad43240c6a041
+ size 2228454760
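
The 2,228,454,760-byte file size matches a float32 checkpoint of the architecture described in `config.json`. A back-of-envelope check:

```python
# Parameter count derived from config.json; the ~20 KB remainder relative
# to the file size is the safetensors header/metadata.
vocab, hidden, inter, n_layers = 32000, 1280, 5120, 20
n_heads, n_kv, d_head = 20, 5, 64

attn = (hidden * n_heads * d_head        # q_proj
        + 2 * hidden * n_kv * d_head     # k_proj + v_proj
        + n_heads * d_head * hidden)     # o_proj
mlp = 3 * hidden * inter                 # gate, up, down projections
norms = 2 * hidden                       # two RMSNorms per layer
per_layer = attn + mlp + norms           # 23,759,360

# Untied input embeddings + lm_head, plus the final norm.
params = n_layers * per_layer + 2 * vocab * hidden + hidden
print(params)      # 557,108,480
print(params * 4)  # 2,228,433,920 bytes of float32 weights
```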
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "backend": "tokenizers",
+   "bos_token": "<bos>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<eos>",
+   "extra_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>"
+   ],
+   "is_local": true,
+   "model_max_length": 1000000,
+   "pad_token": "<pad>",
+   "tokenizer_class": "TokenizersBackend",
+   "unk_token": "<unk>"
+ }
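
The tokenizer registers `<|im_start|>` and `<|im_end|>` as extra special tokens but, as shown, ships no chat template, so the ChatML-style prompt below is an assumption about the intended conversation format:

```python
# Hand-built ChatML-style prompt; the format is an assumption, since
# tokenizer_config.json defines the special tokens but no chat template.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("aixk/pai")  # hypothetical repo id

prompt = "<|im_start|>user\nHello!<|im_end|>\n<|im_start|>assistant\n"
ids = tok(prompt).input_ids
# Each special token should map to a single id rather than being split:
print(tok.convert_ids_to_tokens(ids)[:4])
```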