aixk committed on
Commit
4f51630
·
1 Parent(s): 8d18a50

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. README.md +19 -0
  2. config.json +24 -0
  3. tokenizer.json +0 -0
  4. tokenizer_config.json +15 -0
README.md ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # axai-stack-0_3B (Transformers)
2
+
3
+ ## Purpose
4
+ - Daily conversation model
5
+ - Non-coding assistant behavior
6
+
7
+ ## Architecture
8
+ - Custom architecture: AxaiForCausalLM
9
+
10
+ - hidden_size: 768
11
+ - intermediate_size: 3072
12
+ - layers: 24
13
+ - heads: 12
14
+ - kv_heads: 6
15
+ - max_seq_len: 128
16
+ - rope_theta: 10000
17
+ - qk_norm: True
18
+ - local_mixer: True
19
+ - neftune_alpha: 0.0
config.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "AxaiForCausalLM"
4
+ ],
5
+ "attention_dropout": 0.0,
6
+ "dtype": "float32",
7
+ "hidden_dropout": 0.0,
8
+ "hidden_size": 768,
9
+ "initializer_range": 0.02,
10
+ "intermediate_size": 3072,
11
+ "max_position_embeddings": 128,
12
+ "model_type": "axai",
13
+ "neftune_alpha": 0.0,
14
+ "num_attention_heads": 12,
15
+ "num_hidden_layers": 24,
16
+ "num_key_value_heads": 6,
17
+ "qk_norm": true,
18
+ "rezero_init": 1.0,
19
+ "rms_norm_eps": 1e-06,
20
+ "rope_theta": 10000.0,
21
+ "transformers_version": "5.0.0",
22
+ "use_cache": false,
23
+ "vocab_size": 32000
24
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "backend": "tokenizers",
3
+ "bos_token": "<bos>",
4
+ "clean_up_tokenization_spaces": false,
5
+ "eos_token": "<eos>",
6
+ "extra_special_tokens": [
7
+ "<|im_start|>",
8
+ "<|im_end|>"
9
+ ],
10
+ "is_local": true,
11
+ "model_max_length": 1000000,
12
+ "pad_token": "<pad>",
13
+ "tokenizer_class": "TokenizersBackend",
14
+ "unk_token": "<unk>"
15
+ }