alpindale committed on
Commit 3ccc048
1 Parent(s): 8f096af

Add files using upload-large-folder tool

config.json ADDED
@@ -0,0 +1,47 @@
+ {
+   "_name_or_path": "/home/austin/disk1/models/magnum-v4-123b",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 12288,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "model_type": "mistral",
+   "num_attention_heads": 96,
+   "num_hidden_layers": 88,
+   "num_key_value_heads": 8,
+   "quantization_config": {
+     "quant_config": {
+       "offload_meta": false,
+       "scale_quant_params": null,
+       "weight_quant_params": {
+         "axis": 1,
+         "channel_wise": true,
+         "group_size": 128,
+         "nbits": 4,
+         "optimize": true,
+         "round_zero": true,
+         "view_as_float": false
+       },
+       "zero_quant_params": null
+     },
+     "quant_method": "hqq",
+     "skip_modules": [
+       "lm_head"
+     ]
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.46.1",
+   "use_cache": false,
+   "vocab_size": 32768
+ }
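
The config.json above describes an 88-layer Mistral-architecture model (magnum-v4-123b) quantized to 4-bit with HQQ (group size 128, with lm_head skipped). A minimal loading sketch with transformers follows; the repo id and the availability of the hqq package are assumptions, not part of this commit.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical repo id; substitute wherever these files are actually hosted.
model_id = "anthracite-org/magnum-v4-123b-hqq"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # matches "torch_dtype" in config.json
    device_map="auto",          # spread the 88 hidden layers across available GPUs
)
# The HQQ settings travel with the checkpoint via "quantization_config",
# so no explicit quantization argument is passed here (assuming a recent
# transformers and the hqq package are installed).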
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "transformers_version": "4.46.1"
+ }
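
generation_config.json enables sampling by default (do_sample: true) and pins the bos/eos token ids to 1 and 2, consistent with config.json. Continuing the sketch above, a sampled generation call might look like this; temperature, top_p, and max_new_tokens are illustrative values chosen here, not part of the commit.

prompt = "Write a short story about a lighthouse keeper."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(
    **inputs,
    max_new_tokens=256,  # not set by generation_config.json
    temperature=0.8,     # illustrative sampling parameters
    top_p=0.95,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))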
model-00001-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c43b4929aa702fa5ee6d1288396e2d99b5b73215cf964b347237582bb0ca73fc
+ size 4843179600
model-00002-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9361a9c48761cdc52540021d6611e53244be6092f456fae2a0857bc195ff3831
+ size 4960473520
model-00003-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5deae7068afa6eaf5eefb45e228bacf473cef8e8e20a12f03aa7bdc000e8ca21
+ size 4973837004
model-00004-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a185c8629d29732d7d2c78657e3b6046700fa52d768de3338e49e212c6404164
+ size 4960424736
model-00005-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:511718e7be165abf7e9bdd24971a2eed093fd088834c8b4450058194cce5ce59
+ size 4960474224
model-00006-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:960298a46b3db3975b0a289e602eeff0870b9011401ce3037211c2b14f0e857a
+ size 4960474200
model-00007-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5a9f3400a7076d7bca1259484d020c75ce3d667b1193e441202aea95387a9dd
+ size 4973837004
model-00008-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f43ccf037e84e75b9d028c3205c066a4624537d6f3f0abbaac6dc27e0c49a4f9
+ size 4960424736
model-00009-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68ad38666ea61df23a179f7bd30926df506cd74c6855338e4c1d266070fdd1ec
+ size 4960474224
model-00010-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fec12a4e0b1d5c6b4fbc41d5699b2bba3d11fdf78852293a3beb9aefab55358f
+ size 4960474200
model-00011-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdf056cc3ad2ba38b8189e1f2c3de6d3fe372a73166d96c7926a091c85ad3b9b
+ size 4973837004
model-00012-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89417e84ee8be9c553286fc508cd294fb947ca45f35b8e950cd132e7ef612159
+ size 4960424736
model-00013-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab938ffe1657ae05918b0a0ea7d1dfdf70331417708acf0e3a523456f29e0fad
+ size 4960474224
model-00014-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9c14fc0d5dde4d4c3b4f34fa3551282285887f99181122759e87f36d0b296df
+ size 1915104476
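
Each *.safetensors entry above is a Git LFS pointer (spec v1): the actual weights live in LFS storage, and the pointer records only the blob's sha256 and its size in bytes (roughly 4.96 GB per shard, with a smaller final shard). A sketch of verifying a downloaded shard against its pointer; the local directory is a hypothetical download location.

import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    # Stream the file in 1 MiB chunks so large shards never sit fully in memory.
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

local_dir = Path("./magnum-v4-123b-hqq")  # hypothetical download location
shard = local_dir / "model-00001-of-00014.safetensors"
expected = "c43b4929aa702fa5ee6d1288396e2d99b5b73215cf964b347237582bb0ca73fc"

assert shard.stat().st_size == 4843179600, "size mismatch"
assert sha256_of(shard) == expected, "sha256 mismatch"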
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
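
model.safetensors.index.json is the standard transformers sharded-checkpoint index. Since its diff is not rendered here, the sketch below assumes the usual layout, where "metadata.total_size" gives the combined byte count and "weight_map" maps each parameter name to one of the 14 shards.

import json

# Hypothetical local path; the file ships with the checkpoint above.
with open("magnum-v4-123b-hqq/model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # total bytes across all shards
name = next(iter(index["weight_map"]))        # some parameter name
print(name, "->", index["weight_map"][name])  # which shard file holds it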
 
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
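
special_tokens_map.json assigns the SentencePiece-style tokens <s>, </s>, and <unk> to the bos, eos, and unk roles, and reuses </s> as the pad token. With the tokenizer from the first sketch loaded, the mapping can be checked directly:

print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)
# Expected, per the file above: <s> </s> </s> <unk>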
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff