alpindale committed
Commit 6efe12f
1 Parent(s): 49f5d7a

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,80 @@
+ {
+ "_name_or_path": "alpindale/Mistral-Large-Instruct-2407-FP8",
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "compression_config": {
+ "config_groups": {
+ "group_0": {
+ "input_activations": {
+ "actorder": null,
+ "block_structure": null,
+ "dynamic": false,
+ "group_size": null,
+ "num_bits": 8,
+ "observer": "minmax",
+ "observer_kwargs": {},
+ "strategy": "tensor",
+ "symmetric": true,
+ "type": "float"
+ },
+ "output_activations": null,
+ "targets": [
+ "Linear"
+ ],
+ "weights": {
+ "actorder": null,
+ "block_structure": null,
+ "dynamic": false,
+ "group_size": null,
+ "num_bits": 8,
+ "observer": "minmax",
+ "observer_kwargs": {},
+ "strategy": "tensor",
+ "symmetric": true,
+ "type": "float"
+ }
+ }
+ },
+ "format": "float-quantized",
+ "global_compression_ratio": 1.4645550488838328,
+ "ignore": [
+ "lm_head"
+ ],
+ "kv_cache_scheme": {
+ "actorder": null,
+ "block_structure": null,
+ "dynamic": false,
+ "group_size": null,
+ "num_bits": 8,
+ "observer": "minmax",
+ "observer_kwargs": {},
+ "strategy": "tensor",
+ "symmetric": true,
+ "type": "float"
+ },
+ "quant_method": "compressed-tensors",
+ "quantization_status": "compressed"
+ },
+ "eos_token_id": 2,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 12288,
+ "initializer_range": 0.02,
+ "intermediate_size": 28672,
+ "max_position_embeddings": 131072,
+ "model_type": "mistral",
+ "num_attention_heads": 96,
+ "num_hidden_layers": 88,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.44.1",
+ "use_cache": true,
+ "vocab_size": 32768
+ }
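The config above declares FP8 quantization in the compressed-tensors format: 8-bit float, per-tensor, symmetric scales for both weights and input activations of all Linear layers, an FP8 kv_cache_scheme, and lm_head left unquantized. A minimal serving sketch, assuming a vLLM build with compressed-tensors support; the model id comes from _name_or_path above, while the prompt, sampling parameters, and tensor_parallel_size are illustrative (the config implies roughly 123B parameters, so on the order of 120 GB of FP8 weights):

# Minimal sketch: serve the FP8 checkpoint with vLLM, which picks up
# "quant_method": "compressed-tensors" from config.json at load time.
from vllm import LLM, SamplingParams

llm = LLM(
    model="alpindale/Mistral-Large-Instruct-2407-FP8",
    kv_cache_dtype="fp8",       # matches the kv_cache_scheme in config.json
    tensor_parallel_size=8,     # illustrative; size to your hardware
)
params = SamplingParams(max_tokens=128, temperature=0.7)
out = llm.generate(["[INST] Say hello. [/INST]"], params)
print(out[0].outputs[0].text)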
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "transformers_version": "4.44.1"
+ }
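generation_config.json only pins the BOS/EOS token ids; transformers reads it automatically alongside the model, and it can also be inspected on its own. A small sketch, assuming the transformers library (the checkpoint was saved with 4.44.1):

# Sketch: read the generation defaults shipped with this commit.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("alpindale/Mistral-Large-Instruct-2407-FP8")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # expected: 1 2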
model-00001-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c343dd12dd65984728542120bfcf72cc77ad4cf94d71a8a1d1b42772ab2fa2d5
+ size 4957823496
model-00002-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00e11fa3d95912f2ab6713c357a5c734aa5f7fd546eba18e94d30c8df5ca0991
+ size 4831995744
model-00003-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aedac62887af003d3b9f13209ebddea0f4ea9ba86201d7a5f9b46a5a4fe49fa8
+ size 4857209744
model-00004-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4669ef81b28d4921e404d19afb4d95cb1ad49b0923de868bfe953057f59e9ce8
+ size 4831995840
model-00005-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f6676e50bf200d9b23330710ef13e7efdf2043e0754744c0e036a0e39830cd4
+ size 4857209824
model-00006-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:afc3b68acf9632fc7de7fe0e929a4a2c643bbaf9b978f12ceba3cc16cb9bf087
+ size 4831995840
model-00007-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b71e400d564d05cd3bcf0046af6bccc4c5c52dd5dcf24314cdeb52dad0f4846
+ size 4857209824
model-00008-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90823bcac0c2bd65fc1ad648d1fdacf4034adfdcc3ce24eb8eafbafcb162d28a
+ size 4831995840
model-00009-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19f2c643c29c9f235555bd9d38c9fdbffd9b33ddfb7862029fa70357f34f72d8
+ size 4857209824
model-00010-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b5254a99a1b159887ec69dee55fd7566a4486c0abbacb0bad00c0f479fd294ec
+ size 4831995840
model-00011-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5a50eb11dedbe3066a3c5350ed3443416636c9bc93570aa25632debec8f6609
+ size 4857209824
model-00012-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a0e8db6dac294f511d0c6633abbcdde9adedef3c2eceb958a6095025cbbd4ea
+ size 4831995840
model-00013-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1dc251b076b08b8fea891bf7b374a636b3ed74183084ca4a29a9773a29128439
+ size 4857209824
model-00014-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0448d41f60a293b6ba9681378e611acf63a7c5d19fee1bbd23f0be18c3769e16
+ size 4831995840
model-00015-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f826b3cf10a827813cc29105a64f30c8298e9fdc2997afe84ab06503c327ddcd
+ size 4857209824
model-00016-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6953e18dde47096a02d80502401cc341d6e9d0a13b7b98bd79f7cdc16673f06d
+ size 4831995840
model-00017-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5cebc3bc7f6a9b2774672a90e35ea541e9c5c7b86cfd708b5330469dd96930b1
+ size 4857209824
model-00018-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31bcccb310fa62caa97f2d8e64494eeedf82342c109310c4c686a29249686f09
+ size 4831995840
model-00019-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf653005c955bd5874b2ef79321dffb904b9bbad0a9c0ebaa637896c19f58876
+ size 4857209824
model-00020-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c371610b7fb888e7c472b4b3f39a92d7255e3c0d9d3f041f3d614e2dd475c94
+ size 4831995840
model-00021-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6adc0c90a1e1d698db25e197d8d1e4d4b993894f85b6d964b30935dbd005914
+ size 4857209824
model-00022-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9df8803f89555a2ddf94439640f0b6b863415bcb59de5206f9235fae15878aac
+ size 4831995840
model-00023-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e608ebfe7789ebc004ca3a14b1372bac812207e33b875d9778d6e48fec56d90b
+ size 4857209824
model-00024-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89f2d2ec6c454137b7bf552a8ef70bc32ce68799f899dbe1b1a4b5123ebc3710
+ size 4831995840
model-00025-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e8e0528d2289d25c9b3be43d45153d4abae1a7b5aae07ff05561160ee312e0d
+ size 4857209824
model-00026-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f0937b48b19e09354561a10c17653c4f7e719c78c207955fa80487b95866782
+ size 2189503512
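Each model-*.safetensors entry above is a Git LFS pointer (spec v1) rather than the weights themselves: the pointer records the SHA-256 and byte size of the real shard. A verification sketch in Python, assuming the shard has already been downloaded; the file paths are illustrative:

# Sketch: check a downloaded shard against the oid/size recorded in its LFS pointer.
import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    # Parse "version ...", "oid sha256:<hex>", "size <bytes>" lines from the pointer file.
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    blob = Path(blob_path)
    if blob.stat().st_size != expected_size:
        return False
    digest = hashlib.sha256()
    with blob.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Illustrative paths; point these at the pointer file and the downloaded shard.
print(verify_lfs_pointer("model-00001-of-00026.safetensors.pointer",
                         "model-00001-of-00026.safetensors"))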
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
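special_tokens_map.json maps the BOS, EOS, and UNK roles to the SentencePiece tokens <s>, </s>, and <unk> with no stripping or normalization. A quick sketch to confirm the tokenizer files in this commit resolve those roles, assuming the transformers AutoTokenizer API:

# Sketch: load the tokenizer added in this commit and inspect its special tokens.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("alpindale/Mistral-Large-Instruct-2407-FP8")
print(tok.bos_token, tok.eos_token, tok.unk_token)  # expected: <s> </s> <unk>
print(tok.bos_token_id, tok.eos_token_id)           # expected: 1 2, matching config.json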
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06
+ size 587583
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff