khaimaitien committed on
Commit
db2d7b1
1 Parent(s): be8ba6e

Upload folder using huggingface_hub

Browse files
.gitattributes CHANGED
@@ -35,3 +35,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  qa-expert-7B-V1.0.q4_0.gguf filter=lfs diff=lfs merge=lfs -text
37
  qa-expert-7B-V1.0.q8_0.gguf filter=lfs diff=lfs merge=lfs -text
 
 
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  qa-expert-7B-V1.0.q4_0.gguf filter=lfs diff=lfs merge=lfs -text
37
  qa-expert-7B-V1.0.q8_0.gguf filter=lfs diff=lfs merge=lfs -text
38
+ qa-expert-7B-V1.0.f16.gguf filter=lfs diff=lfs merge=lfs -text
added_tokens.json CHANGED
@@ -2,7 +2,7 @@
2
  "</s>": 2,
3
  "<s>": 1,
4
  "<unk>": 0,
5
- "<|bof|>": 32000,
6
  "<|eof|>": 32001,
7
- "<|eot|>": 32002
8
  }
 
2
  "</s>": 2,
3
  "<s>": 1,
4
  "<unk>": 0,
5
+ "<|bof|>": 32002,
6
  "<|eof|>": 32001,
7
+ "<|eot|>": 32000
8
  }
qa-expert-7B-V1.0.f16.gguf ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:622c2cb207c8e32756d429424afefd1c9ce09cc2c7a354999ecf49771f98c1b6
3
+ size 14484780640
qa-expert-7B-V1.0.q4_0.gguf CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:231ab864dd7e6f4fcb9a807c3d4726008490ca38849392601bccf1262b40d761
3
- size 4108933408
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f9471c1fe4b0d7b0124f1512dbc0d60fc01fc58d52ee3314499361706bad1e30
3
+ size 4108933344
qa-expert-7B-V1.0.q8_0.gguf CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5e639e85324910e4b323fcfe10d189020716d8efe91562d23eca5c7b46027c2b
3
  size 7695883392
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5aaa7901767fb547df29c9122ded80fb5669c530f9bbe25411ae1f1ecbd396bb
3
  size 7695883392
special_tokens_map.json CHANGED
@@ -2,10 +2,12 @@
2
  "additional_special_tokens": [
3
  "<unk>",
4
  "<s>",
5
- "</s>"
 
 
 
6
  ],
7
  "bos_token": "<s>",
8
  "eos_token": "</s>",
9
- "pad_token": "</s>",
10
  "unk_token": "<unk>"
11
  }
 
2
  "additional_special_tokens": [
3
  "<unk>",
4
  "<s>",
5
+ "</s>",
6
+ "<|eot|>",
7
+ "<|eof|>",
8
+ "<|bof|>"
9
  ],
10
  "bos_token": "<s>",
11
  "eos_token": "</s>",
 
12
  "unk_token": "<unk>"
13
  }
tokenizer_config.json CHANGED
@@ -27,41 +27,44 @@
27
  "special": true
28
  },
29
  "32000": {
30
- "content": "<|bof|>",
31
  "lstrip": true,
32
- "normalized": true,
33
  "rstrip": true,
34
  "single_word": false,
35
- "special": false
36
  },
37
  "32001": {
38
  "content": "<|eof|>",
39
  "lstrip": true,
40
- "normalized": true,
41
  "rstrip": true,
42
  "single_word": false,
43
- "special": false
44
  },
45
  "32002": {
46
- "content": "<|eot|>",
47
  "lstrip": true,
48
- "normalized": true,
49
  "rstrip": true,
50
  "single_word": false,
51
- "special": false
52
  }
53
  },
54
  "additional_special_tokens": [
55
  "<unk>",
56
  "<s>",
57
- "</s>"
 
 
 
58
  ],
59
  "bos_token": "<s>",
60
  "clean_up_tokenization_spaces": false,
61
  "eos_token": "</s>",
62
  "legacy": true,
63
  "model_max_length": 1000000000000000019884624838656,
64
- "pad_token": "</s>",
65
  "sp_model_kwargs": {},
66
  "spaces_between_special_tokens": false,
67
  "tokenizer_class": "LlamaTokenizer",
 
27
  "special": true
28
  },
29
  "32000": {
30
+ "content": "<|eot|>",
31
  "lstrip": true,
32
+ "normalized": false,
33
  "rstrip": true,
34
  "single_word": false,
35
+ "special": true
36
  },
37
  "32001": {
38
  "content": "<|eof|>",
39
  "lstrip": true,
40
+ "normalized": false,
41
  "rstrip": true,
42
  "single_word": false,
43
+ "special": true
44
  },
45
  "32002": {
46
+ "content": "<|bof|>",
47
  "lstrip": true,
48
+ "normalized": false,
49
  "rstrip": true,
50
  "single_word": false,
51
+ "special": true
52
  }
53
  },
54
  "additional_special_tokens": [
55
  "<unk>",
56
  "<s>",
57
+ "</s>",
58
+ "<|eot|>",
59
+ "<|eof|>",
60
+ "<|bof|>"
61
  ],
62
  "bos_token": "<s>",
63
  "clean_up_tokenization_spaces": false,
64
  "eos_token": "</s>",
65
  "legacy": true,
66
  "model_max_length": 1000000000000000019884624838656,
67
+ "pad_token": null,
68
  "sp_model_kwargs": {},
69
  "spaces_between_special_tokens": false,
70
  "tokenizer_class": "LlamaTokenizer",