Zardos committed
Commit 51107f2 • 1 Parent(s): 2d5c6fb

safetensors

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/run/media/ralf/dat1/ml/nlp/models/mistral_7b/Misted-7B",
+  "_name_or_path": "/run/media/ralf/dat1/ml/nlp/models/mistral_7b/Mistral-7B-v0.1/",
   "architectures": [
     "MistralForCausalLM"
   ],
generation_config.json CHANGED
@@ -1,7 +1,6 @@
 {
   "_from_model_config": true,
   "bos_token_id": 1,
-  "eos_token_id": 32000,
-  "transformers_version": "4.35.2",
-  "use_cache": false
+  "eos_token_id": 2,
+  "transformers_version": "4.35.2"
 }
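
The eos_token_id change (32000 → 2) matches the base Mistral-7B vocabulary, where id 2 is the </s> end-of-sequence token. A minimal sanity check of the resulting file, assuming a local clone of this repository at a placeholder path and the transformers library installed:

from transformers import GenerationConfig

# Read generation_config.json from a local clone of this repository.
# "./Misted-7B" is a placeholder path, not part of the commit itself.
gen_cfg = GenerationConfig.from_pretrained("./Misted-7B")
print(gen_cfg.eos_token_id)  # expected: 2 after this change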
pytorch_model-00001-of-00003.bin → model-00001-of-00003.safetensors RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:308beccfa91c4a72e8970a116189228daa568a271d892ef1851011cc7e223ee3
-size 4943202016
+oid sha256:69d5a59e5ab287adc0d5fa604634b78097709317cc0e36088d27a605f97b2dca
+size 4943178720
pytorch_model-00002-of-00003.bin → model-00002-of-00003.safetensors RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c791b3a4aa15ad55a6606b52f42cbd5d72a67d9460d1d389fcb07fb5b2d50127
-size 4999844744
+oid sha256:36be088264b5edc3e0615c2f3b0615b10a936b623cbf44ae1d6b1290b19c3bab
+size 4999819336
pytorch_model-00003-of-00003.bin → model-00003-of-00003.safetensors RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:56f9c0564cadcc97535d77ec3a89f1c1c3c6aaad38de94f14ba7bbbeb81c0dc6
-size 4540553798
+oid sha256:8cbbb56566765d3f0ad6c4a822d6f89633b3840b200c8d661183b53fe3c52948
+size 4540532728
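
All three shards are renamed from pytorch_model-*.bin to model-*.safetensors with new LFS hashes and slightly smaller sizes, consistent with the checkpoint being re-saved in safetensors format. One way such a conversion can be done with transformers is sketched below; the paths are placeholders and the exact procedure used for this commit is not recorded here.

from transformers import AutoModelForCausalLM

# Load the original .bin shards (placeholder path) ...
model = AutoModelForCausalLM.from_pretrained(
    "./Misted-7B",
    torch_dtype="auto",
    low_cpu_mem_usage=True,
)
# ... and re-save them. safe_serialization=True writes model-0000X-of-0000Y.safetensors
# shards plus model.safetensors.index.json instead of pytorch_model-*.bin files.
model.save_pretrained("./Misted-7B-safetensors", safe_serialization=True)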
special_tokens_map.json CHANGED
@@ -1,9 +1,4 @@
 {
-  "additional_special_tokens": [
-    "<unk>",
-    "<s>",
-    "</s>"
-  ],
   "bos_token": {
     "content": "<s>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -43,13 +43,8 @@
       "special": false
     }
   },
-  "additional_special_tokens": [
-    "<unk>",
-    "<s>",
-    "</s>"
-  ],
+  "additional_special_tokens": [],
   "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "legacy": true,