cenkersisman committed
Commit 20afda5
1 Parent(s): 10a5f4f

Upload 8 files

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "mistralai/Mistral-7B-v0.1",
+  "_name_or_path": "mistral_turkish",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -15,26 +15,11 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "quantization_config": {
-    "_load_in_4bit": true,
-    "_load_in_8bit": false,
-    "bnb_4bit_compute_dtype": "bfloat16",
-    "bnb_4bit_quant_storage": "uint8",
-    "bnb_4bit_quant_type": "nf4",
-    "bnb_4bit_use_double_quant": true,
-    "llm_int8_enable_fp32_cpu_offload": false,
-    "llm_int8_has_fp16_weight": false,
-    "llm_int8_skip_modules": null,
-    "llm_int8_threshold": 6.0,
-    "load_in_4bit": true,
-    "load_in_8bit": false,
-    "quant_method": "bitsandbytes"
-  },
   "rms_norm_eps": 1e-05,
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float32",
   "transformers_version": "4.39.3",
   "use_cache": true,
   "vocab_size": 32000
model-00001-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fa96805f7a7b4fc1e7ae45b581c93c811f3029680d4cdda78bcd9b5e1dc31a46
+oid sha256:c45ff40bde76053811107de55e9b55726849b5497a60dbebfee1aa8092cdb52d
 size 4987196936
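
Only the Git LFS pointer for this shard changes: the oid field is the SHA-256 digest of the real ~5 GB file, so the new hash doubles as an integrity check for a download. A minimal sketch, assuming the shard sits in the current directory:

import hashlib

EXPECTED = "c45ff40bde76053811107de55e9b55726849b5497a60dbebfee1aa8092cdb52d"

# Stream the shard in 1 MiB chunks; the LFS pointer's oid is the
# SHA-256 of the full file content.
h = hashlib.sha256()
with open("model-00001-of-00006.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == EXPECTED, "shard does not match the LFS pointer"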
tokenizer_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
+  "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",