Haimee committed
Commit cb4c80b
1 Parent(s): 7f235be

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
+  "base_model_name_or_path": "TheBloke/zephyr-7B-alpha-GPTQ",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ea0b4f985ef37d98adbf1700e6bdb5189c7394b2729997a24c16eb09ec4aecc6
+oid sha256:ea291f7758f4425828294c5295617465cd9ef2258164b2aaeffef9b4fdb37d51
 size 27280152
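
The safetensors entry above is a Git LFS pointer, not the weights themselves: oid is the SHA-256 of the real 27280152-byte blob. A minimal sketch of checking a downloaded file against its pointer, using only the standard library:

import hashlib

def lfs_oid(path, chunk_size=1 << 20):
    # Stream the file so large blobs don't need to fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Matches the new pointer's oid if the download is intact:
# ea291f7758f4425828294c5295617465cd9ef2258164b2aaeffef9b4fdb37d51
print(lfs_oid("adapter_model.safetensors"))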
runs/Apr17_05-49-58_57211e37a9ea/events.out.tfevents.1713333001.57211e37a9ea.434.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e21008b348a40c213d221297ededa8c227090c12bc99519d2f6caa809702c70f
-size 6388
+oid sha256:a1d2e3b6aa8c43ced7c66a8623b71e3266314cf10437457f0cfc26d5201a9983
+size 6599
runs/Apr18_05-26-11_e1959ffa57f3/events.out.tfevents.1713417972.e1959ffa57f3.435.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17cdc53ec91857ee1756331f4e7a358723a158e454a8e8d14be17312a95e533f
+size 5508
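
The runs/ entries are TensorBoard event files (also stored as LFS pointers). A minimal sketch of reading the scalars they contain, assuming the tensorboard package is installed:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at the run directory; the accumulator picks up the events file inside.
acc = EventAccumulator("runs/Apr18_05-26-11_e1959ffa57f3")
acc.Reload()
for tag in acc.Tags().get("scalars", []):
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)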
special_tokens_map.json CHANGED
@@ -1,4 +1,9 @@
 {
+  "additional_special_tokens": [
+    "<unk>",
+    "<s>",
+    "</s>"
+  ],
   "bos_token": {
     "content": "<s>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -1,7 +1,7 @@
 {
   "version": "1.0",
   "truncation": {
-    "direction": "Right",
+    "direction": "Left",
     "max_length": 512,
     "strategy": "LongestFirst",
     "stride": 0
tokenizer_config.json CHANGED
@@ -27,9 +27,12 @@
       "special": true
     }
   },
-  "additional_special_tokens": [],
+  "additional_special_tokens": [
+    "<unk>",
+    "<s>",
+    "</s>"
+  ],
   "bos_token": "<s>",
-  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
@@ -38,6 +41,7 @@
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
+  "truncation_side": "left",
   "unk_token": "<unk>",
-  "use_default_system_prompt": false
+  "use_default_system_prompt": true
 }
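
Setting truncation_side to "left" mirrors the tokenizer.json change at the transformers level (use_default_system_prompt is also flipped to true, and the chat_template entry is dropped). A minimal sketch of the effect, with the repo id again standing in for this model:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "TheBloke/zephyr-7B-alpha-GPTQ",
    truncation_side="left",
)
# With left-side truncation, tokens are dropped from the start of the prompt.
ids = tok("a very long prompt ...", truncation=True, max_length=512)["input_ids"]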
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:08d58ab02e09dd93624f3418ab9b7405bbceb3dd5025c7017f713f3053ce88b4
-size 4920
+oid sha256:492a476dd26685c4fde58048855b3e17b3be54f0b94e1748ac52a6ea7a67d183
+size 4984
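
training_args.bin is a pickled transformers.TrainingArguments object, so the size shift just reflects re-serialization with the new run's settings. A minimal sketch of inspecting it; note that torch.load unpickles arbitrary objects, so only run this on files you trust:

import torch

# weights_only=False is needed on recent torch versions to unpickle
# non-tensor objects such as TrainingArguments.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)
print(args.learning_rate, args.num_train_epochs)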