Training in progress, step 9500

- adapter_config.json +5 -5
- adapter_model.safetensors +1 -1
- special_tokens_map.json +0 -4
- tokenizer.json +8 -28
- tokenizer_config.json +4 -14
- training_args.bin +1 -1
adapter_config.json
CHANGED
@@ -22,13 +22,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "
-    "down_proj",
+    "q_proj",
     "o_proj",
-    "v_proj",
-    "k_proj",
     "gate_proj",
-    "
+    "k_proj",
+    "v_proj",
+    "down_proj",
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
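This hunk rewrites the LoRA target_modules list; after the change the adapter targets all seven linear projections of the block (attention q/k/v/o plus the MLP gate/up/down). A minimal sketch of an equivalent PEFT configuration follows; the rank, alpha and dropout values are assumptions not present in this diff, only target_modules and task_type come from the new adapter_config.json.

from peft import LoraConfig

# Sketch only: r, lora_alpha and lora_dropout are assumed values.
lora_config = LoraConfig(
    r=16,                # assumed
    lora_alpha=32,       # assumed
    lora_dropout=0.05,   # assumed
    target_modules=[
        "q_proj", "o_proj", "gate_proj",
        "k_proj", "v_proj", "down_proj", "up_proj",
    ],
    task_type="CAUSAL_LM",
)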
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b2fd4f2ca2d228c9aa0287573cdb4c0b4cac6aa158f3379c9bdabe47bd2360de
 size 2460946960
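Only the oid of this Git LFS pointer changes; the payload size (about 2.46 GB) is unchanged, so this is the adapter weights refreshed at step 9500. A hedged sketch of loading such a checkpoint with PEFT; both repository ids below are hypothetical placeholders, since neither the base model nor the adapter repo is named in this diff.

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholder ids, not taken from the commit.
base = AutoModelForCausalLM.from_pretrained("org/base-model")
model = PeftModel.from_pretrained(base, "org/sql-adapter-step-9500")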
special_tokens_map.json
CHANGED
@@ -1,8 +1,4 @@
 {
-  "additional_special_tokens": [
-    "[SQL]",
-    "[/SQL]"
-  ],
   "bos_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
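[SQL] and [/SQL] are dropped from additional_special_tokens here; together with the tokenizer_config.json hunks below, they become ordinary added tokens (special: false, normalized: true) instead of special tokens. A sketch of the two registration styles in transformers, using a placeholder repo id.

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/sql-adapter-step-9500")  # placeholder

# Previous state of this repo: registered as special tokens, exempt from
# normalization and strippable via decode(..., skip_special_tokens=True).
# tokenizer.add_special_tokens({"additional_special_tokens": ["[SQL]", "[/SQL]"]})

# State after this commit: plain added tokens that survive decoding as normal text.
tokenizer.add_tokens(["[SQL]", "[/SQL]"])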
tokenizer.json
CHANGED
@@ -437,8 +437,8 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized":
-      "special":
+      "normalized": true,
+      "special": false
     },
     {
       "id": 50300,
@@ -446,8 +446,8 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized":
-      "special":
+      "normalized": true,
+      "special": false
     }
   ],
   "normalizer": {
@@ -460,30 +460,10 @@
     "use_regex": true
   },
   "post_processor": {
-    "type": "
-    "single": [
-      {
-        "Sequence": {
-          "id": "A",
-          "type_id": 0
-        }
-      }
-    ],
-    "pair": [
-      {
-        "Sequence": {
-          "id": "A",
-          "type_id": 0
-        }
-      },
-      {
-        "Sequence": {
-          "id": "B",
-          "type_id": 1
-        }
-      }
-    ],
-    "special_tokens": {}
+    "type": "ByteLevel",
+    "add_prefix_space": false,
+    "trim_offsets": true,
+    "use_regex": true
   },
   "decoder": {
     "type": "ByteLevel",
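Besides flipping the two added tokens to normalized/non-special, this file swaps the old template-style post-processor (a block with "single" and "pair" sequences) for a plain ByteLevel post-processor. A sketch of making the same change programmatically, assuming a fast tokenizer whose Rust backend is reachable through backend_tokenizer; the repo id is a placeholder.

from tokenizers import processors
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/sql-adapter-step-9500")  # placeholder

# Replace whatever post-processor is configured with ByteLevel, matching
# the new "post_processor" block in tokenizer.json.
tokenizer.backend_tokenizer.post_processor = processors.ByteLevel(trim_offsets=True)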
tokenizer_config.json
CHANGED
@@ -1,6 +1,4 @@
 {
-  "add_bos_token": false,
-  "add_eos_token": false,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
@@ -382,34 +380,26 @@
     "50299": {
       "content": "[SQL]",
       "lstrip": false,
-      "normalized":
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
-      "special":
+      "special": false
     },
     "50300": {
       "content": "[/SQL]",
       "lstrip": false,
-      "normalized":
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
-      "special":
+      "special": false
     }
   },
-  "additional_special_tokens": [
-    "[SQL]",
-    "[/SQL]"
-  ],
   "bos_token": "<|endoftext|>",
   "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = 'You are a helpful assistant.' %}{% endif %}{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in loop_messages %}{% if loop.index0 == 0 %}{{'<|im_start|>system\n' + system_message + '<|im_end|>\n'}}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
-  "max_length": 1024,
   "model_max_length": 4096,
   "pad_token": "<|endoftext|>",
-  "stride": 0,
   "tokenizer_class": "GPTNeoXTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "<|endoftext|>"
 }
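Here the [SQL]/[/SQL] entries in added_tokens_decoder become normalized, non-special tokens, additional_special_tokens disappears, and several options that are no longer serialized (add_bos_token, add_eos_token, max_length, stride, truncation_side, truncation_strategy) are removed; the ChatML chat_template is untouched. A short usage sketch of that template; the repo id and messages are illustrative only.

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/sql-adapter-step-9500")  # placeholder

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Show all orders placed in 2024."},
]
# Renders the ChatML prompt defined by chat_template and appends
# "<|im_start|>assistant\n" because add_generation_prompt=True.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)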
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8415041ca672e470b777a760ebcf14fbca29ab93475dc4bd62233bcbf2ca1246
 size 4984
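As with adapter_model.safetensors, only the oid of this LFS pointer changes while the size stays at 4984 bytes. A sketch for checking a locally downloaded file against the sha256 recorded in its pointer; the local filename is assumed to match the repo path.

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file and return its hex sha256, as stored in the LFS pointer."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected value taken from the new training_args.bin pointer above.
expected = "8415041ca672e470b777a760ebcf14fbca29ab93475dc4bd62233bcbf2ca1246"
assert sha256_of("training_args.bin") == expected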