LukeOLuck committed
Commit dd65e94
1 Parent(s): d6f1cdb

Zero model

adapter_config.json DELETED
@@ -1,27 +0,0 @@
-{
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "NousResearch/Llama-2-7b-chat-hf",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_dropout": 0.1,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 64,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "v_proj",
-    "q_proj"
-  ],
-  "task_type": "CAUSAL_LM",
-  "use_rslora": false
-}
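
The deleted adapter_config.json above describes a PEFT LoRA adapter (r=64, lora_alpha=16, dropout 0.1, targeting q_proj and v_proj) trained on top of NousResearch/Llama-2-7b-chat-hf. The training script is not part of this commit, so the following is only a minimal sketch of how an equivalent config could be produced with the peft library; everything beyond the values shown in the file above is an assumption.

```python
# Hypothetical sketch: recreating the deleted adapter_config.json with peft.
# Only the values mirrored from the config above are taken from this commit.
from peft import LoraConfig

lora_config = LoraConfig(
    r=64,                                  # "r": 64
    lora_alpha=16,                         # "lora_alpha": 16
    lora_dropout=0.1,                      # "lora_dropout": 0.1
    bias="none",                           # "bias": "none"
    target_modules=["v_proj", "q_proj"],   # "target_modules"
    task_type="CAUSAL_LM",                 # "task_type": "CAUSAL_LM"
)

# Saving the config writes a file of the same shape as the one deleted here:
# lora_config.save_pretrained("adapter")   # -> adapter/adapter_config.json
```
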
adapter_model.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4aaca99642516451b13fd4212b1782e81d5e89ebd19a10cdb3c295a9b8d7e223
-size 67126232
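
adapter_model.safetensors was stored via Git LFS, so the diff above removes only the pointer (hash plus ~67 MB size). If the adapter weights and config were still present in a local directory, a typical way to attach them to the base model would look roughly like the sketch below; the adapter path is a placeholder, not something recorded in this commit.

```python
# Hypothetical sketch: loading a LoRA adapter of this kind with peft.
# "path/to/adapter" is a placeholder for a directory containing
# adapter_config.json and adapter_model.safetensors.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("NousResearch/Llama-2-7b-chat-hf")
model = PeftModel.from_pretrained(base, "path/to/adapter")
```
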
added_tokens.json DELETED
@@ -1,3 +0,0 @@
-{
-  "<pad>": 32000
-}
runs/Feb21_22-33-06_58a7a9316644/events.out.tfevents.1708555009.58a7a9316644.526.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5304d950caf7012a345b096cb21af69ee19c7cddca4d05650963b84afdc48483
-size 14707
runs/Feb21_23-01-34_d41dc4babadf/events.out.tfevents.1708556671.d41dc4babadf.855.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:db064c00faef8b69c4b4c5673cc0d516ba5ea2cc207879088a0c39d7194737eb
-size 14707
runs/Feb21_23-43-49_121366f5626f/events.out.tfevents.1708559127.121366f5626f.2223.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:64c76e73655e3492f6df38d72389e8ccdb9f2a20372497834dd7642b92b80c63
-size 18925
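
The three runs/.../events.out.tfevents.* files removed above are TensorBoard event logs from three training runs (also LFS pointers). Assuming the run directories were downloaded locally, they could be inspected with TensorBoard's Python API roughly as follows; the scalar tag names are illustrative, since the logged metrics are not visible in this diff.

```python
# Hypothetical sketch: reading scalar metrics out of a TensorBoard event file.
# The run path matches one of the deleted files; the tag name is an assumption.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Feb21_23-43-49_121366f5626f")
acc.Reload()                        # parse the event file(s) in that directory
print(acc.Tags()["scalars"])        # e.g. ["train/loss", "train/learning_rate"]
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)
```
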
special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "</s>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  }
-}
tokenizer.model DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
tokenizer_config.json DELETED
@@ -1,49 +0,0 @@
-{
-  "add_bos_token": true,
-  "add_eos_token": true,
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32000": {
-      "content": "<pad>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
-    }
-  },
-  "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
-  "legacy": false,
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
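
Taken together, the deleted added_tokens.json, special_tokens_map.json, tokenizer.model and tokenizer_config.json describe a slow LlamaTokenizer with an extra non-special "<pad>" token at id 32000, while pad_token itself is set to "</s>". How the original tokenizer was actually prepared is not recorded in this commit; the following is only a hedged sketch of a setup that would produce files of this shape.

```python
# Hypothetical sketch: a tokenizer setup consistent with the deleted files.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "NousResearch/Llama-2-7b-chat-hf",
    use_fast=False,        # slow LlamaTokenizer, so tokenizer.model is saved
    add_bos_token=True,    # "add_bos_token": true
    add_eos_token=True,    # "add_eos_token": true
)
tokenizer.add_tokens(["<pad>"])   # appended after the 32000-token Llama-2 vocab
tokenizer.pad_token = "</s>"      # matches "pad_token": "</s>" above

tokenizer.save_pretrained("tokenizer")   # writes tokenizer_config.json,
                                         # special_tokens_map.json,
                                         # added_tokens.json and tokenizer.model
```
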
training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:613d98d80e82308508e55354649b6a43102ecf259a9db3fb64def80308eca8b3
-size 4728
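
training_args.bin is the pickled transformers.TrainingArguments object that Trainer saves alongside its outputs, again removed here as an LFS pointer only. If the file were available, it could be inspected roughly as below; the actual hyperparameters are not visible in this diff, so the attribute names are just the standard TrainingArguments fields.

```python
# Hypothetical sketch: inspecting a Trainer-produced training_args.bin.
# The file itself was removed in this commit, so this cannot be run against it.
# transformers must be installed so the pickled TrainingArguments class resolves.
import torch

training_args = torch.load("training_args.bin", weights_only=False)
print(training_args.learning_rate, training_args.num_train_epochs)
```
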