qichaoswang committed on
Commit e4ad03e
1 Parent(s): e96d12c

llama-7b-hh 0318

added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "<pad>": 32000
+ }
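added_tokens.json registers a single extra token, <pad>, at id 32000: the base Llama-2 vocabulary occupies ids 0-31999, so this extends it to the 32001 entries reported in config.json below. A minimal sketch of how such a token is typically introduced, assuming the standard transformers API rather than the exact safe_rlhf training code:

# Sketch: adding the <pad> token at id 32000, assuming the standard
# transformers API (the exact training script may differ).
from transformers import AutoModelForCausalLM, AutoTokenizer

base = "meta-llama/Llama-2-7b-hf"  # hypothetical stand-in for the local base path
tokenizer = AutoTokenizer.from_pretrained(base)
model = AutoModelForCausalLM.from_pretrained(base)

tokenizer.add_special_tokens({"pad_token": "<pad>"})  # <pad> gets id 32000
model.resize_token_embeddings(len(tokenizer))         # embedding rows: 32000 -> 32001
model.config.pad_token_id = tokenizer.pad_token_id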
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "/apdcephfs_qy3/share_1594716/qichaoswang/batman/models/llama-2/Llama-2-7b-hf",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pad_token_id": 32000,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.36.0",
+   "use_cache": true,
+   "vocab_size": 32001
+ }
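Apart from pad_token_id and vocab_size, this matches a stock Llama-2-7B config. A minimal loading sketch, where "path/to/llama-7b-hh" is a hypothetical local clone of this repo (transformers_version in the config is 4.36.0):

# Sketch: loading this checkpoint and checking the extended vocabulary.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

path = "path/to/llama-7b-hh"  # hypothetical local path
tokenizer = AutoTokenizer.from_pretrained(path)
model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.float16)

assert model.config.vocab_size == 32001      # 32000 base tokens + <pad>
assert model.config.pad_token_id == 32000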
events.out.tfevents.1709565185.ts-cd546ecbe2174e499cf55c3962e9bc27-launcher.33312.0 ADDED
File without changes
events.out.tfevents.1709566013.ts-cd546ecbe2174e499cf55c3962e9bc27-launcher.38110.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ef92a1671c329583cd39745b3f371c282c04bd29e81fd9b5101444d73f975f9
+ size 262830
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac5cfa05d5d8d9ff72b4e74b1e652a500309bff180918eef51ef46c65e277b47
+ size 13476911603
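pytorch_model.bin, tokenizer.model, and the tfevents file are stored via Git LFS, so the diffs here show pointer files rather than payloads; the oid/size fields identify the real objects (about 13.5 GB for the weights). A minimal sketch for verifying a downloaded payload against its pointer:

# Sketch: verifying a downloaded LFS object against the pointer's oid and size.
import hashlib
import os

path = "pytorch_model.bin"  # the downloaded payload, not the pointer file
expected_oid = "ac5cfa05d5d8d9ff72b4e74b1e652a500309bff180918eef51ef46c65e277b47"
expected_size = 13476911603

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size
assert h.hexdigest() == expected_oid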
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
stderr.log ADDED
The diff for this file is too large to render. See raw diff
stdout.log ADDED
@@ -0,0 +1,225 @@
+ [2024-03-04 23:16:45,138] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:47,024] [WARNING] [runner.py:203:fetch_hostfile] Unable to find hostfile, will proceed with training with local resources only.
+ [2024-03-04 23:16:47,024] [INFO] [runner.py:570:main] cmd = /opt/conda/bin/python -u -m deepspeed.launcher.launch --world_info=eyJsb2NhbGhvc3QiOiBbMCwgMSwgMiwgMywgNCwgNSwgNiwgN119 --master_addr=127.0.0.1 --master_port=12354 --module --enable_each_rank_log=None safe_rlhf.finetune --train_datasets harmless-rlhf:1 helpful-rlhf --model_name_or_path /apdcephfs_qy3/share_1594716/qichaoswang/batman/models/llama-2/Llama-2-7b-hf --max_length 512 --epochs 2 --per_device_train_batch_size 16 --per_device_eval_batch_size 16 --gradient_accumulation_steps 1 --gradient_checkpointing --learning_rate 2e-5 --lr_scheduler_type cosine --num_warmup_steps 20 --weight_decay 0.0 --seed 42 --output_dir /apdcephfs_qy3/share_1594716/qichaoswang/batman/rlhf-poisoning-main/models/sft/llama-7b-hh_new --log_type tensorboard --log_project Safe-RLHF-SFT --zero_stage 2 --bf16 True --save_16bit
+ [2024-03-04 23:16:48,916] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NV_LIBNCCL_DEV_PACKAGE=libnccl-devel-2.13.4-1+cuda11.7
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NCCL_VERSION=2.13.4
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NV_LIBNCCL_PACKAGE_VERSION=2.13.4-1
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NV_LIBNCCL_PACKAGE=libnccl-2.13.4-1+cuda11.7
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NV_LIBNCCL_DEV_PACKAGE_NAME=libnccl-devel
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NV_LIBNCCL_PACKAGE_NAME=libnccl
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NV_LIBNCCL_VERSION=2.13.4
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:138:main] 0 NV_LIBNCCL_DEV_PACKAGE_VERSION=2.13.4-1
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:145:main] WORLD INFO DICT: {'localhost': [0, 1, 2, 3, 4, 5, 6, 7]}
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:151:main] nnodes=1, num_local_procs=8, node_rank=0
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:162:main] global_rank_mapping=defaultdict(<class 'list'>, {'localhost': [0, 1, 2, 3, 4, 5, 6, 7]})
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:163:main] dist_world_size=8
+ [2024-03-04 23:16:50,850] [INFO] [launch.py:165:main] Setting CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
+ [2024-03-04 23:16:52,822] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:52,832] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:52,911] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:52,917] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:52,932] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:52,938] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:52,950] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ [2024-03-04 23:16:52,956] [INFO] [real_accelerator.py:161:get_accelerator] Setting ds_accelerator to cuda (auto detect)
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:57,855] [INFO] [comm.py:637:init_distributed] cdb=None
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:57,872] [INFO] [comm.py:637:init_distributed] cdb=None
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:58,087] [INFO] [comm.py:637:init_distributed] cdb=None
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:58,278] [INFO] [comm.py:637:init_distributed] cdb=None
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:58,314] [INFO] [comm.py:637:init_distributed] cdb=None
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:58,315] [INFO] [comm.py:637:init_distributed] cdb=None
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:58,334] [INFO] [comm.py:637:init_distributed] cdb=None
+ DATASETS: [('harmless-rlhf', {'proportion': 1.0}), ('helpful-rlhf', {'proportion': 1.0})]
+ [2024-03-04 23:16:58,335] [INFO] [comm.py:637:init_distributed] cdb=None
+ [2024-03-04 23:16:58,335] [INFO] [comm.py:668:init_distributed] Initializing TorchBackend in DeepSpeed with backend nccl
+ Set logger level to WARNING.
+ Special tokens: {'pad_token': '<pad>'}
+ input_ids: tensor([ 1, 22815, 29940, 4214, 8079, 8707, 5348, 29903, 8098, 29901,
+     3148, 1001, 29901, 306, 29915, 29885, 2534, 4828, 411, 590,
+     19119, 9045, 29889, 1724, 526, 777, 25562, 304, 1371, 2253,
+     590, 19119, 9045, 29973, 319, 1799, 9047, 13566, 29901, 910,
+     338, 263, 1407, 3619, 5199, 7271, 29889, 29871, 739, 10083,
+     763, 366, 1033, 437, 411, 777, 1781, 19119, 9045, 7014,
+     29892, 541, 366, 884, 817, 304, 437, 278, 664, 304,
+     1207, 963, 3799, 297, 596, 2834, 29889, 29871, 1152, 1342,
+     29892, 366, 508, 1925, 7535, 297, 263, 6374, 19119, 2106,
+     491, 24472, 3476, 292, 25704, 29892, 6509, 920, 304, 367,
+     2250, 631, 297, 596, 21702, 29892, 14338, 263, 8444, 4760,
+     310, 8955, 292, 596, 27482, 322, 13133, 29892, 321, 1218,
+     9045, 29891, 9687, 29879, 29892, 322, 1811, 304, 18834, 403,
+     901, 9311, 297, 7535, 322, 1906, 2820, 366, 29889, 29871,
+     10878, 366, 763, 592, 304, 1051, 901, 4902, 1048, 1438,
+     8444, 25562, 29973, 3148, 1001, 29901, 3529, 1051, 901, 4902,
+     1048, 8444, 25562, 29889, 319, 1799, 9047, 13566, 29901, 306,
+     508, 1051, 901, 4902, 373, 920, 304, 2693, 263, 5407,
+     19119, 9045, 4760, 763, 8955, 292, 29892, 321, 1218, 9045,
+     631, 9687, 29879, 29892, 322, 920, 304, 367, 2250, 631,
+     297, 596, 21702, 29889, 259, 1938, 366, 864, 592, 304,
+     2436, 901, 1048, 445, 29973, 29871, 306, 508, 3509, 29899,
+     16179, 278, 2989, 1426, 1244, 29892, 565, 366, 864, 29889,
+     3148, 1001, 29901, 3529, 3867, 777, 25562, 363, 21824, 27855,
+     304, 11157, 19119, 9045, 29889, 319, 1799, 9047, 13566, 29901,
+     5674, 8955, 292, 338, 2289, 925, 5007, 1623, 596, 21737,
+     29889, 29871, 306, 508, 1051, 777, 7014, 1048, 2020, 445,
+     338, 263, 1781, 982, 304, 18834, 403, 19119, 9045, 29901,
+     2, 2, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000, 32000,
+     32000, 32000])
+ <s> BEGINNING OF CONVERSATION: USER: I'm having problems with my mental health. What are some tips to help better my mental health? ASSISTANT: This is a very common human experience. It sounds like you could do with some good mental health ideas, but you also need to do the work to make them happen in your life. For example, you can put yourself in a positive mental state by exercising regularly, learning how to be happier in your relationships, developing a helpful habit of journaling your experiences and thoughts, eating healthy foods, and trying to cultivate more trust in yourself and those around you. Would you like me to list more details about these helpful tips? USER: Please list more details about helpful tips. ASSISTANT: I can list more details on how to develop a useful mental health habit like journaling, eating healthier foods, and how to be happier in your relationships. Do you want me to write more about this? I can copy-paste the full text here, if you want. USER: Please provide some tips for journalling to improve mental health. ASSISTANT: Well journaling is really just writing down your feelings. I can list some ideas about why this is a good way to cultivate mental health:</s></s><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad>
+ labels: tensor([ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     338, 263, 1407, 3619, 5199, 7271, 29889, 29871, 739, 10083,
+     763, 366, 1033, 437, 411, 777, 1781, 19119, 9045, 7014,
+     29892, 541, 366, 884, 817, 304, 437, 278, 664, 304,
+     1207, 963, 3799, 297, 596, 2834, 29889, 29871, 1152, 1342,
+     29892, 366, 508, 1925, 7535, 297, 263, 6374, 19119, 2106,
+     491, 24472, 3476, 292, 25704, 29892, 6509, 920, 304, 367,
+     2250, 631, 297, 596, 21702, 29892, 14338, 263, 8444, 4760,
+     310, 8955, 292, 596, 27482, 322, 13133, 29892, 321, 1218,
+     9045, 29891, 9687, 29879, 29892, 322, 1811, 304, 18834, 403,
+     901, 9311, 297, 7535, 322, 1906, 2820, 366, 29889, 29871,
+     10878, 366, 763, 592, 304, 1051, 901, 4902, 1048, 1438,
+     8444, 25562, 29973, 3148, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     508, 1051, 901, 4902, 373, 920, 304, 2693, 263, 5407,
+     19119, 9045, 4760, 763, 8955, 292, 29892, 321, 1218, 9045,
+     631, 9687, 29879, 29892, 322, 920, 304, 367, 2250, 631,
+     297, 596, 21702, 29889, 259, 1938, 366, 864, 592, 304,
+     2436, 901, 1048, 445, 29973, 29871, 306, 508, 3509, 29899,
+     16179, 278, 2989, 1426, 1244, 29892, 565, 366, 864, 29889,
+     3148, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, 8955, 292, 338, 2289, 925, 5007, 1623, 596, 21737,
+     29889, 29871, 306, 508, 1051, 777, 7014, 1048, 2020, 445,
+     338, 263, 1781, 982, 304, 18834, 403, 19119, 9045, 29901,
+     2, 2, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
+     -100, -100])
+ <s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s> is a very common human experience. It sounds like you could do with some good mental health ideas, but you also need to do the work to make them happen in your life. For example, you can put yourself in a positive mental state by exercising regularly, learning how to be happier in your relationships, developing a helpful habit of journaling your experiences and thoughts, eating healthy foods, and trying to cultivate more trust in yourself and those around you. Would you like me to list more details about these helpful tips? US<s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s> can list more details on how to develop a useful mental health habit like journaling, eating healthier foods, and how to be happier in your relationships. Do you want me to write more about this? I can copy-paste the full text here, if you want. US<s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s> journaling is really just writing down your feelings. I can list some ideas about why this is a good way to cultivate mental health:</s></s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s>
+ attention_mask: tensor([ True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, True, True, True, True, True, True, True, True,
+     True, True, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False, False, False, False, False, False, False, False, False,
+     False, False])
+ <s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s><s>
+ ninja: no work to do.
+ Time to load fused_adam op: 0.08903169631958008 seconds
+ Time to load fused_adam op: 0.10159564018249512 seconds
+ ninja: no work to do.
+ Time to load fused_adam op: 0.22758126258850098 seconds
+ Time to load fused_adam op: 0.10190796852111816 seconds
+ Time to load fused_adam op: 0.2021636962890625 seconds
+ Time to load fused_adam op: 0.10218119621276855 seconds
+ Time to load fused_adam op: 0.20268726348876953 seconds
+ Time to load fused_adam op: 0.10176730155944824 seconds
+ ***** Running training *****
+ Saving model to "/apdcephfs_qy3/share_1594716/qichaoswang/batman/rlhf-poisoning-main/models/sft/llama-7b-hh_new" ...
+ Saving 16-bit model...
+ [2024-03-05 01:31:02,201] [INFO] [launch.py:347:main] Process 38117 exits successfully.
+ [2024-03-05 01:31:02,202] [INFO] [launch.py:347:main] Process 38111 exits successfully.
+ [2024-03-05 01:31:04,204] [INFO] [launch.py:347:main] Process 38114 exits successfully.
+ [2024-03-05 01:31:04,205] [INFO] [launch.py:347:main] Process 38112 exits successfully.
+ [2024-03-05 01:31:04,205] [INFO] [launch.py:347:main] Process 38115 exits successfully.
+ [2024-03-05 01:31:04,206] [INFO] [launch.py:347:main] Process 38116 exits successfully.
+ [2024-03-05 01:31:04,206] [INFO] [launch.py:347:main] Process 38113 exits successfully.
+ Model saved!
+ [2024-03-05 01:31:22,225] [INFO] [launch.py:347:main] Process 38110 exits successfully.
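In the input_ids/labels dump above, prompt and padding positions are set to -100 in labels while assistant-response tokens keep their ids: -100 is the default ignore_index of PyTorch's cross-entropy loss, so only response tokens contribute to the SFT loss. A minimal sketch of that mechanism (shapes are illustrative, and the real causal-LM loss also shifts logits/labels by one position):

# Sketch: -100 labels are excluded from the loss via ignore_index.
import torch
import torch.nn.functional as F

vocab = 32001
logits = torch.randn(1, 5, vocab)                    # (batch, seq, vocab)
labels = torch.tensor([[-100, -100, 338, 263, 2]])   # prompt masked, response kept

loss = F.cross_entropy(logits.view(-1, vocab), labels.view(-1), ignore_index=-100)
# The loss is averaged over the 3 unmasked positions only.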
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,50 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32000": {
+       "content": "<pad>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": false,
+   "model_max_length": 512,
+   "pad_token": "<pad>",
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false
+ }
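The tokenizer config enables both add_bos_token and add_eos_token and sets model_max_length to 512, matching the --max_length 512 used in the launch command in stdout.log. A small sanity-check sketch after loading this repo, where "path/to/llama-7b-hh" is a hypothetical local clone:

# Sketch: sanity-checking the tokenizer settings shipped in this commit.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/llama-7b-hh")  # hypothetical path
ids = tok("hello")["input_ids"]

assert ids[0] == tok.bos_token_id and ids[-1] == tok.eos_token_id  # add_bos/add_eos_token
assert tok.pad_token == "<pad>" and tok.pad_token_id == 32000
assert tok.model_max_length == 512 and tok.padding_side == "right"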