Femboyuwu2000 committed on
Commit 8fe0581
Parent: 0c0ebc5

Training in progress, step 20

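This is an automatic checkpoint push from the Hugging Face Trainer: the training arguments recorded in wandb/debug.log below use save_strategy "steps" with save_steps 20, push_to_hub True, hub_strategy "checkpoint" and hub_model_id "Femboyuwu2000/bloomz-1b1-vn-chat", so every 20th step produces a commit titled "Training in progress, step N". A minimal sketch of that argument set (not the author's actual script; everything omitted stays at its defaults):

# Sketch only: values are copied from the training arguments logged below;
# the rest of the Trainer setup (model, dataset, tokenizer) is not shown.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="/kaggle/working/",
    save_strategy="steps",
    save_steps=20,                      # checkpoint -> Hub push every 20 steps
    save_total_limit=1,
    logging_steps=20,
    push_to_hub=True,
    hub_model_id="Femboyuwu2000/bloomz-1b1-vn-chat",
    hub_strategy="checkpoint",          # push intermediate checkpoints, not just the final model
    report_to=["tensorboard", "wandb"],
)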
adapter_config.json CHANGED
@@ -21,6 +21,7 @@
   "revision": null,
   "target_modules": [
     "word_embeddings",
+    "dense_h_to_4h",
     "query_key_valuelm_head"
   ],
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e02434f63bb76768d0764fb21761fc6bc5a8c5697635718d6ef1c79a814a08e1
-size 8077608
+oid sha256:5bdd0d7af5fcc36cb3a3c639b8d0e10933390cc6ced74159ac35d92a61985aa0
+size 13982248
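The new "dense_h_to_4h" target module is consistent with the size jump above (8,077,608 to 13,982,248 bytes): LoRA weights are now attached to BLOOM's MLP up-projection as well. "query_key_valuelm_head", unchanged by this commit, matches no module name in the BLOOM architecture and looks like an accidental concatenation of "query_key_value" and "lm_head". A hedged sketch of a peft.LoraConfig that would serialize to the target list in adapter_config.json; r, lora_alpha and lora_dropout are illustrative assumptions, only target_modules and task_type come from the file:

# Illustrative values for r / lora_alpha / lora_dropout; target_modules and
# task_type mirror adapter_config.json in this commit.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    task_type="CAUSAL_LM",
    target_modules=[
        "word_embeddings",
        "dense_h_to_4h",            # added in this commit
        "query_key_valuelm_head",   # pre-existing entry, see note above
    ],
)
print(lora_config.target_modules)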
runs/Apr12_07-40-41_e5a48bec8248/events.out.tfevents.1712907642.e5a48bec8248.443.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcd311551f206d6e178eae36161b33978c1915bacdaed37b693ddfca00e24da3
+size 5489
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:32e994f25267341e613f4d352977cc7a2847de358db5ece7a60fcf21be944170
+oid sha256:29d6911b5aeefa0beece74e38b3ce4711e31d40f8c9b6627972f1c5a74e68732
 size 4984
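adapter_model.safetensors, training_args.bin and the new TensorBoard event file are tracked with Git LFS, so the diffs above only touch pointer files: a spec version line, the SHA-256 of the stored blob ("oid") and its size in bytes. A small sketch of how such a pointer can be reproduced for a local artifact (the filename is a placeholder):

# Rebuilds the three fields of a Git LFS pointer (spec v1) for a local file.
import hashlib
from pathlib import Path

def lfs_pointer(path: str) -> str:
    data = Path(path).read_bytes()
    oid = hashlib.sha256(data).hexdigest()     # -> the "oid sha256:..." line
    return (
        "version https://git-lfs.github.com/spec/v1\n"
        f"oid sha256:{oid}\n"
        f"size {len(data)}\n"                  # size in bytes
    )

print(lfs_pointer("adapter_model.safetensors"))  # placeholder path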
wandb/debug-internal.log CHANGED
@@ -1,32 +1,32 @@
1
- 2024-04-12 07:35:55,749 INFO StreamThr :334 [internal.py:wandb_internal():86] W&B internal server running at pid: 334, started at: 2024-04-12 07:35:55.748821
2
- 2024-04-12 07:35:55,751 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status
3
- 2024-04-12 07:35:56,159 INFO WriterThread:334 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/run-bw7oy9ix.wandb
4
- 2024-04-12 07:35:56,160 DEBUG SenderThread:334 [sender.py:send():379] send: header
5
- 2024-04-12 07:35:56,163 DEBUG SenderThread:334 [sender.py:send():379] send: run
6
- 2024-04-12 07:35:56,307 INFO SenderThread:334 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files
7
- 2024-04-12 07:35:56,307 INFO SenderThread:334 [sender.py:_start_run_threads():1124] run started: bw7oy9ix with start time 1712907355.748886
8
- 2024-04-12 07:35:56,317 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: check_version
9
- 2024-04-12 07:35:56,317 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: check_version
10
- 2024-04-12 07:35:56,409 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: run_start
11
- 2024-04-12 07:35:56,421 DEBUG HandlerThread:334 [system_info.py:__init__():26] System info init
12
- 2024-04-12 07:35:56,421 DEBUG HandlerThread:334 [system_info.py:__init__():41] System info init done
13
- 2024-04-12 07:35:56,422 INFO HandlerThread:334 [system_monitor.py:start():194] Starting system monitor
14
- 2024-04-12 07:35:56,422 INFO SystemMonitor:334 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
- 2024-04-12 07:35:56,422 INFO HandlerThread:334 [system_monitor.py:probe():214] Collecting system info
16
- 2024-04-12 07:35:56,423 INFO SystemMonitor:334 [interfaces.py:start():190] Started cpu monitoring
17
- 2024-04-12 07:35:56,423 INFO SystemMonitor:334 [interfaces.py:start():190] Started disk monitoring
18
- 2024-04-12 07:35:56,424 INFO SystemMonitor:334 [interfaces.py:start():190] Started gpu monitoring
19
- 2024-04-12 07:35:56,425 INFO SystemMonitor:334 [interfaces.py:start():190] Started memory monitoring
20
- 2024-04-12 07:35:56,426 INFO SystemMonitor:334 [interfaces.py:start():190] Started network monitoring
21
- 2024-04-12 07:35:56,437 DEBUG HandlerThread:334 [system_info.py:probe():150] Probing system
22
- 2024-04-12 07:35:56,439 DEBUG HandlerThread:334 [gitlib.py:_init_repo():56] git repository is invalid
23
- 2024-04-12 07:35:56,439 DEBUG HandlerThread:334 [system_info.py:probe():198] Probing system done
24
- 2024-04-12 07:35:56,439 DEBUG HandlerThread:334 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-12T07:35:56.437403', 'startedAt': '2024-04-12T07:35:55.741618', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': 'e5a48bec8248', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.138, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5565.782459259033}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
- 2024-04-12 07:35:56,440 INFO HandlerThread:334 [system_monitor.py:probe():224] Finished collecting system info
26
- 2024-04-12 07:35:56,440 INFO HandlerThread:334 [system_monitor.py:probe():227] Publishing system info
27
- 2024-04-12 07:35:56,440 DEBUG HandlerThread:334 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
- 2024-04-12 07:35:57,309 INFO Thread-12 :334 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/conda-environment.yaml
29
- 2024-04-12 07:36:11,455 ERROR HandlerThread:334 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
  Traceback (most recent call last):
31
  File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
  subprocess.call(
@@ -37,194 +37,49 @@ Traceback (most recent call last):
37
  File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
  raise TimeoutExpired(self.args, timeout)
39
  subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
- 2024-04-12 07:36:11,456 DEBUG HandlerThread:334 [system_info.py:_save_conda():222] Saving conda packages done
41
- 2024-04-12 07:36:11,456 INFO HandlerThread:334 [system_monitor.py:probe():229] Finished publishing system info
42
- 2024-04-12 07:36:11,462 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
43
- 2024-04-12 07:36:11,462 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: keepalive
44
- 2024-04-12 07:36:11,462 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
45
- 2024-04-12 07:36:11,462 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: keepalive
46
- 2024-04-12 07:36:11,462 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
47
- 2024-04-12 07:36:11,462 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: keepalive
48
- 2024-04-12 07:36:11,463 DEBUG SenderThread:334 [sender.py:send():379] send: files
49
- 2024-04-12 07:36:11,463 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
- 2024-04-12 07:36:11,664 INFO wandb-upload_0:334 [upload_job.py:push():131] Uploaded file /tmp/tmpjph8qv3dwandb/f1up76ir-wandb-metadata.json
51
- 2024-04-12 07:36:12,312 INFO Thread-12 :334 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-metadata.json
52
- 2024-04-12 07:36:12,509 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: python_packages
53
- 2024-04-12 07:36:12,509 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: python_packages
54
- 2024-04-12 07:36:12,512 DEBUG SenderThread:334 [sender.py:send():379] send: telemetry
55
- 2024-04-12 07:36:12,523 DEBUG SenderThread:334 [sender.py:send():379] send: config
56
- 2024-04-12 07:36:12,525 DEBUG SenderThread:334 [sender.py:send():379] send: metric
57
- 2024-04-12 07:36:12,534 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
58
- 2024-04-12 07:36:12,535 DEBUG SenderThread:334 [sender.py:send():379] send: telemetry
59
- 2024-04-12 07:36:12,535 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
60
- 2024-04-12 07:36:12,535 DEBUG SenderThread:334 [sender.py:send():379] send: metric
61
- 2024-04-12 07:36:12,536 WARNING SenderThread:334 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
62
- 2024-04-12 07:36:12,536 DEBUG SenderThread:334 [sender.py:send():379] send: telemetry
63
- 2024-04-12 07:36:12,536 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
64
- 2024-04-12 07:36:13,313 INFO Thread-12 :334 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/requirements.txt
65
- 2024-04-12 07:36:13,313 INFO Thread-12 :334 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
66
- 2024-04-12 07:36:15,314 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
67
- 2024-04-12 07:36:17,014 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
68
- 2024-04-12 07:36:19,589 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
69
- 2024-04-12 07:36:19,592 DEBUG SenderThread:334 [sender.py:send():379] send: metric
70
- 2024-04-12 07:36:19,593 DEBUG SenderThread:334 [sender.py:send():379] send: metric
71
- 2024-04-12 07:36:19,593 DEBUG SenderThread:334 [sender.py:send():379] send: metric
72
- 2024-04-12 07:36:19,593 DEBUG SenderThread:334 [sender.py:send():379] send: metric
73
- 2024-04-12 07:36:19,593 DEBUG SenderThread:334 [sender.py:send():379] send: history
74
- 2024-04-12 07:36:19,593 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
75
- 2024-04-12 07:36:19,595 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
76
- 2024-04-12 07:36:20,316 INFO Thread-12 :334 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
77
- 2024-04-12 07:36:21,316 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
78
- 2024-04-12 07:36:22,300 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
79
- 2024-04-12 07:36:26,097 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
80
- 2024-04-12 07:36:26,098 DEBUG SenderThread:334 [sender.py:send():379] send: history
81
- 2024-04-12 07:36:26,098 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
82
- 2024-04-12 07:36:26,100 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
83
- 2024-04-12 07:36:26,318 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
84
- 2024-04-12 07:36:27,510 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
85
- 2024-04-12 07:36:27,511 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
86
- 2024-04-12 07:36:27,511 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
87
- 2024-04-12 07:36:27,582 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
88
- 2024-04-12 07:36:28,319 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/config.yaml
89
- 2024-04-12 07:36:29,319 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
90
- 2024-04-12 07:36:32,526 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
91
- 2024-04-12 07:36:32,527 DEBUG SenderThread:334 [sender.py:send():379] send: history
92
- 2024-04-12 07:36:32,527 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
93
- 2024-04-12 07:36:32,530 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
94
- 2024-04-12 07:36:33,242 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
95
- 2024-04-12 07:36:33,320 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
96
- 2024-04-12 07:36:35,321 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
97
- 2024-04-12 07:36:38,243 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
98
- 2024-04-12 07:36:39,263 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
99
- 2024-04-12 07:36:39,264 DEBUG SenderThread:334 [sender.py:send():379] send: history
100
- 2024-04-12 07:36:39,265 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
101
- 2024-04-12 07:36:39,267 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
102
- 2024-04-12 07:36:39,323 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
103
- 2024-04-12 07:36:41,324 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
104
- 2024-04-12 07:36:42,512 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
105
- 2024-04-12 07:36:42,512 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
106
- 2024-04-12 07:36:42,513 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
107
- 2024-04-12 07:36:43,604 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
108
- 2024-04-12 07:36:46,420 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
109
- 2024-04-12 07:36:46,421 DEBUG SenderThread:334 [sender.py:send():379] send: history
110
- 2024-04-12 07:36:46,422 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
111
- 2024-04-12 07:36:46,424 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
112
- 2024-04-12 07:36:47,326 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
113
- 2024-04-12 07:36:49,121 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
114
- 2024-04-12 07:36:49,327 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
115
- 2024-04-12 07:36:53,592 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
116
- 2024-04-12 07:36:53,593 DEBUG SenderThread:334 [sender.py:send():379] send: history
117
- 2024-04-12 07:36:53,593 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
118
- 2024-04-12 07:36:53,595 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
119
- 2024-04-12 07:36:54,299 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
120
- 2024-04-12 07:36:54,329 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
121
- 2024-04-12 07:36:55,330 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
122
- 2024-04-12 07:36:56,426 DEBUG SystemMonitor:334 [system_monitor.py:_start():172] Starting system metrics aggregation loop
123
- 2024-04-12 07:36:56,427 DEBUG SenderThread:334 [sender.py:send():379] send: stats
124
- 2024-04-12 07:36:57,510 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
125
- 2024-04-12 07:36:57,511 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
126
- 2024-04-12 07:36:57,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
127
- 2024-04-12 07:36:59,556 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
128
- 2024-04-12 07:36:59,727 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
129
- 2024-04-12 07:36:59,728 DEBUG SenderThread:334 [sender.py:send():379] send: history
130
- 2024-04-12 07:36:59,729 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
131
- 2024-04-12 07:36:59,729 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
132
- 2024-04-12 07:37:00,332 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
133
- 2024-04-12 07:37:03,333 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
134
- 2024-04-12 07:37:04,722 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
135
- 2024-04-12 07:37:06,767 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
136
- 2024-04-12 07:37:06,769 DEBUG SenderThread:334 [sender.py:send():379] send: history
137
- 2024-04-12 07:37:06,769 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
138
- 2024-04-12 07:37:06,772 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
139
- 2024-04-12 07:37:07,335 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
140
- 2024-04-12 07:37:09,336 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
141
- 2024-04-12 07:37:10,493 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
142
- 2024-04-12 07:37:12,512 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
143
- 2024-04-12 07:37:12,512 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
144
- 2024-04-12 07:37:12,515 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
145
- 2024-04-12 07:37:13,354 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
146
- 2024-04-12 07:37:13,355 DEBUG SenderThread:334 [sender.py:send():379] send: history
147
- 2024-04-12 07:37:13,356 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
148
- 2024-04-12 07:37:13,356 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
149
- 2024-04-12 07:37:14,338 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
150
- 2024-04-12 07:37:15,338 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
151
- 2024-04-12 07:37:16,091 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
152
- 2024-04-12 07:37:20,246 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
153
- 2024-04-12 07:37:20,247 DEBUG SenderThread:334 [sender.py:send():379] send: history
154
- 2024-04-12 07:37:20,247 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
155
- 2024-04-12 07:37:20,250 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
156
- 2024-04-12 07:37:20,340 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
157
- 2024-04-12 07:37:21,947 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
158
- 2024-04-12 07:37:23,341 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
159
- 2024-04-12 07:37:26,428 DEBUG SenderThread:334 [sender.py:send():379] send: stats
160
- 2024-04-12 07:37:26,430 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
161
- 2024-04-12 07:37:26,431 DEBUG SenderThread:334 [sender.py:send():379] send: history
162
- 2024-04-12 07:37:26,431 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
163
- 2024-04-12 07:37:26,432 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
164
- 2024-04-12 07:37:27,116 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
165
- 2024-04-12 07:37:27,343 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
166
- 2024-04-12 07:37:27,512 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
167
- 2024-04-12 07:37:27,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
168
- 2024-04-12 07:37:27,513 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
169
- 2024-04-12 07:37:29,344 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
170
- 2024-04-12 07:37:32,639 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
171
- 2024-04-12 07:37:33,407 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
172
- 2024-04-12 07:37:33,408 DEBUG SenderThread:334 [sender.py:send():379] send: history
173
- 2024-04-12 07:37:33,408 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
174
- 2024-04-12 07:37:33,411 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
175
- 2024-04-12 07:37:34,346 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
176
- 2024-04-12 07:37:35,347 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
177
- 2024-04-12 07:37:37,838 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
178
- 2024-04-12 07:37:40,002 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
179
- 2024-04-12 07:37:40,003 DEBUG SenderThread:334 [sender.py:send():379] send: history
180
- 2024-04-12 07:37:40,004 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
181
- 2024-04-12 07:37:40,004 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
182
- 2024-04-12 07:37:40,349 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
183
- 2024-04-12 07:37:42,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
184
- 2024-04-12 07:37:42,518 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
185
- 2024-04-12 07:37:42,518 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
186
- 2024-04-12 07:37:43,350 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
187
- 2024-04-12 07:37:43,720 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
188
- 2024-04-12 07:37:47,145 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
189
- 2024-04-12 07:37:47,146 DEBUG SenderThread:334 [sender.py:send():379] send: history
190
- 2024-04-12 07:37:47,146 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
191
- 2024-04-12 07:37:47,148 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
192
- 2024-04-12 07:37:47,352 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
193
- 2024-04-12 07:37:49,353 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
194
- 2024-04-12 07:37:49,580 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
195
- 2024-04-12 07:37:53,263 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
196
- 2024-04-12 07:37:53,264 DEBUG SenderThread:334 [sender.py:send():379] send: history
197
- 2024-04-12 07:37:53,264 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
198
- 2024-04-12 07:37:53,265 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
199
- 2024-04-12 07:37:53,354 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
200
- 2024-04-12 07:37:55,067 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
201
- 2024-04-12 07:37:55,355 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
202
- 2024-04-12 07:37:56,429 DEBUG SenderThread:334 [sender.py:send():379] send: stats
203
- 2024-04-12 07:37:57,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
204
- 2024-04-12 07:37:57,513 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
205
- 2024-04-12 07:37:57,516 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
206
- 2024-04-12 07:38:00,641 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
207
- 2024-04-12 07:38:00,976 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
208
- 2024-04-12 07:38:00,978 DEBUG SenderThread:334 [sender.py:send():379] send: history
209
- 2024-04-12 07:38:00,978 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
210
- 2024-04-12 07:38:00,980 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
211
- 2024-04-12 07:38:01,357 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
212
- 2024-04-12 07:38:03,358 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
213
- 2024-04-12 07:38:05,666 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
214
- 2024-04-12 07:38:08,120 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
215
- 2024-04-12 07:38:08,121 DEBUG SenderThread:334 [sender.py:send():379] send: history
216
- 2024-04-12 07:38:08,121 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
217
- 2024-04-12 07:38:08,121 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
218
- 2024-04-12 07:38:08,360 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
219
- 2024-04-12 07:38:09,361 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
220
- 2024-04-12 07:38:11,565 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
221
- 2024-04-12 07:38:12,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
222
- 2024-04-12 07:38:12,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
223
- 2024-04-12 07:38:12,514 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
224
- 2024-04-12 07:38:15,302 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: partial_history
225
- 2024-04-12 07:38:15,303 DEBUG SenderThread:334 [sender.py:send():379] send: history
226
- 2024-04-12 07:38:15,303 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: summary_record
227
- 2024-04-12 07:38:15,305 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
228
- 2024-04-12 07:38:15,363 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
229
- 2024-04-12 07:38:16,980 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
230
- 2024-04-12 07:38:17,364 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
 
1
+ 2024-04-12 07:40:44,508 INFO StreamThr :485 [internal.py:wandb_internal():86] W&B internal server running at pid: 485, started at: 2024-04-12 07:40:44.508065
2
+ 2024-04-12 07:40:44,510 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-12 07:40:44,782 INFO WriterThread:485 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240412_074044-widovcn3/run-widovcn3.wandb
4
+ 2024-04-12 07:40:44,782 DEBUG SenderThread:485 [sender.py:send():379] send: header
5
+ 2024-04-12 07:40:44,785 DEBUG SenderThread:485 [sender.py:send():379] send: run
6
+ 2024-04-12 07:40:44,894 INFO SenderThread:485 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240412_074044-widovcn3/files
7
+ 2024-04-12 07:40:44,895 INFO SenderThread:485 [sender.py:_start_run_threads():1124] run started: widovcn3 with start time 1712907644.507718
8
+ 2024-04-12 07:40:44,903 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-12 07:40:44,904 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-12 07:40:44,996 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-12 07:40:45,008 DEBUG HandlerThread:485 [system_info.py:__init__():26] System info init
12
+ 2024-04-12 07:40:45,008 DEBUG HandlerThread:485 [system_info.py:__init__():41] System info init done
13
+ 2024-04-12 07:40:45,008 INFO HandlerThread:485 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-12 07:40:45,008 INFO SystemMonitor:485 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-12 07:40:45,008 INFO HandlerThread:485 [system_monitor.py:probe():214] Collecting system info
16
+ 2024-04-12 07:40:45,009 INFO SystemMonitor:485 [interfaces.py:start():190] Started cpu monitoring
17
+ 2024-04-12 07:40:45,009 INFO SystemMonitor:485 [interfaces.py:start():190] Started disk monitoring
18
+ 2024-04-12 07:40:45,011 INFO SystemMonitor:485 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-12 07:40:45,012 INFO SystemMonitor:485 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-12 07:40:45,013 INFO SystemMonitor:485 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-12 07:40:45,023 DEBUG HandlerThread:485 [system_info.py:probe():150] Probing system
22
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [system_info.py:probe():198] Probing system done
24
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-12T07:40:45.023659', 'startedAt': '2024-04-12T07:40:44.501264', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': 'e5a48bec8248', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.138, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5565.782573699951}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-12 07:40:45,026 INFO HandlerThread:485 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-12 07:40:45,026 INFO HandlerThread:485 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-12 07:40:45,897 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/conda-environment.yaml
29
+ 2024-04-12 07:41:00,040 ERROR HandlerThread:485 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
  Traceback (most recent call last):
31
  File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
  subprocess.call(
 
37
  File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
  raise TimeoutExpired(self.args, timeout)
39
  subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-12 07:41:00,041 DEBUG HandlerThread:485 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-12 07:41:00,042 INFO HandlerThread:485 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-12 07:41:00,047 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-12 07:41:00,047 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-12 07:41:00,047 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-12 07:41:00,048 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-12 07:41:00,048 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-12 07:41:00,048 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: keepalive
48
+ 2024-04-12 07:41:00,048 DEBUG SenderThread:485 [sender.py:send():379] send: files
49
+ 2024-04-12 07:41:00,049 INFO SenderThread:485 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
+ 2024-04-12 07:41:00,245 INFO wandb-upload_0:485 [upload_job.py:push():131] Uploaded file /tmp/tmpqpxjmay6wandb/iki4gmew-wandb-metadata.json
51
+ 2024-04-12 07:41:00,900 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/wandb-metadata.json
52
+ 2024-04-12 07:41:01,034 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: python_packages
53
+ 2024-04-12 07:41:01,034 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: python_packages
54
+ 2024-04-12 07:41:01,038 DEBUG SenderThread:485 [sender.py:send():379] send: telemetry
55
+ 2024-04-12 07:41:01,043 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: stop_status
56
+ 2024-04-12 07:41:01,043 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: stop_status
57
+ 2024-04-12 07:41:01,051 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: internal_messages
58
+ 2024-04-12 07:41:01,102 DEBUG SenderThread:485 [sender.py:send():379] send: config
59
+ 2024-04-12 07:41:01,103 DEBUG SenderThread:485 [sender.py:send():379] send: metric
60
+ 2024-04-12 07:41:01,103 DEBUG SenderThread:485 [sender.py:send():379] send: telemetry
61
+ 2024-04-12 07:41:01,104 DEBUG SenderThread:485 [sender.py:send():379] send: metric
62
+ 2024-04-12 07:41:01,104 WARNING SenderThread:485 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
63
+ 2024-04-12 07:41:01,104 DEBUG SenderThread:485 [sender.py:send():379] send: telemetry
64
+ 2024-04-12 07:41:01,900 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/output.log
65
+ 2024-04-12 07:41:01,901 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/requirements.txt
66
+ 2024-04-12 07:41:03,901 INFO Thread-12 :485 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/output.log
67
+ 2024-04-12 07:41:05,568 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
68
+ 2024-04-12 07:41:10,569 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-12 07:41:15,575 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
70
+ 2024-04-12 07:41:15,907 INFO Thread-12 :485 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/config.yaml
71
+ 2024-04-12 07:41:16,035 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: stop_status
72
+ 2024-04-12 07:41:16,036 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: internal_messages
73
+ 2024-04-12 07:41:16,036 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-12 07:41:21,106 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-12 07:41:22,063 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: partial_history
76
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
77
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
78
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
79
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
80
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: history
81
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: summary_record
82
+ 2024-04-12 07:41:22,068 INFO SenderThread:485 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
83
+ 2024-04-12 07:41:22,910 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/wandb-summary.json
84
+ 2024-04-12 07:41:23,910 INFO Thread-12 :485 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/output.log
85
+ 2024-04-12 07:41:26,797 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
wandb/debug.log CHANGED
@@ -1,31 +1,31 @@
1
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Configure stats pid to 289
3
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_setup.py:_flush():76] Applying login settings: {}
9
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/logs/debug.log
10
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/logs/debug-internal.log
11
- 2024-04-12 07:35:55,744 INFO MainThread:289 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7c2cd1fccd60>
12
- 2024-04-12 07:35:55,745 INFO MainThread:289 [wandb_init.py:init():567] calling init triggers
13
- 2024-04-12 07:35:55,745 INFO MainThread:289 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
14
  config: {}
15
- 2024-04-12 07:35:55,745 INFO MainThread:289 [wandb_init.py:init():617] starting backend
16
- 2024-04-12 07:35:55,745 INFO MainThread:289 [wandb_init.py:init():621] setting up manager
17
- 2024-04-12 07:35:55,747 INFO MainThread:289 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
18
- 2024-04-12 07:35:55,748 INFO MainThread:289 [wandb_init.py:init():629] backend started and connected
19
- 2024-04-12 07:35:55,761 INFO MainThread:289 [wandb_run.py:_label_probe_notebook():1299] probe notebook
20
- 2024-04-12 07:35:56,159 INFO MainThread:289 [wandb_init.py:init():721] updated telemetry
21
- 2024-04-12 07:35:56,162 INFO MainThread:289 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
22
- 2024-04-12 07:35:56,316 INFO MainThread:289 [wandb_run.py:_on_init():2344] communicating current version
23
- 2024-04-12 07:35:56,402 INFO MainThread:289 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
24
 
25
- 2024-04-12 07:35:56,403 INFO MainThread:289 [wandb_init.py:init():805] starting run threads in backend
26
- 2024-04-12 07:36:12,510 INFO MainThread:289 [wandb_run.py:_console_start():2323] atexit reg
27
- 2024-04-12 07:36:12,510 INFO MainThread:289 [wandb_run.py:_redirect():2178] redirect: wrap_raw
28
- 2024-04-12 07:36:12,511 INFO MainThread:289 [wandb_run.py:_redirect():2243] Wrapping output streams.
29
- 2024-04-12 07:36:12,511 INFO MainThread:289 [wandb_run.py:_redirect():2268] Redirects installed.
30
- 2024-04-12 07:36:12,512 INFO MainThread:289 [wandb_init.py:init():848] run started, returning control to user process
31
- 2024-04-12 07:36:12,518 INFO MainThread:289 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 250880, 'hidden_size': 1536, 'n_layer': 24, 'n_head': 16, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'use_cache': False, 'pretraining_tp': 1, 'apply_residual_connection_post_layernorm': False, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'bos_token_id': 1, 'eos_token_id': 2, 'slow_but_exact': False, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['BloomForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': 3, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'bigscience/bloomz-1b1', 'transformers_version': '4.39.3', 'attention_softmax_in_fp32': True, 'bias_dropout_fusion': True, 'unk_token_id': 0, 'masked_softmax_fusion': True, 'model_type': 'bloom', 'n_inner': None, 'offset_alibi': 100, 'seq_length': 2048, 'skip_bias_add': True, 'skip_bias_add_qkv': False, 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': True, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr12_07-35-52_e5a48bec8248', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 
'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 0, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': 'Femboyuwu2000/bloomz-1b1-vn-chat', 'hub_strategy': 'checkpoint', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
 
1
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Configure stats pid to 443
3
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240412_074044-widovcn3/logs/debug.log
10
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240412_074044-widovcn3/logs/debug-internal.log
11
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x79825186be20>
12
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():567] calling init triggers
13
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
14
  config: {}
15
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():617] starting backend
16
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():621] setting up manager
17
+ 2024-04-12 07:40:44,506 INFO MainThread:443 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
18
+ 2024-04-12 07:40:44,507 INFO MainThread:443 [wandb_init.py:init():629] backend started and connected
19
+ 2024-04-12 07:40:44,521 INFO MainThread:443 [wandb_run.py:_label_probe_notebook():1299] probe notebook
20
+ 2024-04-12 07:40:44,781 INFO MainThread:443 [wandb_init.py:init():721] updated telemetry
21
+ 2024-04-12 07:40:44,784 INFO MainThread:443 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
22
+ 2024-04-12 07:40:44,902 INFO MainThread:443 [wandb_run.py:_on_init():2344] communicating current version
23
+ 2024-04-12 07:40:44,988 INFO MainThread:443 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
24
 
25
+ 2024-04-12 07:40:44,988 INFO MainThread:443 [wandb_init.py:init():805] starting run threads in backend
26
+ 2024-04-12 07:41:01,035 INFO MainThread:443 [wandb_run.py:_console_start():2323] atexit reg
27
+ 2024-04-12 07:41:01,035 INFO MainThread:443 [wandb_run.py:_redirect():2178] redirect: wrap_raw
28
+ 2024-04-12 07:41:01,036 INFO MainThread:443 [wandb_run.py:_redirect():2243] Wrapping output streams.
29
+ 2024-04-12 07:41:01,036 INFO MainThread:443 [wandb_run.py:_redirect():2268] Redirects installed.
30
+ 2024-04-12 07:41:01,037 INFO MainThread:443 [wandb_init.py:init():848] run started, returning control to user process
31
+ 2024-04-12 07:41:01,044 INFO MainThread:443 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 250880, 'hidden_size': 1536, 'n_layer': 24, 'n_head': 16, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'use_cache': False, 'pretraining_tp': 1, 'apply_residual_connection_post_layernorm': False, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'bos_token_id': 1, 'eos_token_id': 2, 'slow_but_exact': False, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['BloomForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': 3, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'bigscience/bloomz-1b1', 'transformers_version': '4.39.3', 'attention_softmax_in_fp32': True, 'bias_dropout_fusion': True, 'unk_token_id': 0, 'masked_softmax_fusion': True, 'model_type': 'bloom', 'n_inner': None, 'offset_alibi': 100, 'seq_length': 2048, 'skip_bias_add': True, 'skip_bias_add_qkv': False, 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': True, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr12_07-40-41_e5a48bec8248', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 
'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 0, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': 'Femboyuwu2000/bloomz-1b1-vn-chat', 'hub_strategy': 'checkpoint', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
wandb/run-20240412_073555-bw7oy9ix/files/output.log CHANGED
@@ -35,3 +35,42 @@
35
  /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
36
  warnings.warn(
37
  /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
38
+ warnings.warn(
39
+ trainable params: 2,019,328 || all params: 1,067,333,632 || trainable%: 0.18919370096266205
40
+ PeftModelForCausalLM(
41
+ (base_model): LoraModel(
42
+ (model): BloomForCausalLM(
43
+ (transformer): BloomModel(
44
+ (word_embeddings): lora.Embedding(
45
+ (base_layer): Embedding(250880, 1536)
46
+ (lora_dropout): ModuleDict(
47
+ (default): Dropout(p=0.01, inplace=False)
48
+ )
49
+ (lora_A): ModuleDict()
50
+ (lora_B): ModuleDict()
51
+ (lora_embedding_A): ParameterDict( (default): Parameter containing: [torch.cuda.FloatTensor of size 8x250880 (cuda:0)])
52
+ (lora_embedding_B): ParameterDict( (default): Parameter containing: [torch.cuda.FloatTensor of size 1536x8 (cuda:0)])
53
+ )
54
+ (word_embeddings_layernorm): LayerNorm((1536,), eps=1e-05, elementwise_affine=True)
55
+ (h): ModuleList(
56
+ (0-23): 24 x BloomBlock(
57
+ (input_layernorm): LayerNorm((1536,), eps=1e-05, elementwise_affine=True)
58
+ (self_attention): BloomAttention(
59
+ (query_key_value): Linear4bit(in_features=1536, out_features=4608, bias=True)
60
+ (dense): Linear4bit(in_features=1536, out_features=1536, bias=True)
61
+ (attention_dropout): Dropout(p=0.0, inplace=False)
62
+ )
63
+ (post_attention_layernorm): LayerNorm((1536,), eps=1e-05, elementwise_affine=True)
64
+ (mlp): BloomMLP(
65
+ (dense_h_to_4h): Linear4bit(in_features=1536, out_features=6144, bias=True)
66
+ (gelu_impl): BloomGelu()
67
+ (dense_4h_to_h): Linear4bit(in_features=6144, out_features=1536, bias=True)
68
+ )
69
+ )
70
+ )
71
+ (ln_f): LayerNorm((1536,), eps=1e-05, elementwise_affine=True)
72
+ )
73
+ (lm_head): Linear(in_features=1536, out_features=250880, bias=False)
74
+ )
75
+ )
76
+ )
wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json CHANGED
@@ -1 +1 @@
1
- {"train/loss": 4.3795, "train/grad_norm": 78.24981689453125, "train/learning_rate": 1.8e-05, "train/epoch": 0.01, "train/global_step": 360, "_timestamp": 1712907495.3015952, "_runtime": 139.55270910263062, "_step": 17}
 
1
+ {"train/loss": 4.3795, "train/grad_norm": 78.24981689453125, "train/learning_rate": 1.8e-05, "train/epoch": 0.01, "train/global_step": 360, "_timestamp": 1712907495.3015952, "_runtime": 139.55270910263062, "_step": 17, "_wandb": {"runtime": 146}}
wandb/run-20240412_073555-bw7oy9ix/logs/debug-internal.log CHANGED
@@ -228,3 +228,203 @@ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after
228
  2024-04-12 07:38:15,363 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
229
  2024-04-12 07:38:16,980 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
230
  2024-04-12 07:38:17,364 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
231
+ 2024-04-12 07:38:18,991 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: pause
232
+ 2024-04-12 07:38:18,991 INFO HandlerThread:334 [handler.py:handle_request_pause():708] stopping system metrics thread
233
+ 2024-04-12 07:38:18,991 INFO HandlerThread:334 [system_monitor.py:finish():203] Stopping system monitor
234
+ 2024-04-12 07:38:18,991 DEBUG SystemMonitor:334 [system_monitor.py:_start():179] Finished system metrics aggregation loop
235
+ 2024-04-12 07:38:18,991 DEBUG SystemMonitor:334 [system_monitor.py:_start():183] Publishing last batch of metrics
236
+ 2024-04-12 07:38:18,993 INFO HandlerThread:334 [interfaces.py:finish():202] Joined cpu monitor
237
+ 2024-04-12 07:38:18,993 INFO HandlerThread:334 [interfaces.py:finish():202] Joined disk monitor
238
+ 2024-04-12 07:38:19,003 INFO HandlerThread:334 [interfaces.py:finish():202] Joined gpu monitor
239
+ 2024-04-12 07:38:19,004 INFO HandlerThread:334 [interfaces.py:finish():202] Joined memory monitor
240
+ 2024-04-12 07:38:19,004 INFO HandlerThread:334 [interfaces.py:finish():202] Joined network monitor
241
+ 2024-04-12 07:38:19,004 DEBUG SenderThread:334 [sender.py:send():379] send: stats
242
+ 2024-04-12 07:38:22,005 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
243
+ 2024-04-12 07:38:27,006 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
244
+ 2024-04-12 07:38:27,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
245
+ 2024-04-12 07:38:27,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
246
+ 2024-04-12 07:38:27,514 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
247
+ 2024-04-12 07:38:32,611 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
248
+ 2024-04-12 07:38:37,612 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
249
+ 2024-04-12 07:38:42,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
250
+ 2024-04-12 07:38:42,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
251
+ 2024-04-12 07:38:42,514 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
252
+ 2024-04-12 07:38:42,619 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
253
+ 2024-04-12 07:38:47,620 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
254
+ 2024-04-12 07:38:52,621 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
255
+ 2024-04-12 07:38:57,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
256
+ 2024-04-12 07:38:57,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
257
+ 2024-04-12 07:38:57,514 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
258
+ 2024-04-12 07:38:58,590 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
259
+ 2024-04-12 07:39:03,591 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
260
+ 2024-04-12 07:39:06,814 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: resume
261
+ 2024-04-12 07:39:06,814 INFO HandlerThread:334 [handler.py:handle_request_resume():699] starting system metrics thread
262
+ 2024-04-12 07:39:06,814 INFO HandlerThread:334 [system_monitor.py:start():194] Starting system monitor
263
+ 2024-04-12 07:39:06,814 INFO SystemMonitor:334 [system_monitor.py:_start():158] Starting system asset monitoring threads
264
+ 2024-04-12 07:39:06,815 INFO SystemMonitor:334 [interfaces.py:start():190] Started cpu monitoring
265
+ 2024-04-12 07:39:06,815 INFO SystemMonitor:334 [interfaces.py:start():190] Started disk monitoring
266
+ 2024-04-12 07:39:06,818 INFO SystemMonitor:334 [interfaces.py:start():190] Started gpu monitoring
267
+ 2024-04-12 07:39:06,818 INFO SystemMonitor:334 [interfaces.py:start():190] Started memory monitoring
268
+ 2024-04-12 07:39:06,819 INFO SystemMonitor:334 [interfaces.py:start():190] Started network monitoring
269
+ 2024-04-12 07:39:08,592 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
270
+ 2024-04-12 07:39:10,271 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: pause
271
+ 2024-04-12 07:39:10,271 INFO HandlerThread:334 [handler.py:handle_request_pause():708] stopping system metrics thread
272
+ 2024-04-12 07:39:10,271 INFO HandlerThread:334 [system_monitor.py:finish():203] Stopping system monitor
273
+ 2024-04-12 07:39:10,271 DEBUG SystemMonitor:334 [system_monitor.py:_start():172] Starting system metrics aggregation loop
274
+ 2024-04-12 07:39:10,272 DEBUG SystemMonitor:334 [system_monitor.py:_start():179] Finished system metrics aggregation loop
275
+ 2024-04-12 07:39:10,272 DEBUG SystemMonitor:334 [system_monitor.py:_start():183] Publishing last batch of metrics
276
+ 2024-04-12 07:39:10,275 INFO HandlerThread:334 [interfaces.py:finish():202] Joined cpu monitor
277
+ 2024-04-12 07:39:10,276 INFO HandlerThread:334 [interfaces.py:finish():202] Joined disk monitor
278
+ 2024-04-12 07:39:10,283 INFO HandlerThread:334 [interfaces.py:finish():202] Joined gpu monitor
279
+ 2024-04-12 07:39:10,283 INFO HandlerThread:334 [interfaces.py:finish():202] Joined memory monitor
280
+ 2024-04-12 07:39:10,283 INFO HandlerThread:334 [interfaces.py:finish():202] Joined network monitor
281
+ 2024-04-12 07:39:10,284 DEBUG SenderThread:334 [sender.py:send():379] send: stats
282
+ 2024-04-12 07:39:12,386 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
283
+ 2024-04-12 07:39:12,513 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
284
+ 2024-04-12 07:39:12,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
285
+ 2024-04-12 07:39:12,514 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
286
+ 2024-04-12 07:39:14,549 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
287
+ 2024-04-12 07:39:19,549 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
288
+ 2024-04-12 07:39:24,550 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
289
+ 2024-04-12 07:39:27,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
290
+ 2024-04-12 07:39:27,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
291
+ 2024-04-12 07:39:27,514 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
292
+ 2024-04-12 07:39:29,632 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
293
+ 2024-04-12 07:39:34,633 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
294
+ 2024-04-12 07:39:39,634 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
295
+ 2024-04-12 07:39:42,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
296
+ 2024-04-12 07:39:42,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
297
+ 2024-04-12 07:39:42,515 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
298
+ 2024-04-12 07:39:45,598 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
299
+ 2024-04-12 07:39:50,599 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
300
+ 2024-04-12 07:39:55,601 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
301
+ 2024-04-12 07:39:57,514 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: stop_status
302
+ 2024-04-12 07:39:57,515 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
303
+ 2024-04-12 07:39:57,515 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: stop_status
304
+ 2024-04-12 07:40:00,633 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
305
+ 2024-04-12 07:40:02,812 DEBUG SenderThread:334 [sender.py:send():379] send: exit
306
+ 2024-04-12 07:40:02,812 INFO SenderThread:334 [sender.py:send_exit():586] handling exit code: 0
307
+ 2024-04-12 07:40:02,812 INFO SenderThread:334 [sender.py:send_exit():588] handling runtime: 146
308
+ 2024-04-12 07:40:02,814 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
309
+ 2024-04-12 07:40:02,815 INFO SenderThread:334 [sender.py:send_exit():594] send defer
310
+ 2024-04-12 07:40:02,815 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
311
+ 2024-04-12 07:40:02,815 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 0
312
+ 2024-04-12 07:40:02,815 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
313
+ 2024-04-12 07:40:02,816 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 0
314
+ 2024-04-12 07:40:02,816 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 1
315
+ 2024-04-12 07:40:02,816 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
316
+ 2024-04-12 07:40:02,816 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 1
317
+ 2024-04-12 07:40:02,816 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
318
+ 2024-04-12 07:40:02,816 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 1
319
+ 2024-04-12 07:40:02,816 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 2
320
+ 2024-04-12 07:40:02,816 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
321
+ 2024-04-12 07:40:02,816 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 2
322
+ 2024-04-12 07:40:02,817 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
323
+ 2024-04-12 07:40:02,817 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 2
324
+ 2024-04-12 07:40:02,817 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 3
325
+ 2024-04-12 07:40:02,817 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
326
+ 2024-04-12 07:40:02,817 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 3
327
+ 2024-04-12 07:40:02,817 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
328
+ 2024-04-12 07:40:02,817 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 3
329
+ 2024-04-12 07:40:02,817 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 4
330
+ 2024-04-12 07:40:02,817 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
331
+ 2024-04-12 07:40:02,817 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 4
332
+ 2024-04-12 07:40:02,818 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
333
+ 2024-04-12 07:40:02,818 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 4
334
+ 2024-04-12 07:40:02,818 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 5
335
+ 2024-04-12 07:40:02,818 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
336
+ 2024-04-12 07:40:02,818 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 5
337
+ 2024-04-12 07:40:02,818 DEBUG SenderThread:334 [sender.py:send():379] send: summary
338
+ 2024-04-12 07:40:02,819 INFO SenderThread:334 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
339
+ 2024-04-12 07:40:02,819 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
340
+ 2024-04-12 07:40:02,819 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 5
341
+ 2024-04-12 07:40:02,819 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 6
342
+ 2024-04-12 07:40:02,819 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
343
+ 2024-04-12 07:40:02,819 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 6
344
+ 2024-04-12 07:40:02,819 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
345
+ 2024-04-12 07:40:02,819 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 6
346
+ 2024-04-12 07:40:02,819 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 7
347
+ 2024-04-12 07:40:02,820 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: status_report
348
+ 2024-04-12 07:40:02,820 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
349
+ 2024-04-12 07:40:02,820 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 7
350
+ 2024-04-12 07:40:02,820 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
351
+ 2024-04-12 07:40:02,820 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 7
352
+ 2024-04-12 07:40:03,407 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
353
+ 2024-04-12 07:40:03,812 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: poll_exit
354
+ 2024-04-12 07:40:04,329 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 8
355
+ 2024-04-12 07:40:04,329 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: poll_exit
356
+ 2024-04-12 07:40:04,330 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
357
+ 2024-04-12 07:40:04,330 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 8
358
+ 2024-04-12 07:40:04,330 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
359
+ 2024-04-12 07:40:04,330 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 8
360
+ 2024-04-12 07:40:04,330 INFO SenderThread:334 [job_builder.py:build():318] Attempting to build job artifact
361
+ 2024-04-12 07:40:04,332 INFO SenderThread:334 [job_builder.py:_get_source_type():466] no source found
362
+ 2024-04-12 07:40:04,333 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 9
363
+ 2024-04-12 07:40:04,333 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
364
+ 2024-04-12 07:40:04,333 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 9
365
+ 2024-04-12 07:40:04,333 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
366
+ 2024-04-12 07:40:04,333 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 9
367
+ 2024-04-12 07:40:04,333 INFO SenderThread:334 [dir_watcher.py:finish():358] shutting down directory watcher
368
+ 2024-04-12 07:40:04,407 INFO Thread-12 :334 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
369
+ 2024-04-12 07:40:04,408 INFO SenderThread:334 [dir_watcher.py:finish():388] scan: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files
370
+ 2024-04-12 07:40:04,408 INFO SenderThread:334 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/conda-environment.yaml conda-environment.yaml
371
+ 2024-04-12 07:40:04,408 INFO SenderThread:334 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/requirements.txt requirements.txt
372
+ 2024-04-12 07:40:04,408 INFO SenderThread:334 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log output.log
373
+ 2024-04-12 07:40:04,412 INFO SenderThread:334 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json wandb-summary.json
374
+ 2024-04-12 07:40:04,412 INFO SenderThread:334 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/config.yaml config.yaml
375
+ 2024-04-12 07:40:04,415 INFO SenderThread:334 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-metadata.json wandb-metadata.json
376
+ 2024-04-12 07:40:04,415 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 10
377
+ 2024-04-12 07:40:04,419 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
378
+ 2024-04-12 07:40:04,422 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 10
379
+ 2024-04-12 07:40:04,422 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
380
+ 2024-04-12 07:40:04,422 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 10
381
+ 2024-04-12 07:40:04,422 INFO SenderThread:334 [file_pusher.py:finish():172] shutting down file pusher
382
+ 2024-04-12 07:40:04,598 INFO wandb-upload_0:334 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/requirements.txt
383
+ 2024-04-12 07:40:04,625 INFO wandb-upload_3:334 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/config.yaml
384
+ 2024-04-12 07:40:04,641 INFO wandb-upload_1:334 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/output.log
385
+ 2024-04-12 07:40:04,674 INFO wandb-upload_2:334 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/files/wandb-summary.json
386
+ 2024-04-12 07:40:04,814 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: poll_exit
387
+ 2024-04-12 07:40:04,814 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: poll_exit
388
+ 2024-04-12 07:40:04,875 INFO Thread-11 (_thread_body):334 [sender.py:transition_state():614] send defer: 11
389
+ 2024-04-12 07:40:04,875 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
390
+ 2024-04-12 07:40:04,875 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 11
391
+ 2024-04-12 07:40:04,875 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
392
+ 2024-04-12 07:40:04,876 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 11
393
+ 2024-04-12 07:40:04,876 INFO SenderThread:334 [file_pusher.py:join():178] waiting for file pusher
394
+ 2024-04-12 07:40:04,876 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 12
395
+ 2024-04-12 07:40:04,876 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
396
+ 2024-04-12 07:40:04,876 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 12
397
+ 2024-04-12 07:40:04,876 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
398
+ 2024-04-12 07:40:04,877 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 12
399
+ 2024-04-12 07:40:04,877 INFO SenderThread:334 [file_stream.py:finish():614] file stream finish called
400
+ 2024-04-12 07:40:04,936 INFO SenderThread:334 [file_stream.py:finish():618] file stream finish is done
401
+ 2024-04-12 07:40:04,936 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 13
402
+ 2024-04-12 07:40:04,936 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
403
+ 2024-04-12 07:40:04,936 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 13
404
+ 2024-04-12 07:40:04,937 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
405
+ 2024-04-12 07:40:04,937 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 13
406
+ 2024-04-12 07:40:04,937 INFO SenderThread:334 [sender.py:transition_state():614] send defer: 14
407
+ 2024-04-12 07:40:04,937 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: defer
408
+ 2024-04-12 07:40:04,937 INFO HandlerThread:334 [handler.py:handle_request_defer():172] handle defer: 14
409
+ 2024-04-12 07:40:04,938 DEBUG SenderThread:334 [sender.py:send():379] send: final
410
+ 2024-04-12 07:40:04,938 DEBUG SenderThread:334 [sender.py:send():379] send: footer
411
+ 2024-04-12 07:40:04,938 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: defer
412
+ 2024-04-12 07:40:04,938 INFO SenderThread:334 [sender.py:send_request_defer():610] handle sender defer: 14
413
+ 2024-04-12 07:40:04,939 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: poll_exit
414
+ 2024-04-12 07:40:04,939 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: poll_exit
415
+ 2024-04-12 07:40:04,940 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: poll_exit
416
+ 2024-04-12 07:40:04,940 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: poll_exit
417
+ 2024-04-12 07:40:04,940 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: server_info
418
+ 2024-04-12 07:40:04,941 DEBUG SenderThread:334 [sender.py:send_request():406] send_request: server_info
419
+ 2024-04-12 07:40:04,944 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: get_summary
420
+ 2024-04-12 07:40:04,944 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: sampled_history
421
+ 2024-04-12 07:40:04,945 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: internal_messages
422
+ 2024-04-12 07:40:04,960 INFO MainThread:334 [wandb_run.py:_footer_history_summary_info():3920] rendering history
423
+ 2024-04-12 07:40:04,961 INFO MainThread:334 [wandb_run.py:_footer_history_summary_info():3952] rendering summary
424
+ 2024-04-12 07:40:04,961 INFO MainThread:334 [wandb_run.py:_footer_sync_info():3879] logging synced files
425
+ 2024-04-12 07:40:04,962 DEBUG HandlerThread:334 [handler.py:handle_request():146] handle_request: shutdown
426
+ 2024-04-12 07:40:04,962 INFO HandlerThread:334 [handler.py:finish():866] shutting down handler
427
+ 2024-04-12 07:40:05,941 INFO WriterThread:334 [datastore.py:close():296] close: /kaggle/working/wandb/run-20240412_073555-bw7oy9ix/run-bw7oy9ix.wandb
428
+ 2024-04-12 07:40:05,960 INFO SenderThread:334 [sender.py:finish():1546] shutting down sender
429
+ 2024-04-12 07:40:05,960 INFO SenderThread:334 [file_pusher.py:finish():172] shutting down file pusher
430
+ 2024-04-12 07:40:05,960 INFO SenderThread:334 [file_pusher.py:join():178] waiting for file pusher
wandb/run-20240412_073555-bw7oy9ix/logs/debug.log CHANGED
@@ -29,3 +29,8 @@ config: {}
29
  2024-04-12 07:36:12,511 INFO MainThread:289 [wandb_run.py:_redirect():2268] Redirects installed.
30
  2024-04-12 07:36:12,512 INFO MainThread:289 [wandb_init.py:init():848] run started, returning control to user process
31
  2024-04-12 07:36:12,518 INFO MainThread:289 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 250880, 'hidden_size': 1536, 'n_layer': 24, 'n_head': 16, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'use_cache': False, 'pretraining_tp': 1, 'apply_residual_connection_post_layernorm': False, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'bos_token_id': 1, 'eos_token_id': 2, 'slow_but_exact': False, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['BloomForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': 3, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'bigscience/bloomz-1b1', 'transformers_version': '4.39.3', 'attention_softmax_in_fp32': True, 'bias_dropout_fusion': True, 'unk_token_id': 0, 'masked_softmax_fusion': True, 'model_type': 'bloom', 'n_inner': None, 'offset_alibi': 100, 'seq_length': 2048, 'skip_bias_add': True, 'skip_bias_add_qkv': False, 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': True, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr12_07-35-52_e5a48bec8248', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 
'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 0, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': 'Femboyuwu2000/bloomz-1b1-vn-chat', 'hub_strategy': 'checkpoint', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
32
+ 2024-04-12 07:38:18,990 INFO MainThread:289 [jupyter.py:save_ipynb():373] not saving jupyter notebook
33
+ 2024-04-12 07:38:18,990 INFO MainThread:289 [wandb_init.py:_pause_backend():438] pausing backend
34
+ 2024-04-12 07:39:06,813 INFO MainThread:289 [wandb_init.py:_resume_backend():443] resuming backend
35
+ 2024-04-12 07:39:10,270 INFO MainThread:289 [jupyter.py:save_ipynb():373] not saving jupyter notebook
36
+ 2024-04-12 07:39:10,271 INFO MainThread:289 [wandb_init.py:_pause_backend():438] pausing backend
wandb/run-20240412_073555-bw7oy9ix/run-bw7oy9ix.wandb CHANGED
Binary files a/wandb/run-20240412_073555-bw7oy9ix/run-bw7oy9ix.wandb and b/wandb/run-20240412_073555-bw7oy9ix/run-bw7oy9ix.wandb differ
 
wandb/run-20240412_074044-widovcn3/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240412_074044-widovcn3/files/config.yaml ADDED
@@ -0,0 +1,685 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712907644.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ 4: 3.10.13
48
+ 5: 0.16.5
49
+ 6: 4.39.3
50
+ 8:
51
+ - 1
52
+ - 2
53
+ - 5
54
+ 9:
55
+ 1: transformers_trainer
56
+ 13: linux-x86_64
57
+ m:
58
+ - 1: train/global_step
59
+ 6:
60
+ - 3
61
+ vocab_size:
62
+ desc: null
63
+ value: 250880
64
+ hidden_size:
65
+ desc: null
66
+ value: 1536
67
+ n_layer:
68
+ desc: null
69
+ value: 24
70
+ n_head:
71
+ desc: null
72
+ value: 16
73
+ layer_norm_epsilon:
74
+ desc: null
75
+ value: 1.0e-05
76
+ initializer_range:
77
+ desc: null
78
+ value: 0.02
79
+ use_cache:
80
+ desc: null
81
+ value: false
82
+ pretraining_tp:
83
+ desc: null
84
+ value: 1
85
+ apply_residual_connection_post_layernorm:
86
+ desc: null
87
+ value: false
88
+ hidden_dropout:
89
+ desc: null
90
+ value: 0.0
91
+ attention_dropout:
92
+ desc: null
93
+ value: 0.0
94
+ bos_token_id:
95
+ desc: null
96
+ value: 1
97
+ eos_token_id:
98
+ desc: null
99
+ value: 2
100
+ slow_but_exact:
101
+ desc: null
102
+ value: false
103
+ return_dict:
104
+ desc: null
105
+ value: true
106
+ output_hidden_states:
107
+ desc: null
108
+ value: false
109
+ output_attentions:
110
+ desc: null
111
+ value: false
112
+ torchscript:
113
+ desc: null
114
+ value: false
115
+ torch_dtype:
116
+ desc: null
117
+ value: bfloat16
118
+ use_bfloat16:
119
+ desc: null
120
+ value: false
121
+ tf_legacy_loss:
122
+ desc: null
123
+ value: false
124
+ pruned_heads:
125
+ desc: null
126
+ value: {}
127
+ tie_word_embeddings:
128
+ desc: null
129
+ value: true
130
+ chunk_size_feed_forward:
131
+ desc: null
132
+ value: 0
133
+ is_encoder_decoder:
134
+ desc: null
135
+ value: false
136
+ is_decoder:
137
+ desc: null
138
+ value: false
139
+ cross_attention_hidden_size:
140
+ desc: null
141
+ value: null
142
+ add_cross_attention:
143
+ desc: null
144
+ value: false
145
+ tie_encoder_decoder:
146
+ desc: null
147
+ value: false
148
+ max_length:
149
+ desc: null
150
+ value: 20
151
+ min_length:
152
+ desc: null
153
+ value: 0
154
+ do_sample:
155
+ desc: null
156
+ value: false
157
+ early_stopping:
158
+ desc: null
159
+ value: false
160
+ num_beams:
161
+ desc: null
162
+ value: 1
163
+ num_beam_groups:
164
+ desc: null
165
+ value: 1
166
+ diversity_penalty:
167
+ desc: null
168
+ value: 0.0
169
+ temperature:
170
+ desc: null
171
+ value: 1.0
172
+ top_k:
173
+ desc: null
174
+ value: 50
175
+ top_p:
176
+ desc: null
177
+ value: 1.0
178
+ typical_p:
179
+ desc: null
180
+ value: 1.0
181
+ repetition_penalty:
182
+ desc: null
183
+ value: 1.0
184
+ length_penalty:
185
+ desc: null
186
+ value: 1.0
187
+ no_repeat_ngram_size:
188
+ desc: null
189
+ value: 0
190
+ encoder_no_repeat_ngram_size:
191
+ desc: null
192
+ value: 0
193
+ bad_words_ids:
194
+ desc: null
195
+ value: null
196
+ num_return_sequences:
197
+ desc: null
198
+ value: 1
199
+ output_scores:
200
+ desc: null
201
+ value: false
202
+ return_dict_in_generate:
203
+ desc: null
204
+ value: false
205
+ forced_bos_token_id:
206
+ desc: null
207
+ value: null
208
+ forced_eos_token_id:
209
+ desc: null
210
+ value: null
211
+ remove_invalid_values:
212
+ desc: null
213
+ value: false
214
+ exponential_decay_length_penalty:
215
+ desc: null
216
+ value: null
217
+ suppress_tokens:
218
+ desc: null
219
+ value: null
220
+ begin_suppress_tokens:
221
+ desc: null
222
+ value: null
223
+ architectures:
224
+ desc: null
225
+ value:
226
+ - BloomForCausalLM
227
+ finetuning_task:
228
+ desc: null
229
+ value: null
230
+ id2label:
231
+ desc: null
232
+ value:
233
+ '0': LABEL_0
234
+ '1': LABEL_1
235
+ label2id:
236
+ desc: null
237
+ value:
238
+ LABEL_0: 0
239
+ LABEL_1: 1
240
+ tokenizer_class:
241
+ desc: null
242
+ value: null
243
+ prefix:
244
+ desc: null
245
+ value: null
246
+ pad_token_id:
247
+ desc: null
248
+ value: 3
249
+ sep_token_id:
250
+ desc: null
251
+ value: null
252
+ decoder_start_token_id:
253
+ desc: null
254
+ value: null
255
+ task_specific_params:
256
+ desc: null
257
+ value: null
258
+ problem_type:
259
+ desc: null
260
+ value: null
261
+ _name_or_path:
262
+ desc: null
263
+ value: bigscience/bloomz-1b1
264
+ transformers_version:
265
+ desc: null
266
+ value: 4.39.3
267
+ attention_softmax_in_fp32:
268
+ desc: null
269
+ value: true
270
+ bias_dropout_fusion:
271
+ desc: null
272
+ value: true
273
+ unk_token_id:
274
+ desc: null
275
+ value: 0
276
+ masked_softmax_fusion:
277
+ desc: null
278
+ value: true
279
+ model_type:
280
+ desc: null
281
+ value: bloom
282
+ n_inner:
283
+ desc: null
284
+ value: null
285
+ offset_alibi:
286
+ desc: null
287
+ value: 100
288
+ seq_length:
289
+ desc: null
290
+ value: 2048
291
+ skip_bias_add:
292
+ desc: null
293
+ value: true
294
+ skip_bias_add_qkv:
295
+ desc: null
296
+ value: false
297
+ quantization_config:
298
+ desc: null
299
+ value:
300
+ quant_method: QuantizationMethod.BITS_AND_BYTES
301
+ _load_in_8bit: false
302
+ _load_in_4bit: true
303
+ llm_int8_threshold: 6.0
304
+ llm_int8_skip_modules: null
305
+ llm_int8_enable_fp32_cpu_offload: false
306
+ llm_int8_has_fp16_weight: false
307
+ bnb_4bit_quant_type: nf4
308
+ bnb_4bit_use_double_quant: true
309
+ bnb_4bit_compute_dtype: float16
310
+ bnb_4bit_quant_storage: uint8
311
+ load_in_4bit: true
312
+ load_in_8bit: false
313
+ output_dir:
314
+ desc: null
315
+ value: /kaggle/working/
316
+ overwrite_output_dir:
317
+ desc: null
318
+ value: false
319
+ do_train:
320
+ desc: null
321
+ value: false
322
+ do_eval:
323
+ desc: null
324
+ value: false
325
+ do_predict:
326
+ desc: null
327
+ value: false
328
+ evaluation_strategy:
329
+ desc: null
330
+ value: 'no'
331
+ prediction_loss_only:
332
+ desc: null
333
+ value: false
334
+ per_device_train_batch_size:
335
+ desc: null
336
+ value: 8
337
+ per_device_eval_batch_size:
338
+ desc: null
339
+ value: 8
340
+ per_gpu_train_batch_size:
341
+ desc: null
342
+ value: null
343
+ per_gpu_eval_batch_size:
344
+ desc: null
345
+ value: null
346
+ gradient_accumulation_steps:
347
+ desc: null
348
+ value: 1
349
+ eval_accumulation_steps:
350
+ desc: null
351
+ value: null
352
+ eval_delay:
353
+ desc: null
354
+ value: 0
355
+ learning_rate:
356
+ desc: null
357
+ value: 3.0e-05
358
+ weight_decay:
359
+ desc: null
360
+ value: 0.0001
361
+ adam_beta1:
362
+ desc: null
363
+ value: 0.9
364
+ adam_beta2:
365
+ desc: null
366
+ value: 0.999
367
+ adam_epsilon:
368
+ desc: null
369
+ value: 1.0e-08
370
+ max_grad_norm:
371
+ desc: null
372
+ value: 0.3
373
+ num_train_epochs:
374
+ desc: null
375
+ value: 5
376
+ max_steps:
377
+ desc: null
378
+ value: 20000
379
+ lr_scheduler_type:
380
+ desc: null
381
+ value: cosine
382
+ lr_scheduler_kwargs:
383
+ desc: null
384
+ value: {}
385
+ warmup_ratio:
386
+ desc: null
387
+ value: 0.03
388
+ warmup_steps:
389
+ desc: null
390
+ value: 0
391
+ log_level:
392
+ desc: null
393
+ value: passive
394
+ log_level_replica:
395
+ desc: null
396
+ value: warning
397
+ log_on_each_node:
398
+ desc: null
399
+ value: true
400
+ logging_dir:
401
+ desc: null
402
+ value: /kaggle/working/runs/Apr12_07-40-41_e5a48bec8248
403
+ logging_strategy:
404
+ desc: null
405
+ value: steps
406
+ logging_first_step:
407
+ desc: null
408
+ value: false
409
+ logging_steps:
410
+ desc: null
411
+ value: 20
412
+ logging_nan_inf_filter:
413
+ desc: null
414
+ value: true
415
+ save_strategy:
416
+ desc: null
417
+ value: steps
418
+ save_steps:
419
+ desc: null
420
+ value: 20
421
+ save_total_limit:
422
+ desc: null
423
+ value: 1
424
+ save_safetensors:
425
+ desc: null
426
+ value: true
427
+ save_on_each_node:
428
+ desc: null
429
+ value: false
430
+ save_only_model:
431
+ desc: null
432
+ value: false
433
+ no_cuda:
434
+ desc: null
435
+ value: false
436
+ use_cpu:
437
+ desc: null
438
+ value: false
439
+ use_mps_device:
440
+ desc: null
441
+ value: false
442
+ seed:
443
+ desc: null
444
+ value: 42
445
+ data_seed:
446
+ desc: null
447
+ value: null
448
+ jit_mode_eval:
449
+ desc: null
450
+ value: false
451
+ use_ipex:
452
+ desc: null
453
+ value: false
454
+ bf16:
455
+ desc: null
456
+ value: false
457
+ fp16:
458
+ desc: null
459
+ value: false
460
+ fp16_opt_level:
461
+ desc: null
462
+ value: O1
463
+ half_precision_backend:
464
+ desc: null
465
+ value: auto
466
+ bf16_full_eval:
467
+ desc: null
468
+ value: false
469
+ fp16_full_eval:
470
+ desc: null
471
+ value: false
472
+ tf32:
473
+ desc: null
474
+ value: null
475
+ local_rank:
476
+ desc: null
477
+ value: 0
478
+ ddp_backend:
479
+ desc: null
480
+ value: null
481
+ tpu_num_cores:
482
+ desc: null
483
+ value: null
484
+ tpu_metrics_debug:
485
+ desc: null
486
+ value: false
487
+ debug:
488
+ desc: null
489
+ value: []
490
+ dataloader_drop_last:
491
+ desc: null
492
+ value: false
493
+ eval_steps:
494
+ desc: null
495
+ value: null
496
+ dataloader_num_workers:
497
+ desc: null
498
+ value: 0
499
+ dataloader_prefetch_factor:
500
+ desc: null
501
+ value: null
502
+ past_index:
503
+ desc: null
504
+ value: -1
505
+ run_name:
506
+ desc: null
507
+ value: /kaggle/working/
508
+ disable_tqdm:
509
+ desc: null
510
+ value: false
511
+ remove_unused_columns:
512
+ desc: null
513
+ value: true
514
+ label_names:
515
+ desc: null
516
+ value: null
517
+ load_best_model_at_end:
518
+ desc: null
519
+ value: false
520
+ metric_for_best_model:
521
+ desc: null
522
+ value: null
523
+ greater_is_better:
524
+ desc: null
525
+ value: null
526
+ ignore_data_skip:
527
+ desc: null
528
+ value: false
529
+ fsdp:
530
+ desc: null
531
+ value: []
532
+ fsdp_min_num_params:
533
+ desc: null
534
+ value: 0
535
+ fsdp_config:
536
+ desc: null
537
+ value:
538
+ min_num_params: 0
539
+ xla: false
540
+ xla_fsdp_v2: false
541
+ xla_fsdp_grad_ckpt: false
542
+ fsdp_transformer_layer_cls_to_wrap:
543
+ desc: null
544
+ value: null
545
+ accelerator_config:
546
+ desc: null
547
+ value:
548
+ split_batches: false
549
+ dispatch_batches: null
550
+ even_batches: true
551
+ use_seedable_sampler: true
552
+ deepspeed:
553
+ desc: null
554
+ value: null
555
+ label_smoothing_factor:
556
+ desc: null
557
+ value: 0.0
558
+ optim:
559
+ desc: null
560
+ value: paged_adamw_8bit
561
+ optim_args:
562
+ desc: null
563
+ value: null
564
+ adafactor:
565
+ desc: null
566
+ value: false
567
+ group_by_length:
568
+ desc: null
569
+ value: false
570
+ length_column_name:
571
+ desc: null
572
+ value: length
573
+ report_to:
574
+ desc: null
575
+ value:
576
+ - tensorboard
577
+ - wandb
578
+ ddp_find_unused_parameters:
579
+ desc: null
580
+ value: null
581
+ ddp_bucket_cap_mb:
582
+ desc: null
583
+ value: null
584
+ ddp_broadcast_buffers:
585
+ desc: null
586
+ value: null
587
+ dataloader_pin_memory:
588
+ desc: null
589
+ value: true
590
+ dataloader_persistent_workers:
591
+ desc: null
592
+ value: false
593
+ skip_memory_metrics:
594
+ desc: null
595
+ value: true
596
+ use_legacy_prediction_loop:
597
+ desc: null
598
+ value: false
599
+ push_to_hub:
600
+ desc: null
601
+ value: true
602
+ resume_from_checkpoint:
603
+ desc: null
604
+ value: null
605
+ hub_model_id:
606
+ desc: null
607
+ value: Femboyuwu2000/bloomz-1b1-vn-chat
608
+ hub_strategy:
609
+ desc: null
610
+ value: checkpoint
611
+ hub_token:
612
+ desc: null
613
+ value: <HUB_TOKEN>
614
+ hub_private_repo:
615
+ desc: null
616
+ value: false
617
+ hub_always_push:
618
+ desc: null
619
+ value: false
620
+ gradient_checkpointing:
621
+ desc: null
622
+ value: true
623
+ gradient_checkpointing_kwargs:
624
+ desc: null
625
+ value: null
626
+ include_inputs_for_metrics:
627
+ desc: null
628
+ value: false
629
+ fp16_backend:
630
+ desc: null
631
+ value: auto
632
+ push_to_hub_model_id:
633
+ desc: null
634
+ value: null
635
+ push_to_hub_organization:
636
+ desc: null
637
+ value: null
638
+ push_to_hub_token:
639
+ desc: null
640
+ value: <PUSH_TO_HUB_TOKEN>
641
+ mp_parameters:
642
+ desc: null
643
+ value: ''
644
+ auto_find_batch_size:
645
+ desc: null
646
+ value: false
647
+ full_determinism:
648
+ desc: null
649
+ value: false
650
+ torchdynamo:
651
+ desc: null
652
+ value: null
653
+ ray_scope:
654
+ desc: null
655
+ value: last
656
+ ddp_timeout:
657
+ desc: null
658
+ value: 1800
659
+ torch_compile:
660
+ desc: null
661
+ value: false
662
+ torch_compile_backend:
663
+ desc: null
664
+ value: null
665
+ torch_compile_mode:
666
+ desc: null
667
+ value: null
668
+ dispatch_batches:
669
+ desc: null
670
+ value: null
671
+ split_batches:
672
+ desc: null
673
+ value: null
674
+ include_tokens_per_second:
675
+ desc: null
676
+ value: false
677
+ include_num_input_tokens_seen:
678
+ desc: null
679
+ value: false
680
+ neftune_noise_alpha:
681
+ desc: null
682
+ value: null
683
+ optim_target_modules:
684
+ desc: null
685
+ value: null
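Note: the config.yaml fragment above is the W&B capture of the run's TrainingArguments. As a rough, hedged sketch only (the training notebook itself is not part of this commit), the recorded hyperparameters would correspond to a transformers setup roughly along these lines; the values shown are copied from the config.yaml and debug.log dumps in this commit, and anything else is an assumption:

# Hedged sketch, not the actual training notebook from this commit.
# Values come from the captured config; optim "paged_adamw_8bit" and
# push_to_hub need bitsandbytes / Hub credentials at training time.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="/kaggle/working/",
    max_steps=20_000,
    learning_rate=3e-5,
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    per_device_train_batch_size=8,
    optim="paged_adamw_8bit",
    gradient_checkpointing=True,
    logging_steps=20,
    save_strategy="steps",
    save_steps=20,
    save_total_limit=1,
    report_to=["tensorboard", "wandb"],
    push_to_hub=True,
    hub_model_id="Femboyuwu2000/bloomz-1b1-vn-chat",
    hub_strategy="checkpoint",
)

The logging_steps / save_steps values of 20 explain why this commit lands at global step 20: every 20 steps the run logs to W&B/TensorBoard and pushes a checkpoint to the Hub.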
wandb/run-20240412_074044-widovcn3/files/output.log ADDED
@@ -0,0 +1,3 @@
1
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
2
+ warnings.warn(
3
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
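Note: the UserWarning captured in output.log comes from gradient checkpointing, which is enabled in the run config while gradient_checkpointing_kwargs is left null. A minimal, hedged sketch of how the warning's recommendation could be followed, assuming the run uses transformers' TrainingArguments as the config dump suggests:

# Hedged sketch: silencing the torch.utils.checkpoint warning above by passing
# use_reentrant explicitly via gradient_checkpointing_kwargs (the field exists,
# but is null, in the captured config). False is the value PyTorch recommends.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="/kaggle/working/",
    gradient_checkpointing=True,
    gradient_checkpointing_kwargs={"use_reentrant": False},
)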
wandb/run-20240412_074044-widovcn3/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.1
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
655
+ rich==13.7.1
656
+ rmm==23.8.0
657
+ rope==1.13.0
658
+ rpds-py==0.16.2
659
+ rsa==4.9
660
+ ruamel-yaml-conda==0.15.100
661
+ ruamel.yaml.clib==0.2.7
662
+ ruamel.yaml==0.17.40
663
+ s2sphere==0.2.5
664
+ s3fs==2024.2.0
665
+ s3transfer==0.6.2
666
+ safetensors==0.4.2
667
+ scattertext==0.1.19
668
+ scikit-image==0.22.0
669
+ scikit-learn-intelex==2024.2.0
670
+ scikit-learn==1.2.2
671
+ scikit-multilearn==0.2.0
672
+ scikit-optimize==0.10.1
673
+ scikit-plot==0.3.7
674
+ scikit-surprise==1.1.3
675
+ scipy==1.11.4
676
+ scipy==1.12.0
677
+ seaborn==0.12.2
678
+ segment_anything==1.0
679
+ segregation==2.5
680
+ semver==3.0.2
681
+ sentencepiece==0.2.0
682
+ sentry-sdk==1.44.1
683
+ setproctitle==1.3.3
684
+ setuptools-git==1.2
685
+ setuptools-scm==8.0.4
686
+ setuptools==69.0.3
687
+ shap==0.44.1
688
+ shapely==2.0.3
689
+ shellingham==1.5.4
690
+ shtab==1.7.1
691
+ simpervisor==1.0.0
692
+ simplejson==3.19.2
693
+ six==1.16.0
694
+ sklearn-pandas==2.2.0
695
+ slicer==0.0.7
696
+ smart-open==6.4.0
697
+ smmap==5.0.1
698
+ sniffio==1.3.0
699
+ snowballstemmer==2.2.0
700
+ snuggs==1.4.7
701
+ sortedcontainers==2.4.0
702
+ soundfile==0.12.1
703
+ soupsieve==2.5
704
+ soxr==0.3.7
705
+ spacy-legacy==3.0.12
706
+ spacy-loggers==1.0.5
707
+ spacy==3.7.2
708
+ spaghetti==1.7.5.post1
709
+ spectral==0.23.1
710
+ spglm==1.1.0
711
+ sphinx-rtd-theme==0.2.4
712
+ spint==1.0.7
713
+ splot==1.1.5.post1
714
+ spopt==0.6.0
715
+ spreg==1.4.2
716
+ spvcm==0.3.0
717
+ sqlparse==0.4.4
718
+ squarify==0.4.3
719
+ srsly==2.4.8
720
+ stable-baselines3==2.1.0
721
+ stack-data==0.6.2
722
+ stack-data==0.6.3
723
+ stanio==0.5.0
724
+ starlette==0.32.0.post1
725
+ statsmodels==0.14.1
726
+ stemming==1.0.1
727
+ stop-words==2018.7.23
728
+ stopit==1.1.2
729
+ stumpy==1.12.0
730
+ sympy==1.12
731
+ tables==3.9.2
732
+ tabulate==0.9.0
733
+ tangled-up-in-unicode==0.2.0
734
+ tbb==2021.12.0
735
+ tblib==3.0.0
736
+ tenacity==8.2.3
737
+ tensorboard-data-server==0.7.2
738
+ tensorboard-plugin-profile==2.15.0
739
+ tensorboard==2.15.1
740
+ tensorboardX==2.6.2.2
741
+ tensorflow-cloud==0.1.16
742
+ tensorflow-datasets==4.9.4
743
+ tensorflow-decision-forests==1.8.1
744
+ tensorflow-estimator==2.15.0
745
+ tensorflow-hub==0.16.1
746
+ tensorflow-io-gcs-filesystem==0.35.0
747
+ tensorflow-io==0.35.0
748
+ tensorflow-metadata==0.14.0
749
+ tensorflow-probability==0.23.0
750
+ tensorflow-serving-api==2.14.1
751
+ tensorflow-text==2.15.0
752
+ tensorflow-transform==0.14.0
753
+ tensorflow==2.15.0
754
+ tensorstore==0.1.56
755
+ termcolor==2.4.0
756
+ terminado==0.18.0
757
+ testpath==0.6.0
758
+ text-unidecode==1.3
759
+ textblob==0.18.0.post0
760
+ texttable==1.7.0
761
+ tf_keras==2.15.1
762
+ tfp-nightly==0.24.0.dev0
763
+ thinc==8.2.2
764
+ threadpoolctl==3.2.0
765
+ tifffile==2023.12.9
766
+ timm==0.9.16
767
+ tinycss2==1.2.1
768
+ tobler==0.11.2
769
+ tokenizers==0.15.2
770
+ toml==0.10.2
771
+ tomli==2.0.1
772
+ tomlkit==0.12.4
773
+ toolz==0.12.1
774
+ torch==2.1.2
775
+ torchaudio==2.1.2
776
+ torchdata==0.7.1
777
+ torchinfo==1.8.0
778
+ torchmetrics==1.3.2
779
+ torchtext==0.16.2
780
+ torchvision==0.16.2
781
+ tornado==6.3.3
782
+ tqdm==4.66.1
783
+ traceml==1.0.8
784
+ traitlets==5.9.0
785
+ traittypes==0.2.1
786
+ transformers==4.39.3
787
+ treelite-runtime==3.2.0
788
+ treelite==3.2.0
789
+ trl==0.8.2
790
+ truststore==0.8.0
791
+ trx-python==0.2.9
792
+ tsfresh==0.20.2
793
+ typeguard==4.1.5
794
+ typer==0.9.0
795
+ typer==0.9.4
796
+ types-python-dateutil==2.8.19.20240106
797
+ typing-inspect==0.9.0
798
+ typing-utils==0.1.0
799
+ typing_extensions==4.9.0
800
+ tyro==0.8.3
801
+ tzdata==2023.4
802
+ uc-micro-py==1.0.3
803
+ ucx-py==0.33.0
804
+ ujson==5.9.0
805
+ umap-learn==0.5.5
806
+ unicodedata2==15.1.0
807
+ update-checker==0.18.0
808
+ uri-template==1.3.0
809
+ uritemplate==3.0.1
810
+ urllib3==1.26.18
811
+ urllib3==2.1.0
812
+ urwid==2.6.10
813
+ urwid_readline==0.14
814
+ uvicorn==0.25.0
815
+ uvloop==0.19.0
816
+ vaex-astro==0.9.3
817
+ vaex-core==4.17.1
818
+ vaex-hdf5==0.14.1
819
+ vaex-jupyter==0.8.2
820
+ vaex-ml==0.18.3
821
+ vaex-server==0.9.0
822
+ vaex-viz==0.5.4
823
+ vaex==4.17.0
824
+ vec_noise==1.1.4
825
+ vecstack==0.4.0
826
+ virtualenv==20.21.0
827
+ visions==0.7.5
828
+ vowpalwabbit==9.9.0
829
+ vtk==9.3.0
830
+ wandb==0.16.5
831
+ wasabi==1.1.2
832
+ watchfiles==0.21.0
833
+ wavio==0.0.8
834
+ wcwidth==0.2.13
835
+ weasel==0.3.4
836
+ webcolors==1.13
837
+ webencodings==0.5.1
838
+ websocket-client==1.7.0
839
+ websockets==12.0
840
+ wfdb==4.1.2
841
+ whatthepatch==1.0.5
842
+ wheel==0.42.0
843
+ widgetsnbextension==3.6.6
844
+ witwidget==1.8.1
845
+ woodwork==0.29.0
846
+ wordcloud==1.9.3
847
+ wordsegment==1.3.1
848
+ wrapt==1.14.1
849
+ xarray-einstats==0.7.0
850
+ xarray==2024.3.0
851
+ xgboost==2.0.3
852
+ xvfbwrapper==0.2.9
853
+ xxhash==3.4.1
854
+ xyzservices==2023.10.1
855
+ y-py==0.6.2
856
+ yapf==0.40.2
857
+ yarl==1.9.3
858
+ yarl==1.9.4
859
+ ydata-profiling==4.6.4
860
+ yellowbrick==1.5
861
+ ypy-websocket==0.8.4
862
+ zict==3.0.0
863
+ zipp==3.17.0
864
+ zstandard==0.22.0
wandb/run-20240412_074044-widovcn3/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-12T07:40:45.023659",
5
+ "startedAt": "2024-04-12T07:40:44.501264",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "e5a48bec8248",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.138,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.138,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.138,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.138,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.138,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5565.782573699951
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
wandb/run-20240412_074044-widovcn3/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
1
+ {"train/loss": 4.5777, "train/grad_norm": 62.10089111328125, "train/learning_rate": 1e-06, "train/epoch": 0.0, "train/global_step": 20, "_timestamp": 1712907682.0631418, "_runtime": 37.555423736572266, "_step": 0}
wandb/run-20240412_074044-widovcn3/logs/debug-internal.log ADDED
@@ -0,0 +1,85 @@
1
+ 2024-04-12 07:40:44,508 INFO StreamThr :485 [internal.py:wandb_internal():86] W&B internal server running at pid: 485, started at: 2024-04-12 07:40:44.508065
2
+ 2024-04-12 07:40:44,510 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-12 07:40:44,782 INFO WriterThread:485 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240412_074044-widovcn3/run-widovcn3.wandb
4
+ 2024-04-12 07:40:44,782 DEBUG SenderThread:485 [sender.py:send():379] send: header
5
+ 2024-04-12 07:40:44,785 DEBUG SenderThread:485 [sender.py:send():379] send: run
6
+ 2024-04-12 07:40:44,894 INFO SenderThread:485 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240412_074044-widovcn3/files
7
+ 2024-04-12 07:40:44,895 INFO SenderThread:485 [sender.py:_start_run_threads():1124] run started: widovcn3 with start time 1712907644.507718
8
+ 2024-04-12 07:40:44,903 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-12 07:40:44,904 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-12 07:40:44,996 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-12 07:40:45,008 DEBUG HandlerThread:485 [system_info.py:__init__():26] System info init
12
+ 2024-04-12 07:40:45,008 DEBUG HandlerThread:485 [system_info.py:__init__():41] System info init done
13
+ 2024-04-12 07:40:45,008 INFO HandlerThread:485 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-12 07:40:45,008 INFO SystemMonitor:485 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-12 07:40:45,008 INFO HandlerThread:485 [system_monitor.py:probe():214] Collecting system info
16
+ 2024-04-12 07:40:45,009 INFO SystemMonitor:485 [interfaces.py:start():190] Started cpu monitoring
17
+ 2024-04-12 07:40:45,009 INFO SystemMonitor:485 [interfaces.py:start():190] Started disk monitoring
18
+ 2024-04-12 07:40:45,011 INFO SystemMonitor:485 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-12 07:40:45,012 INFO SystemMonitor:485 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-12 07:40:45,013 INFO SystemMonitor:485 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-12 07:40:45,023 DEBUG HandlerThread:485 [system_info.py:probe():150] Probing system
22
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [system_info.py:probe():198] Probing system done
24
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-12T07:40:45.023659', 'startedAt': '2024-04-12T07:40:44.501264', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': 'e5a48bec8248', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.138, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}, {'current': 2000.138, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5565.782573699951}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-12 07:40:45,026 INFO HandlerThread:485 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-12 07:40:45,026 INFO HandlerThread:485 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-12 07:40:45,026 DEBUG HandlerThread:485 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-12 07:40:45,897 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/conda-environment.yaml
29
+ 2024-04-12 07:41:00,040 ERROR HandlerThread:485 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
+ Traceback (most recent call last):
31
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
+ subprocess.call(
33
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
34
+ return p.wait(timeout=timeout)
35
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
36
+ return self._wait(timeout=timeout)
37
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
+ raise TimeoutExpired(self.args, timeout)
39
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-12 07:41:00,041 DEBUG HandlerThread:485 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-12 07:41:00,042 INFO HandlerThread:485 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-12 07:41:00,047 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-12 07:41:00,047 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-12 07:41:00,047 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-12 07:41:00,048 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-12 07:41:00,048 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-12 07:41:00,048 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: keepalive
48
+ 2024-04-12 07:41:00,048 DEBUG SenderThread:485 [sender.py:send():379] send: files
49
+ 2024-04-12 07:41:00,049 INFO SenderThread:485 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
+ 2024-04-12 07:41:00,245 INFO wandb-upload_0:485 [upload_job.py:push():131] Uploaded file /tmp/tmpqpxjmay6wandb/iki4gmew-wandb-metadata.json
51
+ 2024-04-12 07:41:00,900 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/wandb-metadata.json
52
+ 2024-04-12 07:41:01,034 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: python_packages
53
+ 2024-04-12 07:41:01,034 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: python_packages
54
+ 2024-04-12 07:41:01,038 DEBUG SenderThread:485 [sender.py:send():379] send: telemetry
55
+ 2024-04-12 07:41:01,043 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: stop_status
56
+ 2024-04-12 07:41:01,043 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: stop_status
57
+ 2024-04-12 07:41:01,051 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: internal_messages
58
+ 2024-04-12 07:41:01,102 DEBUG SenderThread:485 [sender.py:send():379] send: config
59
+ 2024-04-12 07:41:01,103 DEBUG SenderThread:485 [sender.py:send():379] send: metric
60
+ 2024-04-12 07:41:01,103 DEBUG SenderThread:485 [sender.py:send():379] send: telemetry
61
+ 2024-04-12 07:41:01,104 DEBUG SenderThread:485 [sender.py:send():379] send: metric
62
+ 2024-04-12 07:41:01,104 WARNING SenderThread:485 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
63
+ 2024-04-12 07:41:01,104 DEBUG SenderThread:485 [sender.py:send():379] send: telemetry
64
+ 2024-04-12 07:41:01,900 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/output.log
65
+ 2024-04-12 07:41:01,901 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/requirements.txt
66
+ 2024-04-12 07:41:03,901 INFO Thread-12 :485 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/output.log
67
+ 2024-04-12 07:41:05,568 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
68
+ 2024-04-12 07:41:10,569 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-12 07:41:15,575 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
70
+ 2024-04-12 07:41:15,907 INFO Thread-12 :485 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/config.yaml
71
+ 2024-04-12 07:41:16,035 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: stop_status
72
+ 2024-04-12 07:41:16,036 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: internal_messages
73
+ 2024-04-12 07:41:16,036 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-12 07:41:21,106 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-12 07:41:22,063 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: partial_history
76
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
77
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
78
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
79
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: metric
80
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send():379] send: history
81
+ 2024-04-12 07:41:22,066 DEBUG SenderThread:485 [sender.py:send_request():406] send_request: summary_record
82
+ 2024-04-12 07:41:22,068 INFO SenderThread:485 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
83
+ 2024-04-12 07:41:22,910 INFO Thread-12 :485 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/wandb-summary.json
84
+ 2024-04-12 07:41:23,910 INFO Thread-12 :485 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240412_074044-widovcn3/files/output.log
85
+ 2024-04-12 07:41:26,797 DEBUG HandlerThread:485 [handler.py:handle_request():146] handle_request: status_report
wandb/run-20240412_074044-widovcn3/logs/debug.log ADDED
@@ -0,0 +1,31 @@
1
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Configure stats pid to 443
3
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240412_074044-widovcn3/logs/debug.log
10
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240412_074044-widovcn3/logs/debug-internal.log
11
+ 2024-04-12 07:40:44,503 INFO MainThread:443 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x79825186be20>
12
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():567] calling init triggers
13
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
14
+ config: {}
15
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():617] starting backend
16
+ 2024-04-12 07:40:44,504 INFO MainThread:443 [wandb_init.py:init():621] setting up manager
17
+ 2024-04-12 07:40:44,506 INFO MainThread:443 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
18
+ 2024-04-12 07:40:44,507 INFO MainThread:443 [wandb_init.py:init():629] backend started and connected
19
+ 2024-04-12 07:40:44,521 INFO MainThread:443 [wandb_run.py:_label_probe_notebook():1299] probe notebook
20
+ 2024-04-12 07:40:44,781 INFO MainThread:443 [wandb_init.py:init():721] updated telemetry
21
+ 2024-04-12 07:40:44,784 INFO MainThread:443 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
22
+ 2024-04-12 07:40:44,902 INFO MainThread:443 [wandb_run.py:_on_init():2344] communicating current version
23
+ 2024-04-12 07:40:44,988 INFO MainThread:443 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
24
+
25
+ 2024-04-12 07:40:44,988 INFO MainThread:443 [wandb_init.py:init():805] starting run threads in backend
26
+ 2024-04-12 07:41:01,035 INFO MainThread:443 [wandb_run.py:_console_start():2323] atexit reg
27
+ 2024-04-12 07:41:01,035 INFO MainThread:443 [wandb_run.py:_redirect():2178] redirect: wrap_raw
28
+ 2024-04-12 07:41:01,036 INFO MainThread:443 [wandb_run.py:_redirect():2243] Wrapping output streams.
29
+ 2024-04-12 07:41:01,036 INFO MainThread:443 [wandb_run.py:_redirect():2268] Redirects installed.
30
+ 2024-04-12 07:41:01,037 INFO MainThread:443 [wandb_init.py:init():848] run started, returning control to user process
31
+ 2024-04-12 07:41:01,044 INFO MainThread:443 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 250880, 'hidden_size': 1536, 'n_layer': 24, 'n_head': 16, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'use_cache': False, 'pretraining_tp': 1, 'apply_residual_connection_post_layernorm': False, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'bos_token_id': 1, 'eos_token_id': 2, 'slow_but_exact': False, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['BloomForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': 3, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'bigscience/bloomz-1b1', 'transformers_version': '4.39.3', 'attention_softmax_in_fp32': True, 'bias_dropout_fusion': True, 'unk_token_id': 0, 'masked_softmax_fusion': True, 'model_type': 'bloom', 'n_inner': None, 'offset_alibi': 100, 'seq_length': 2048, 'skip_bias_add': True, 'skip_bias_add_qkv': False, 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': True, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr12_07-40-41_e5a48bec8248', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 
'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 0, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': 'Femboyuwu2000/bloomz-1b1-vn-chat', 'hub_strategy': 'checkpoint', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
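Note: the config_cb entry above also records the base model and its quantization_config (bigscience/bloomz-1b1 loaded in 4-bit NF4 with double quantization and float16 compute). A minimal sketch, assuming the standard transformers + bitsandbytes loading path; the actual notebook is not included in this commit:

# Hedged sketch of loading the base model with the quantization_config recorded
# in the debug.log entry above. Requires bitsandbytes and a CUDA GPU at runtime.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.float16,
)

model = AutoModelForCausalLM.from_pretrained(
    "bigscience/bloomz-1b1",
    quantization_config=bnb_config,
)

The resulting model would then be wrapped with a PEFT adapter and trained with the TrainingArguments sketched earlier; the adapter weights are what this commit updates.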
wandb/run-20240412_074044-widovcn3/run-widovcn3.wandb ADDED
Binary file (6.68 kB).