Femboyuwu2000 committed
Commit 368b66b
1 Parent(s): f71cf9b

gpt2_lierotica
README.md CHANGED
@@ -1,11 +1,11 @@
 ---
-license: apache-2.0
+license: mit
 library_name: peft
 tags:
 - trl
 - sft
 - generated_from_trainer
-base_model: tiiuae/falcon-rw-1b
+base_model: openai-community/gpt2-large
 model-index:
 - name: working
   results: []
@@ -16,7 +16,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 # working
 
-This model is a fine-tuned version of [tiiuae/falcon-rw-1b](https://huggingface.co/tiiuae/falcon-rw-1b) on an unknown dataset.
+This model is a fine-tuned version of [openai-community/gpt2-large](https://huggingface.co/openai-community/gpt2-large) on an unknown dataset.
 
 ## Model description
 
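The updated card only states that the adapter was trained from gpt2-large. As a usage note, here is a minimal sketch of loading a PEFT adapter like this one on top of that base model; the adapter path is a placeholder (the diff does not state the final repository id), and only `openai-community/gpt2-large` is taken from the card.

```python
# Minimal sketch: load the gpt2-large base model, then attach the LoRA adapter with PEFT.
# "path/to/this-adapter-repo" is a placeholder -- substitute this repo's id or a local checkout.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "openai-community/gpt2-large"      # from the updated model card
adapter_path = "path/to/this-adapter-repo"   # placeholder

tokenizer = AutoTokenizer.from_pretrained(base_id)
tokenizer.pad_token = tokenizer.eos_token    # GPT-2 has no dedicated pad token

base_model = AutoModelForCausalLM.from_pretrained(base_id)
model = PeftModel.from_pretrained(base_model, adapter_path)

prompt = "Once upon a time"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```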
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "tiiuae/falcon-rw-1b",
+  "base_model_name_or_path": "openai-community/gpt2-large",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -20,11 +20,11 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "dense",
-    "query_key_value",
-    "lm_head",
-    "dense_h_to_4h",
-    "word_embeddings"
+    "c_fc",
+    "c_attn",
+    "wte",
+    "c_proj",
+    "lm_head"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d9aea127eb50666b10f052869f64e016f82c077f73c58abb8489889d039a0447
-size 844854136
+oid sha256:e89a7e068c1b8ca512d8f84c288b20473c3e33b72af5e87dbdc4880c81fbaec5
+size 541561408
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
runs/Apr11_00-59-48_c072b7c9e487/events.out.tfevents.1712797249.c072b7c9e487.94.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d5b6a866706dea049f01b547df41763413571cb0be7120fe89152c2c60fbe42
+size 5901
special_tokens_map.json CHANGED
@@ -1,24 +1,6 @@
 {
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "</s>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "pad_token": "<|endoftext|>",
+  "unk_token": "<|endoftext|>"
 }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,47 +1,22 @@
 {
   "add_prefix_space": false,
   "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
+    "50256": {
+      "content": "<|endoftext|>",
       "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "3": {
-      "content": "<pad>",
-      "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
-  "max_length": 2048,
-  "model_max_length": 8192,
-  "pad_token": "</s>",
-  "padding_side": "right",
-  "tokenizer_class": "BloomTokenizer",
+  "bos_token": "<|endoftext|>",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "max_length": 1024,
+  "model_max_length": 1024,
+  "pad_token": "<|endoftext|>",
+  "tokenizer_class": "GPT2Tokenizer",
   "truncation": true,
-  "unk_token": "<unk>"
+  "unk_token": "<|endoftext|>"
 }
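The tokenizer config now uses `GPT2Tokenizer` with `<|endoftext|>` serving as bos/eos/pad/unk and a 1024-token limit. A small sketch, assuming the stock gpt2-large tokenizer, of how that setup is typically applied when preparing batches; the sample strings are illustrative.

```python
# Sketch of using the GPT-2 tokenizer with the settings from the new config:
# <|endoftext|> doubles as the pad token, and sequences are truncated to 1024 tokens.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("openai-community/gpt2-large")
tokenizer.pad_token = tokenizer.eos_token  # "<|endoftext|>", as in special_tokens_map.json

batch = tokenizer(
    ["an example training passage", "another, much longer passage ..."],
    truncation=True,
    max_length=1024,   # matches model_max_length in the new tokenizer_config.json
    padding=True,
    return_tensors="pt",
)
print(batch["input_ids"].shape)
```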
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:71013af53dd90b2f29c64c2465ed2e7f850cd56a40c3c90ec075b87d0cf5e42c
+oid sha256:d336504ff1b5fa68adcdc85a42fecf652565e8aad52350e3bded9aa454938008
 size 4920
vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
wandb/debug-internal.log CHANGED
@@ -1,32 +1,32 @@
-2024-04-10 21:51:01,073 INFO StreamThr :274 [internal.py:wandb_internal():86] W&B internal server running at pid: 274, started at: 2024-04-10 21:51:01.072932
-2024-04-10 21:51:01,447 INFO WriterThread:274 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240410_215101-ta1pahei/run-ta1pahei.wandb
-2024-04-10 21:51:01,670 INFO SenderThread:274 [sender.py:_start_run_threads():1124] run started: ta1pahei with start time 1712785861.074028
-2024-04-10 21:51:01,789 INFO HandlerThread:274 [system_monitor.py:start():194] Starting system monitor
-2024-04-10 21:51:16,821 ERROR HandlerThread:274 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
-[… further removed lines 1–29 of the old run (ta1pahei): cpu/disk/gpu/memory/network monitor start-up, system probe on host 4bf7c58fba03 (Linux 5.15.133+, Python 3.10.13, 2× Tesla T4), conda-environment.yaml watcher …]
+2024-04-11 01:01:03,983 INFO StreamThr :138 [internal.py:wandb_internal():86] W&B internal server running at pid: 138, started at: 2024-04-11 01:01:03.982415
+2024-04-11 01:01:04,351 INFO WriterThread:138 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/run-4b3fzolv.wandb
+2024-04-11 01:01:04,503 INFO SenderThread:138 [sender.py:_start_run_threads():1124] run started: 4b3fzolv with start time 1712797263.984466
+2024-04-11 01:01:04,610 INFO HandlerThread:138 [system_monitor.py:start():194] Starting system monitor
+2024-04-11 01:01:19,640 ERROR HandlerThread:138 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
+[… further added lines 1–29 of the new run (4b3fzolv): cpu/disk/gpu/memory/network monitor start-up, system probe on host c072b7c9e487 (Linux 5.15.133+, Python 3.10.13, 2× Tesla T4), conda-environment.yaml watcher …]
 Traceback (most recent call last):
   File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
     subprocess.call(
@@ -37,370 +37,126 @@ Traceback (most recent call last):
   File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
     raise TimeoutExpired(self.args, timeout)
 subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
-[… ~367 removed lines from run ta1pahei (2024-04-10 21:51–22:02): repetitive DEBUG handle_request status_report/stop_status/keepalive/internal_messages traffic, periodic send: stats, a pause/resume of the system monitor, wandb-metadata.json upload, and dir_watcher notices for output.log, config.yaml and wandb-summary.json …]
+2024-04-11 01:01:19,933 INFO wandb-upload_0:138 [upload_job.py:push():131] Uploaded file /tmp/tmpiqqv1dwfwandb/3d55vshp-wandb-metadata.json
+2024-04-11 01:01:20,615 WARNING SenderThread:138 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
+[… remaining added lines from run 4b3fzolv: wandb-metadata.json creation, python_packages request, telemetry/config/metric sends, stop_status handling; the rendered diff is truncated at this point …]
62
+ 2024-04-11 01:01:20,616 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
63
+ 2024-04-11 01:01:20,617 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
64
+ 2024-04-11 01:01:21,508 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/requirements.txt
65
+ 2024-04-11 01:01:21,509 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
66
+ 2024-04-11 01:01:23,509 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
67
+ 2024-04-11 01:01:25,510 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
68
+ 2024-04-11 01:01:25,513 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-11 01:01:27,511 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
70
+ 2024-04-11 01:01:30,790 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
71
+ 2024-04-11 01:01:35,602 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
72
+ 2024-04-11 01:01:35,603 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
73
+ 2024-04-11 01:01:35,604 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-11 01:01:36,648 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-11 01:01:37,515 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/config.yaml
76
+ 2024-04-11 01:01:41,766 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
77
+ 2024-04-11 01:01:46,767 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
78
+ 2024-04-11 01:01:50,600 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
79
+ 2024-04-11 01:01:50,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
80
+ 2024-04-11 01:01:50,640 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
81
+ 2024-04-11 01:01:52,707 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
82
+ 2024-04-11 01:01:57,708 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
83
+ 2024-04-11 01:02:02,709 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
84
+ 2024-04-11 01:02:04,614 DEBUG SystemMonitor:138 [system_monitor.py:_start():172] Starting system metrics aggregation loop
85
+ 2024-04-11 01:02:04,616 DEBUG SenderThread:138 [sender.py:send():379] send: stats
86
+ 2024-04-11 01:02:05,600 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
87
+ 2024-04-11 01:02:05,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
88
+ 2024-04-11 01:02:05,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
89
+ 2024-04-11 01:02:08,651 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
90
+ 2024-04-11 01:02:13,651 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
91
+ 2024-04-11 01:02:18,652 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
92
+ 2024-04-11 01:02:20,600 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
93
+ 2024-04-11 01:02:20,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
94
+ 2024-04-11 01:02:20,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
95
+ 2024-04-11 01:02:23,671 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
96
+ 2024-04-11 01:02:28,672 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
97
+ 2024-04-11 01:02:33,673 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
98
+ 2024-04-11 01:02:34,617 DEBUG SenderThread:138 [sender.py:send():379] send: stats
99
+ 2024-04-11 01:02:35,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
100
+ 2024-04-11 01:02:35,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
101
+ 2024-04-11 01:02:35,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
102
+ 2024-04-11 01:02:39,669 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
103
+ 2024-04-11 01:02:44,669 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
104
+ 2024-04-11 01:02:49,670 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
105
+ 2024-04-11 01:02:50,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
106
+ 2024-04-11 01:02:50,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
107
+ 2024-04-11 01:02:50,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
108
+ 2024-04-11 01:02:54,683 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
109
+ 2024-04-11 01:02:59,684 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
110
+ 2024-04-11 01:03:04,618 DEBUG SenderThread:138 [sender.py:send():379] send: stats
111
+ 2024-04-11 01:03:05,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
112
+ 2024-04-11 01:03:05,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
113
+ 2024-04-11 01:03:05,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
114
+ 2024-04-11 01:03:05,668 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
115
+ 2024-04-11 01:03:10,669 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
116
+ 2024-04-11 01:03:15,670 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
117
+ 2024-04-11 01:03:20,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
118
+ 2024-04-11 01:03:20,602 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
119
+ 2024-04-11 01:03:20,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
120
+ 2024-04-11 01:03:21,658 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
121
+ 2024-04-11 01:03:26,659 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
122
+ 2024-04-11 01:03:31,660 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
123
+ 2024-04-11 01:03:34,619 DEBUG SenderThread:138 [sender.py:send():379] send: stats
124
+ 2024-04-11 01:03:35,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
125
+ 2024-04-11 01:03:35,602 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
126
+ 2024-04-11 01:03:35,642 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
127
+ 2024-04-11 01:03:36,661 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
128
+ 2024-04-11 01:03:37,017 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: partial_history
129
+ 2024-04-11 01:03:37,019 DEBUG SenderThread:138 [sender.py:send():379] send: metric
130
+ 2024-04-11 01:03:37,019 DEBUG SenderThread:138 [sender.py:send():379] send: metric
131
+ 2024-04-11 01:03:37,019 DEBUG SenderThread:138 [sender.py:send():379] send: metric
132
+ 2024-04-11 01:03:37,020 DEBUG SenderThread:138 [sender.py:send():379] send: metric
133
+ 2024-04-11 01:03:37,020 DEBUG SenderThread:138 [sender.py:send():379] send: history
134
+ 2024-04-11 01:03:37,020 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
135
+ 2024-04-11 01:03:37,020 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
136
+ 2024-04-11 01:03:37,559 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/wandb-summary.json
137
+ 2024-04-11 01:03:38,738 DEBUG SenderThread:138 [sender.py:send():379] send: telemetry
138
+ 2024-04-11 01:03:38,738 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
139
+ 2024-04-11 01:03:38,739 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: partial_history
140
+ 2024-04-11 01:03:38,741 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
141
+ 2024-04-11 01:03:38,741 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
142
+ 2024-04-11 01:03:38,741 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
143
+ 2024-04-11 01:03:38,742 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
144
+ 2024-04-11 01:03:38,742 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
145
+ 2024-04-11 01:03:38,742 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
146
+ 2024-04-11 01:03:38,743 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
147
+ 2024-04-11 01:03:38,743 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
148
+ 2024-04-11 01:03:38,743 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
149
+ 2024-04-11 01:03:38,744 DEBUG SenderThread:138 [sender.py:send():379] send: history
150
+ 2024-04-11 01:03:38,744 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
151
+ 2024-04-11 01:03:38,744 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
152
+ 2024-04-11 01:03:39,559 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/wandb-summary.json
153
+ 2024-04-11 01:03:39,560 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
154
+ 2024-04-11 01:03:41,560 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
155
+ 2024-04-11 01:03:41,871 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
156
+ 2024-04-11 01:03:42,561 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/config.yaml
157
+ 2024-04-11 01:03:46,983 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
158
+ 2024-04-11 01:03:50,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
159
+ 2024-04-11 01:03:50,602 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
160
+ 2024-04-11 01:03:50,604 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
161
+ 2024-04-11 01:03:52,705 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
162
+ 2024-04-11 01:03:57,706 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
 
wandb/debug.log CHANGED
@@ -1,36 +1,32 @@
1
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Configure stats pid to 227
3
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Applying login settings: {}
9
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_setup.py:_flush():76] Applying login settings: {'api_key': '***REDACTED***'}
10
- 2024-04-10 21:51:01,068 INFO MainThread:227 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240410_215101-ta1pahei/logs/debug.log
11
- 2024-04-10 21:51:01,069 INFO MainThread:227 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240410_215101-ta1pahei/logs/debug-internal.log
12
- 2024-04-10 21:51:01,069 INFO MainThread:227 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7b92ac4655a0>
13
- 2024-04-10 21:51:01,069 INFO MainThread:227 [wandb_init.py:init():567] calling init triggers
14
- 2024-04-10 21:51:01,069 INFO MainThread:227 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
15
  config: {}
16
- 2024-04-10 21:51:01,069 INFO MainThread:227 [wandb_init.py:init():617] starting backend
17
- 2024-04-10 21:51:01,069 INFO MainThread:227 [wandb_init.py:init():621] setting up manager
18
- 2024-04-10 21:51:01,071 INFO MainThread:227 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
19
- 2024-04-10 21:51:01,073 INFO MainThread:227 [wandb_init.py:init():629] backend started and connected
20
- 2024-04-10 21:51:01,085 INFO MainThread:227 [wandb_run.py:_label_probe_notebook():1299] probe notebook
21
- 2024-04-10 21:51:01,447 INFO MainThread:227 [wandb_init.py:init():721] updated telemetry
22
- 2024-04-10 21:51:01,458 INFO MainThread:227 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
23
- 2024-04-10 21:51:01,677 INFO MainThread:227 [wandb_run.py:_on_init():2344] communicating current version
24
- 2024-04-10 21:51:01,771 INFO MainThread:227 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
25
 
26
- 2024-04-10 21:51:01,771 INFO MainThread:227 [wandb_init.py:init():805] starting run threads in backend
27
- 2024-04-10 21:51:17,777 INFO MainThread:227 [wandb_run.py:_console_start():2323] atexit reg
28
- 2024-04-10 21:51:17,777 INFO MainThread:227 [wandb_run.py:_redirect():2178] redirect: wrap_raw
29
- 2024-04-10 21:51:17,777 INFO MainThread:227 [wandb_run.py:_redirect():2243] Wrapping output streams.
30
- 2024-04-10 21:51:17,778 INFO MainThread:227 [wandb_run.py:_redirect():2268] Redirects installed.
31
- 2024-04-10 21:51:17,779 INFO MainThread:227 [wandb_init.py:init():848] run started, returning control to user process
32
- 2024-04-10 21:51:17,784 INFO MainThread:227 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 50304, 'hidden_size': 2048, 'num_hidden_layers': 24, 'num_attention_heads': 32, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'use_cache': False, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'bos_token_id': 1, 'eos_token_id': 2, 'num_kv_heads': 32, 'alibi': True, 'new_decoder_architecture': False, 'multi_query': False, 'parallel_attn': False, 'bias': True, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['FalconForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': None, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'tiiuae/falcon-rw-1b', 'transformers_version': '4.39.3', 'apply_residual_connection_post_layernorm': False, 'auto_map': {'AutoConfig': 'tiiuae/falcon-rw-1b--configuration_falcon.FalconConfig', 'AutoModel': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconModel', 'AutoModelForSequenceClassification': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForSequenceClassification', 'AutoModelForTokenClassification': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForTokenClassification', 'AutoModelForQuestionAnswering': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForQuestionAnswering', 'AutoModelForCausalLM': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForCausalLM'}, 'model_type': 'falcon', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20, 
'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr10_21-42-21_4bf7c58fba03', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
33
- 2024-04-10 21:51:22,848 INFO MainThread:227 [jupyter.py:save_ipynb():373] not saving jupyter notebook
34
- 2024-04-10 21:51:22,848 INFO MainThread:227 [wandb_init.py:_pause_backend():438] pausing backend
35
- 2024-04-10 21:52:40,758 INFO MainThread:227 [wandb_init.py:_resume_backend():443] resuming backend
36
- 2024-04-10 22:00:44,550 INFO MainThread:227 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 50304, 'hidden_size': 2048, 'num_hidden_layers': 24, 'num_attention_heads': 32, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'use_cache': False, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'bos_token_id': 1, 'eos_token_id': 2, 'num_kv_heads': 32, 'alibi': True, 'new_decoder_architecture': False, 'multi_query': False, 'parallel_attn': False, 'bias': True, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['FalconForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': None, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'tiiuae/falcon-rw-1b', 'transformers_version': '4.39.3', 'apply_residual_connection_post_layernorm': False, 'auto_map': {'AutoConfig': 'tiiuae/falcon-rw-1b--configuration_falcon.FalconConfig', 'AutoModel': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconModel', 'AutoModelForSequenceClassification': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForSequenceClassification', 'AutoModelForTokenClassification': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForTokenClassification', 'AutoModelForQuestionAnswering': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForQuestionAnswering', 'AutoModelForCausalLM': 'tiiuae/falcon-rw-1b--modeling_falcon.FalconForCausalLM'}, 'model_type': 'falcon', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20, 
'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr10_21-52-40_4bf7c58fba03', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
 
1
+ 2024-04-11 01:01:03,977 INFO MainThread:94 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Configure stats pid to 94
3
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Applying login settings: {'api_key': '***REDACTED***'}
10
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240411_010103-4b3fzolv/logs/debug.log
11
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240411_010103-4b3fzolv/logs/debug-internal.log
12
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7bfd48105450>
13
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:init():567] calling init triggers
14
+ 2024-04-11 01:01:03,979 INFO MainThread:94 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
15
  config: {}
16
+ 2024-04-11 01:01:03,979 INFO MainThread:94 [wandb_init.py:init():617] starting backend
17
+ 2024-04-11 01:01:03,979 INFO MainThread:94 [wandb_init.py:init():621] setting up manager
18
+ 2024-04-11 01:01:03,980 INFO MainThread:94 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
19
+ 2024-04-11 01:01:03,984 INFO MainThread:94 [wandb_init.py:init():629] backend started and connected
20
+ 2024-04-11 01:01:03,997 INFO MainThread:94 [wandb_run.py:_label_probe_notebook():1299] probe notebook
21
+ 2024-04-11 01:01:04,350 INFO MainThread:94 [wandb_init.py:init():721] updated telemetry
22
+ 2024-04-11 01:01:04,354 INFO MainThread:94 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
23
+ 2024-04-11 01:01:04,510 INFO MainThread:94 [wandb_run.py:_on_init():2344] communicating current version
24
+ 2024-04-11 01:01:04,593 INFO MainThread:94 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
25
 
26
+ 2024-04-11 01:01:04,593 INFO MainThread:94 [wandb_init.py:init():805] starting run threads in backend
27
+ 2024-04-11 01:01:20,600 INFO MainThread:94 [wandb_run.py:_console_start():2323] atexit reg
28
+ 2024-04-11 01:01:20,600 INFO MainThread:94 [wandb_run.py:_redirect():2178] redirect: wrap_raw
29
+ 2024-04-11 01:01:20,601 INFO MainThread:94 [wandb_run.py:_redirect():2243] Wrapping output streams.
30
+ 2024-04-11 01:01:20,601 INFO MainThread:94 [wandb_run.py:_redirect():2268] Redirects installed.
31
+ 2024-04-11 01:01:20,602 INFO MainThread:94 [wandb_init.py:init():848] run started, returning control to user process
32
+ 2024-04-11 01:01:20,608 INFO MainThread:94 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 50257, 'n_positions': 1024, 'n_embd': 1280, 'n_layer': 36, 'n_head': 20, 'n_inner': None, 'activation_function': 'gelu_new', 'resid_pdrop': 0.1, 'embd_pdrop': 0.1, 'attn_pdrop': 0.1, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'summary_type': 'cls_index', 'summary_use_proj': True, 'summary_activation': None, 'summary_first_dropout': 0.1, 'summary_proj_to_labels': True, 'scale_attn_weights': True, 'use_cache': False, 'scale_attn_by_inverse_layer_idx': False, 'reorder_and_upcast_attn': False, 'bos_token_id': 50256, 'eos_token_id': 50256, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': None, 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['GPT2LMHeadModel'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': None, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': {'text-generation': {'do_sample': True, 'max_length': 50}}, 'problem_type': None, '_name_or_path': 'openai-community/gpt2-large', 'transformers_version': '4.39.3', 'model_type': 'gpt2', 'n_ctx': 1024, 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr11_00-59-48_c072b7c9e487', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 
'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
 
 
 
 
wandb/run-20240411_010103-4b3fzolv/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240411_010103-4b3fzolv/files/config.yaml ADDED
@@ -0,0 +1,708 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712797263.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ - 62
48
+ 4: 3.10.13
49
+ 5: 0.16.5
50
+ 6: 4.39.3
51
+ 8:
52
+ - 1
53
+ - 2
54
+ - 5
55
+ 9:
56
+ 1: transformers_trainer
57
+ 13: linux-x86_64
58
+ m:
59
+ - 1: train/global_step
60
+ 6:
61
+ - 3
62
+ - 1: train/loss
63
+ 5: 1
64
+ 6:
65
+ - 1
66
+ - 1: train/grad_norm
67
+ 5: 1
68
+ 6:
69
+ - 1
70
+ - 1: train/learning_rate
71
+ 5: 1
72
+ 6:
73
+ - 1
74
+ - 1: train/epoch
75
+ 5: 1
76
+ 6:
77
+ - 1
78
+ vocab_size:
79
+ desc: null
80
+ value: 50257
81
+ n_positions:
82
+ desc: null
83
+ value: 1024
84
+ n_embd:
85
+ desc: null
86
+ value: 1280
87
+ n_layer:
88
+ desc: null
89
+ value: 36
90
+ n_head:
91
+ desc: null
92
+ value: 20
93
+ n_inner:
94
+ desc: null
95
+ value: null
96
+ activation_function:
97
+ desc: null
98
+ value: gelu_new
99
+ resid_pdrop:
100
+ desc: null
101
+ value: 0.1
102
+ embd_pdrop:
103
+ desc: null
104
+ value: 0.1
105
+ attn_pdrop:
106
+ desc: null
107
+ value: 0.1
108
+ layer_norm_epsilon:
109
+ desc: null
110
+ value: 1.0e-05
111
+ initializer_range:
112
+ desc: null
113
+ value: 0.02
114
+ summary_type:
115
+ desc: null
116
+ value: cls_index
117
+ summary_use_proj:
118
+ desc: null
119
+ value: true
120
+ summary_activation:
121
+ desc: null
122
+ value: null
123
+ summary_first_dropout:
124
+ desc: null
125
+ value: 0.1
126
+ summary_proj_to_labels:
127
+ desc: null
128
+ value: true
129
+ scale_attn_weights:
130
+ desc: null
131
+ value: true
132
+ use_cache:
133
+ desc: null
134
+ value: false
135
+ scale_attn_by_inverse_layer_idx:
136
+ desc: null
137
+ value: false
138
+ reorder_and_upcast_attn:
139
+ desc: null
140
+ value: false
141
+ bos_token_id:
142
+ desc: null
143
+ value: 50256
144
+ eos_token_id:
145
+ desc: null
146
+ value: 50256
147
+ return_dict:
148
+ desc: null
149
+ value: true
150
+ output_hidden_states:
151
+ desc: null
152
+ value: false
153
+ output_attentions:
154
+ desc: null
155
+ value: false
156
+ torchscript:
157
+ desc: null
158
+ value: false
159
+ torch_dtype:
160
+ desc: null
161
+ value: null
162
+ use_bfloat16:
163
+ desc: null
164
+ value: false
165
+ tf_legacy_loss:
166
+ desc: null
167
+ value: false
168
+ pruned_heads:
169
+ desc: null
170
+ value: {}
171
+ tie_word_embeddings:
172
+ desc: null
173
+ value: true
174
+ chunk_size_feed_forward:
175
+ desc: null
176
+ value: 0
177
+ is_encoder_decoder:
178
+ desc: null
179
+ value: false
180
+ is_decoder:
181
+ desc: null
182
+ value: false
183
+ cross_attention_hidden_size:
184
+ desc: null
185
+ value: null
186
+ add_cross_attention:
187
+ desc: null
188
+ value: false
189
+ tie_encoder_decoder:
190
+ desc: null
191
+ value: false
192
+ max_length:
193
+ desc: null
194
+ value: 20
195
+ min_length:
196
+ desc: null
197
+ value: 0
198
+ do_sample:
199
+ desc: null
200
+ value: false
201
+ early_stopping:
202
+ desc: null
203
+ value: false
204
+ num_beams:
205
+ desc: null
206
+ value: 1
207
+ num_beam_groups:
208
+ desc: null
209
+ value: 1
210
+ diversity_penalty:
211
+ desc: null
212
+ value: 0.0
213
+ temperature:
214
+ desc: null
215
+ value: 1.0
216
+ top_k:
217
+ desc: null
218
+ value: 50
219
+ top_p:
220
+ desc: null
221
+ value: 1.0
222
+ typical_p:
223
+ desc: null
224
+ value: 1.0
225
+ repetition_penalty:
226
+ desc: null
227
+ value: 1.0
228
+ length_penalty:
229
+ desc: null
230
+ value: 1.0
231
+ no_repeat_ngram_size:
232
+ desc: null
233
+ value: 0
234
+ encoder_no_repeat_ngram_size:
235
+ desc: null
236
+ value: 0
237
+ bad_words_ids:
238
+ desc: null
239
+ value: null
240
+ num_return_sequences:
241
+ desc: null
242
+ value: 1
243
+ output_scores:
244
+ desc: null
245
+ value: false
246
+ return_dict_in_generate:
247
+ desc: null
248
+ value: false
249
+ forced_bos_token_id:
250
+ desc: null
251
+ value: null
252
+ forced_eos_token_id:
253
+ desc: null
254
+ value: null
255
+ remove_invalid_values:
256
+ desc: null
257
+ value: false
258
+ exponential_decay_length_penalty:
259
+ desc: null
260
+ value: null
261
+ suppress_tokens:
262
+ desc: null
263
+ value: null
264
+ begin_suppress_tokens:
265
+ desc: null
266
+ value: null
267
+ architectures:
268
+ desc: null
269
+ value:
270
+ - GPT2LMHeadModel
271
+ finetuning_task:
272
+ desc: null
273
+ value: null
274
+ id2label:
275
+ desc: null
276
+ value:
277
+ '0': LABEL_0
278
+ '1': LABEL_1
279
+ label2id:
280
+ desc: null
281
+ value:
282
+ LABEL_0: 0
283
+ LABEL_1: 1
284
+ tokenizer_class:
285
+ desc: null
286
+ value: null
287
+ prefix:
288
+ desc: null
289
+ value: null
290
+ pad_token_id:
291
+ desc: null
292
+ value: null
293
+ sep_token_id:
294
+ desc: null
295
+ value: null
296
+ decoder_start_token_id:
297
+ desc: null
298
+ value: null
299
+ task_specific_params:
300
+ desc: null
301
+ value:
302
+ text-generation:
303
+ do_sample: true
304
+ max_length: 50
305
+ problem_type:
306
+ desc: null
307
+ value: null
308
+ _name_or_path:
309
+ desc: null
310
+ value: openai-community/gpt2-large
311
+ transformers_version:
312
+ desc: null
313
+ value: 4.39.3
314
+ model_type:
315
+ desc: null
316
+ value: gpt2
317
+ n_ctx:
318
+ desc: null
319
+ value: 1024
320
+ quantization_config:
321
+ desc: null
322
+ value:
323
+ quant_method: QuantizationMethod.BITS_AND_BYTES
324
+ _load_in_8bit: false
325
+ _load_in_4bit: true
326
+ llm_int8_threshold: 6.0
327
+ llm_int8_skip_modules: null
328
+ llm_int8_enable_fp32_cpu_offload: false
329
+ llm_int8_has_fp16_weight: false
330
+ bnb_4bit_quant_type: nf4
331
+ bnb_4bit_use_double_quant: false
332
+ bnb_4bit_compute_dtype: float16
333
+ bnb_4bit_quant_storage: uint8
334
+ load_in_4bit: true
335
+ load_in_8bit: false
336
+ output_dir:
337
+ desc: null
338
+ value: /kaggle/working/
339
+ overwrite_output_dir:
340
+ desc: null
341
+ value: false
342
+ do_train:
343
+ desc: null
344
+ value: false
345
+ do_eval:
346
+ desc: null
347
+ value: false
348
+ do_predict:
349
+ desc: null
350
+ value: false
351
+ evaluation_strategy:
352
+ desc: null
353
+ value: 'no'
354
+ prediction_loss_only:
355
+ desc: null
356
+ value: false
357
+ per_device_train_batch_size:
358
+ desc: null
359
+ value: 8
360
+ per_device_eval_batch_size:
361
+ desc: null
362
+ value: 8
363
+ per_gpu_train_batch_size:
364
+ desc: null
365
+ value: null
366
+ per_gpu_eval_batch_size:
367
+ desc: null
368
+ value: null
369
+ gradient_accumulation_steps:
370
+ desc: null
371
+ value: 1
372
+ eval_accumulation_steps:
373
+ desc: null
374
+ value: null
375
+ eval_delay:
376
+ desc: null
377
+ value: 0
378
+ learning_rate:
379
+ desc: null
380
+ value: 3.0e-05
381
+ weight_decay:
382
+ desc: null
383
+ value: 0.0001
384
+ adam_beta1:
385
+ desc: null
386
+ value: 0.9
387
+ adam_beta2:
388
+ desc: null
389
+ value: 0.999
390
+ adam_epsilon:
391
+ desc: null
392
+ value: 1.0e-08
393
+ max_grad_norm:
394
+ desc: null
395
+ value: 0.3
396
+ num_train_epochs:
397
+ desc: null
398
+ value: 5
399
+ max_steps:
400
+ desc: null
401
+ value: 20
402
+ lr_scheduler_type:
403
+ desc: null
404
+ value: cosine
405
+ lr_scheduler_kwargs:
406
+ desc: null
407
+ value: {}
408
+ warmup_ratio:
409
+ desc: null
410
+ value: 0.03
411
+ warmup_steps:
412
+ desc: null
413
+ value: 0
414
+ log_level:
415
+ desc: null
416
+ value: passive
417
+ log_level_replica:
418
+ desc: null
419
+ value: warning
420
+ log_on_each_node:
421
+ desc: null
422
+ value: true
423
+ logging_dir:
424
+ desc: null
425
+ value: /kaggle/working/runs/Apr11_00-59-48_c072b7c9e487
426
+ logging_strategy:
427
+ desc: null
428
+ value: steps
429
+ logging_first_step:
430
+ desc: null
431
+ value: false
432
+ logging_steps:
433
+ desc: null
434
+ value: 20
435
+ logging_nan_inf_filter:
436
+ desc: null
437
+ value: true
438
+ save_strategy:
439
+ desc: null
440
+ value: steps
441
+ save_steps:
442
+ desc: null
443
+ value: 20
444
+ save_total_limit:
445
+ desc: null
446
+ value: 1
447
+ save_safetensors:
448
+ desc: null
449
+ value: true
450
+ save_on_each_node:
451
+ desc: null
452
+ value: false
453
+ save_only_model:
454
+ desc: null
455
+ value: false
456
+ no_cuda:
457
+ desc: null
458
+ value: false
459
+ use_cpu:
460
+ desc: null
461
+ value: false
462
+ use_mps_device:
463
+ desc: null
464
+ value: false
465
+ seed:
466
+ desc: null
467
+ value: 42
468
+ data_seed:
469
+ desc: null
470
+ value: null
471
+ jit_mode_eval:
472
+ desc: null
473
+ value: false
474
+ use_ipex:
475
+ desc: null
476
+ value: false
477
+ bf16:
478
+ desc: null
479
+ value: false
480
+ fp16:
481
+ desc: null
482
+ value: false
483
+ fp16_opt_level:
484
+ desc: null
485
+ value: O1
486
+ half_precision_backend:
487
+ desc: null
488
+ value: auto
489
+ bf16_full_eval:
490
+ desc: null
491
+ value: false
492
+ fp16_full_eval:
493
+ desc: null
494
+ value: false
495
+ tf32:
496
+ desc: null
497
+ value: null
498
+ local_rank:
499
+ desc: null
500
+ value: 0
501
+ ddp_backend:
502
+ desc: null
503
+ value: null
504
+ tpu_num_cores:
505
+ desc: null
506
+ value: null
507
+ tpu_metrics_debug:
508
+ desc: null
509
+ value: false
510
+ debug:
511
+ desc: null
512
+ value: []
513
+ dataloader_drop_last:
514
+ desc: null
515
+ value: false
516
+ eval_steps:
517
+ desc: null
518
+ value: null
519
+ dataloader_num_workers:
520
+ desc: null
521
+ value: 8
522
+ dataloader_prefetch_factor:
523
+ desc: null
524
+ value: null
525
+ past_index:
526
+ desc: null
527
+ value: -1
528
+ run_name:
529
+ desc: null
530
+ value: /kaggle/working/
531
+ disable_tqdm:
532
+ desc: null
533
+ value: false
534
+ remove_unused_columns:
535
+ desc: null
536
+ value: true
537
+ label_names:
538
+ desc: null
539
+ value: null
540
+ load_best_model_at_end:
541
+ desc: null
542
+ value: false
543
+ metric_for_best_model:
544
+ desc: null
545
+ value: null
546
+ greater_is_better:
547
+ desc: null
548
+ value: null
549
+ ignore_data_skip:
550
+ desc: null
551
+ value: false
552
+ fsdp:
553
+ desc: null
554
+ value: []
555
+ fsdp_min_num_params:
556
+ desc: null
557
+ value: 0
558
+ fsdp_config:
559
+ desc: null
560
+ value:
561
+ min_num_params: 0
562
+ xla: false
563
+ xla_fsdp_v2: false
564
+ xla_fsdp_grad_ckpt: false
565
+ fsdp_transformer_layer_cls_to_wrap:
566
+ desc: null
567
+ value: null
568
+ accelerator_config:
569
+ desc: null
570
+ value:
571
+ split_batches: false
572
+ dispatch_batches: null
573
+ even_batches: true
574
+ use_seedable_sampler: true
575
+ deepspeed:
576
+ desc: null
577
+ value: null
578
+ label_smoothing_factor:
579
+ desc: null
580
+ value: 0.0
581
+ optim:
582
+ desc: null
583
+ value: paged_adamw_8bit
584
+ optim_args:
585
+ desc: null
586
+ value: null
587
+ adafactor:
588
+ desc: null
589
+ value: false
590
+ group_by_length:
591
+ desc: null
592
+ value: false
593
+ length_column_name:
594
+ desc: null
595
+ value: length
596
+ report_to:
597
+ desc: null
598
+ value:
599
+ - tensorboard
600
+ - wandb
601
+ ddp_find_unused_parameters:
602
+ desc: null
603
+ value: null
604
+ ddp_bucket_cap_mb:
605
+ desc: null
606
+ value: null
607
+ ddp_broadcast_buffers:
608
+ desc: null
609
+ value: null
610
+ dataloader_pin_memory:
611
+ desc: null
612
+ value: true
613
+ dataloader_persistent_workers:
614
+ desc: null
615
+ value: false
616
+ skip_memory_metrics:
617
+ desc: null
618
+ value: true
619
+ use_legacy_prediction_loop:
620
+ desc: null
621
+ value: false
622
+ push_to_hub:
623
+ desc: null
624
+ value: false
625
+ resume_from_checkpoint:
626
+ desc: null
627
+ value: null
628
+ hub_model_id:
629
+ desc: null
630
+ value: null
631
+ hub_strategy:
632
+ desc: null
633
+ value: every_save
634
+ hub_token:
635
+ desc: null
636
+ value: <HUB_TOKEN>
637
+ hub_private_repo:
638
+ desc: null
639
+ value: false
640
+ hub_always_push:
641
+ desc: null
642
+ value: false
643
+ gradient_checkpointing:
644
+ desc: null
645
+ value: true
646
+ gradient_checkpointing_kwargs:
647
+ desc: null
648
+ value: null
649
+ include_inputs_for_metrics:
650
+ desc: null
651
+ value: false
652
+ fp16_backend:
653
+ desc: null
654
+ value: auto
655
+ push_to_hub_model_id:
656
+ desc: null
657
+ value: null
658
+ push_to_hub_organization:
659
+ desc: null
660
+ value: null
661
+ push_to_hub_token:
662
+ desc: null
663
+ value: <PUSH_TO_HUB_TOKEN>
664
+ mp_parameters:
665
+ desc: null
666
+ value: ''
667
+ auto_find_batch_size:
668
+ desc: null
669
+ value: true
670
+ full_determinism:
671
+ desc: null
672
+ value: false
673
+ torchdynamo:
674
+ desc: null
675
+ value: null
676
+ ray_scope:
677
+ desc: null
678
+ value: last
679
+ ddp_timeout:
680
+ desc: null
681
+ value: 1800
682
+ torch_compile:
683
+ desc: null
684
+ value: false
685
+ torch_compile_backend:
686
+ desc: null
687
+ value: null
688
+ torch_compile_mode:
689
+ desc: null
690
+ value: null
691
+ dispatch_batches:
692
+ desc: null
693
+ value: null
694
+ split_batches:
695
+ desc: null
696
+ value: null
697
+ include_tokens_per_second:
698
+ desc: null
699
+ value: false
700
+ include_num_input_tokens_seen:
701
+ desc: null
702
+ value: false
703
+ neftune_noise_alpha:
704
+ desc: null
705
+ value: null
706
+ optim_target_modules:
707
+ desc: null
708
+ value: null
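
For reference, the training setup captured in the exported run config above maps onto a `transformers.TrainingArguments` call roughly like the sketch below. This is a minimal reconstruction from the logged values (transformers 4.39.x API); anything not present in the dump is assumed to stay at its default, and it is not code taken from this commit.

```python
from transformers import TrainingArguments

# Minimal sketch reconstructed from the values in the W&B config dump above.
# Fields not listed here are assumed to keep their transformers 4.39.x defaults.
training_args = TrainingArguments(
    output_dir="/kaggle/working/",
    per_device_train_batch_size=8,
    learning_rate=3e-5,
    weight_decay=1e-4,
    max_grad_norm=0.3,
    num_train_epochs=5,
    max_steps=20,                    # max_steps takes precedence over num_train_epochs
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    logging_steps=20,
    save_strategy="steps",
    save_steps=20,
    save_total_limit=1,
    optim="paged_adamw_8bit",
    dataloader_num_workers=8,
    gradient_checkpointing=True,
    auto_find_batch_size=True,
    report_to=["tensorboard", "wandb"],
)
```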
wandb/run-20240411_010103-4b3fzolv/files/output.log ADDED
@@ -0,0 +1,41 @@
1
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:557: UserWarning: This DataLoader will create 8 worker processes in total. Our suggested max number of worker in current system is 4, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
2
+ warnings.warn(_create_warning_msg(
3
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
4
+ To disable this warning, you can either:
5
+ - Avoid using `tokenizers` before the fork if possible
6
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
7
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
8
+ To disable this warning, you can either:
9
+ - Avoid using `tokenizers` before the fork if possible
10
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
11
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
12
+ To disable this warning, you can either:
13
+ - Avoid using `tokenizers` before the fork if possible
14
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
15
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
16
+ To disable this warning, you can either:
17
+ - Avoid using `tokenizers` before the fork if possible
18
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
19
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
20
+ To disable this warning, you can either:
21
+ - Avoid using `tokenizers` before the fork if possible
22
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
23
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
24
+ To disable this warning, you can either:
25
+ - Avoid using `tokenizers` before the fork if possible
26
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
27
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
28
+ To disable this warning, you can either:
29
+ - Avoid using `tokenizers` before the fork if possible
30
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
31
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
32
+ To disable this warning, you can either:
33
+ - Avoid using `tokenizers` before the fork if possible
34
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
35
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
36
+ warnings.warn(
37
+ /opt/conda/lib/python3.10/site-packages/torch/nn/parallel/_functions.py:68: UserWarning: Was asked to gather along dimension 0, but all input tensors were scalars; will instead unsqueeze and return a vector.
38
+ warnings.warn('Was asked to gather along dimension 0, but all '
39
+ /opt/conda/lib/python3.10/site-packages/peft/utils/save_and_load.py:139: UserWarning: Setting `save_embedding_layers` to `True` as embedding layers found in `target_modules`.
40
+ warnings.warn("Setting `save_embedding_layers` to `True` as embedding layers found in `target_modules`.")
41
+ /opt/conda/lib/python3.10/site-packages/peft/utils/save_and_load.py:139: UserWarning: Setting `save_embedding_layers` to `True` as embedding layers found in `target_modules`.
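
The repeated `huggingface/tokenizers` fork warning above can be silenced exactly as the log suggests, and the `torch.utils.checkpoint` warning can be pre-empted by passing `use_reentrant` explicitly. A small illustrative sketch (not part of this commit; `gradient_checkpointing_kwargs` is the transformers 4.39.x argument name):

```python
import os

# Set before any DataLoader workers are forked, as the warning recommends.
os.environ["TOKENIZERS_PARALLELISM"] = "false"

# Passing use_reentrant explicitly avoids the torch.utils.checkpoint warning, e.g.:
# TrainingArguments(..., gradient_checkpointing=True,
#                   gradient_checkpointing_kwargs={"use_reentrant": False})
```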
wandb/run-20240411_010103-4b3fzolv/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.0
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
655
+ rich==13.7.1
656
+ rmm==23.8.0
657
+ rope==1.13.0
658
+ rpds-py==0.16.2
659
+ rsa==4.9
660
+ ruamel-yaml-conda==0.15.100
661
+ ruamel.yaml.clib==0.2.7
662
+ ruamel.yaml==0.17.40
663
+ s2sphere==0.2.5
664
+ s3fs==2024.2.0
665
+ s3transfer==0.6.2
666
+ safetensors==0.4.2
667
+ scattertext==0.1.19
668
+ scikit-image==0.22.0
669
+ scikit-learn-intelex==2024.2.0
670
+ scikit-learn==1.2.2
671
+ scikit-multilearn==0.2.0
672
+ scikit-optimize==0.10.1
673
+ scikit-plot==0.3.7
674
+ scikit-surprise==1.1.3
675
+ scipy==1.11.4
676
+ scipy==1.12.0
677
+ seaborn==0.12.2
678
+ segment_anything==1.0
679
+ segregation==2.5
680
+ semver==3.0.2
681
+ sentencepiece==0.2.0
682
+ sentry-sdk==1.44.1
683
+ setproctitle==1.3.3
684
+ setuptools-git==1.2
685
+ setuptools-scm==8.0.4
686
+ setuptools==69.0.3
687
+ shap==0.44.1
688
+ shapely==2.0.3
689
+ shellingham==1.5.4
690
+ shtab==1.7.1
691
+ simpervisor==1.0.0
692
+ simplejson==3.19.2
693
+ six==1.16.0
694
+ sklearn-pandas==2.2.0
695
+ slicer==0.0.7
696
+ smart-open==6.4.0
697
+ smmap==5.0.1
698
+ sniffio==1.3.0
699
+ snowballstemmer==2.2.0
700
+ snuggs==1.4.7
701
+ sortedcontainers==2.4.0
702
+ soundfile==0.12.1
703
+ soupsieve==2.5
704
+ soxr==0.3.7
705
+ spacy-legacy==3.0.12
706
+ spacy-loggers==1.0.5
707
+ spacy==3.7.2
708
+ spaghetti==1.7.5.post1
709
+ spectral==0.23.1
710
+ spglm==1.1.0
711
+ sphinx-rtd-theme==0.2.4
712
+ spint==1.0.7
713
+ splot==1.1.5.post1
714
+ spopt==0.6.0
715
+ spreg==1.4.2
716
+ spvcm==0.3.0
717
+ sqlparse==0.4.4
718
+ squarify==0.4.3
719
+ srsly==2.4.8
720
+ stable-baselines3==2.1.0
721
+ stack-data==0.6.2
722
+ stack-data==0.6.3
723
+ stanio==0.5.0
724
+ starlette==0.32.0.post1
725
+ statsmodels==0.14.1
726
+ stemming==1.0.1
727
+ stop-words==2018.7.23
728
+ stopit==1.1.2
729
+ stumpy==1.12.0
730
+ sympy==1.12
731
+ tables==3.9.2
732
+ tabulate==0.9.0
733
+ tangled-up-in-unicode==0.2.0
734
+ tbb==2021.12.0
735
+ tblib==3.0.0
736
+ tenacity==8.2.3
737
+ tensorboard-data-server==0.7.2
738
+ tensorboard-plugin-profile==2.15.0
739
+ tensorboard==2.15.1
740
+ tensorboardX==2.6.2.2
741
+ tensorflow-cloud==0.1.16
742
+ tensorflow-datasets==4.9.4
743
+ tensorflow-decision-forests==1.8.1
744
+ tensorflow-estimator==2.15.0
745
+ tensorflow-hub==0.16.1
746
+ tensorflow-io-gcs-filesystem==0.35.0
747
+ tensorflow-io==0.35.0
748
+ tensorflow-metadata==0.14.0
749
+ tensorflow-probability==0.23.0
750
+ tensorflow-serving-api==2.14.1
751
+ tensorflow-text==2.15.0
752
+ tensorflow-transform==0.14.0
753
+ tensorflow==2.15.0
754
+ tensorstore==0.1.56
755
+ termcolor==2.4.0
756
+ terminado==0.18.0
757
+ testpath==0.6.0
758
+ text-unidecode==1.3
759
+ textblob==0.18.0.post0
760
+ texttable==1.7.0
761
+ tf_keras==2.15.1
762
+ tfp-nightly==0.24.0.dev0
763
+ thinc==8.2.2
764
+ threadpoolctl==3.2.0
765
+ tifffile==2023.12.9
766
+ timm==0.9.16
767
+ tinycss2==1.2.1
768
+ tobler==0.11.2
769
+ tokenizers==0.15.2
770
+ toml==0.10.2
771
+ tomli==2.0.1
772
+ tomlkit==0.12.4
773
+ toolz==0.12.1
774
+ torch==2.1.2
775
+ torchaudio==2.1.2
776
+ torchdata==0.7.1
777
+ torchinfo==1.8.0
778
+ torchmetrics==1.3.2
779
+ torchtext==0.16.2
780
+ torchvision==0.16.2
781
+ tornado==6.3.3
782
+ tqdm==4.66.1
783
+ traceml==1.0.8
784
+ traitlets==5.9.0
785
+ traittypes==0.2.1
786
+ transformers==4.39.3
787
+ treelite-runtime==3.2.0
788
+ treelite==3.2.0
789
+ trl==0.8.1
790
+ truststore==0.8.0
791
+ trx-python==0.2.9
792
+ tsfresh==0.20.2
793
+ typeguard==4.1.5
794
+ typer==0.9.0
795
+ typer==0.9.4
796
+ types-python-dateutil==2.8.19.20240106
797
+ typing-inspect==0.9.0
798
+ typing-utils==0.1.0
799
+ typing_extensions==4.9.0
800
+ tyro==0.8.3
801
+ tzdata==2023.4
802
+ uc-micro-py==1.0.3
803
+ ucx-py==0.33.0
804
+ ujson==5.9.0
805
+ umap-learn==0.5.5
806
+ unicodedata2==15.1.0
807
+ update-checker==0.18.0
808
+ uri-template==1.3.0
809
+ uritemplate==3.0.1
810
+ urllib3==1.26.18
811
+ urllib3==2.1.0
812
+ urwid==2.6.10
813
+ urwid_readline==0.14
814
+ uvicorn==0.25.0
815
+ uvloop==0.19.0
816
+ vaex-astro==0.9.3
817
+ vaex-core==4.17.1
818
+ vaex-hdf5==0.14.1
819
+ vaex-jupyter==0.8.2
820
+ vaex-ml==0.18.3
821
+ vaex-server==0.9.0
822
+ vaex-viz==0.5.4
823
+ vaex==4.17.0
824
+ vec_noise==1.1.4
825
+ vecstack==0.4.0
826
+ virtualenv==20.21.0
827
+ visions==0.7.5
828
+ vowpalwabbit==9.9.0
829
+ vtk==9.3.0
830
+ wandb==0.16.5
831
+ wasabi==1.1.2
832
+ watchfiles==0.21.0
833
+ wavio==0.0.8
834
+ wcwidth==0.2.13
835
+ weasel==0.3.4
836
+ webcolors==1.13
837
+ webencodings==0.5.1
838
+ websocket-client==1.7.0
839
+ websockets==12.0
840
+ wfdb==4.1.2
841
+ whatthepatch==1.0.5
842
+ wheel==0.42.0
843
+ widgetsnbextension==3.6.6
844
+ witwidget==1.8.1
845
+ woodwork==0.29.0
846
+ wordcloud==1.9.3
847
+ wordsegment==1.3.1
848
+ wrapt==1.14.1
849
+ xarray-einstats==0.7.0
850
+ xarray==2024.3.0
851
+ xgboost==2.0.3
852
+ xvfbwrapper==0.2.9
853
+ xxhash==3.4.1
854
+ xyzservices==2023.10.1
855
+ y-py==0.6.2
856
+ yapf==0.40.2
857
+ yarl==1.9.3
858
+ yarl==1.9.4
859
+ ydata-profiling==4.6.4
860
+ yellowbrick==1.5
861
+ ypy-websocket==0.8.4
862
+ zict==3.0.0
863
+ zipp==3.17.0
864
+ zstandard==0.22.0
wandb/run-20240411_010103-4b3fzolv/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-11T01:01:04.623823",
5
+ "startedAt": "2024-04-11T01:01:03.976173",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "c072b7c9e487",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.188,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.188,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.188,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.188,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.188,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5566.689571380615
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
wandb/run-20240411_010103-4b3fzolv/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
1
+ {"train/loss": 3.5326, "train/grad_norm": 3.3439154624938965, "train/learning_rate": 0.0, "train/epoch": 0.0, "train/global_step": 20, "_timestamp": 1712797418.7374408, "_runtime": 154.75297474861145, "_step": 1, "train_runtime": 169.16, "train_samples_per_second": 1.892, "train_steps_per_second": 0.118, "total_flos": 301651923975168.0, "train_loss": 3.5326202392578123}
wandb/run-20240411_010103-4b3fzolv/logs/debug-internal.log ADDED
@@ -0,0 +1,162 @@
1
+ 2024-04-11 01:01:03,983 INFO StreamThr :138 [internal.py:wandb_internal():86] W&B internal server running at pid: 138, started at: 2024-04-11 01:01:03.982415
2
+ 2024-04-11 01:01:03,984 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-11 01:01:04,351 INFO WriterThread:138 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/run-4b3fzolv.wandb
4
+ 2024-04-11 01:01:04,352 DEBUG SenderThread:138 [sender.py:send():379] send: header
5
+ 2024-04-11 01:01:04,355 DEBUG SenderThread:138 [sender.py:send():379] send: run
6
+ 2024-04-11 01:01:04,503 INFO SenderThread:138 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files
7
+ 2024-04-11 01:01:04,503 INFO SenderThread:138 [sender.py:_start_run_threads():1124] run started: 4b3fzolv with start time 1712797263.984466
8
+ 2024-04-11 01:01:04,511 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-11 01:01:04,511 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-11 01:01:04,599 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-11 01:01:04,610 DEBUG HandlerThread:138 [system_info.py:__init__():26] System info init
12
+ 2024-04-11 01:01:04,610 DEBUG HandlerThread:138 [system_info.py:__init__():41] System info init done
13
+ 2024-04-11 01:01:04,610 INFO HandlerThread:138 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-11 01:01:04,610 INFO SystemMonitor:138 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-11 01:01:04,610 INFO HandlerThread:138 [system_monitor.py:probe():214] Collecting system info
16
+ 2024-04-11 01:01:04,611 INFO SystemMonitor:138 [interfaces.py:start():190] Started cpu monitoring
17
+ 2024-04-11 01:01:04,611 INFO SystemMonitor:138 [interfaces.py:start():190] Started disk monitoring
18
+ 2024-04-11 01:01:04,612 INFO SystemMonitor:138 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-11 01:01:04,613 INFO SystemMonitor:138 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-11 01:01:04,614 INFO SystemMonitor:138 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-11 01:01:04,623 DEBUG HandlerThread:138 [system_info.py:probe():150] Probing system
22
+ 2024-04-11 01:01:04,625 DEBUG HandlerThread:138 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-11 01:01:04,625 DEBUG HandlerThread:138 [system_info.py:probe():198] Probing system done
24
+ 2024-04-11 01:01:04,626 DEBUG HandlerThread:138 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-11T01:01:04.623823', 'startedAt': '2024-04-11T01:01:03.976173', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': 'c072b7c9e487', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.188, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.188, 'min': 0.0, 'max': 0.0}, {'current': 2000.188, 'min': 0.0, 'max': 0.0}, {'current': 2000.188, 'min': 0.0, 'max': 0.0}, {'current': 2000.188, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5566.689571380615}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-11 01:01:04,626 INFO HandlerThread:138 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-11 01:01:04,626 INFO HandlerThread:138 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-11 01:01:04,626 DEBUG HandlerThread:138 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-11 01:01:05,505 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/conda-environment.yaml
29
+ 2024-04-11 01:01:19,640 ERROR HandlerThread:138 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
+ Traceback (most recent call last):
31
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
+ subprocess.call(
33
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
34
+ return p.wait(timeout=timeout)
35
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
36
+ return self._wait(timeout=timeout)
37
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
+ raise TimeoutExpired(self.args, timeout)
39
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-11 01:01:19,643 DEBUG HandlerThread:138 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-11 01:01:19,644 INFO HandlerThread:138 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-11 01:01:19,652 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-11 01:01:19,652 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-11 01:01:19,652 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-11 01:01:19,653 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-11 01:01:19,653 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-11 01:01:19,653 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: keepalive
48
+ 2024-04-11 01:01:19,653 DEBUG SenderThread:138 [sender.py:send():379] send: files
49
+ 2024-04-11 01:01:19,653 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
+ 2024-04-11 01:01:19,933 INFO wandb-upload_0:138 [upload_job.py:push():131] Uploaded file /tmp/tmpiqqv1dwfwandb/3d55vshp-wandb-metadata.json
51
+ 2024-04-11 01:01:20,508 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/wandb-metadata.json
52
+ 2024-04-11 01:01:20,599 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: python_packages
53
+ 2024-04-11 01:01:20,599 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: python_packages
54
+ 2024-04-11 01:01:20,602 DEBUG SenderThread:138 [sender.py:send():379] send: telemetry
55
+ 2024-04-11 01:01:20,613 DEBUG SenderThread:138 [sender.py:send():379] send: config
56
+ 2024-04-11 01:01:20,615 DEBUG SenderThread:138 [sender.py:send():379] send: metric
57
+ 2024-04-11 01:01:20,615 DEBUG SenderThread:138 [sender.py:send():379] send: telemetry
58
+ 2024-04-11 01:01:20,615 DEBUG SenderThread:138 [sender.py:send():379] send: metric
59
+ 2024-04-11 01:01:20,615 WARNING SenderThread:138 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
60
+ 2024-04-11 01:01:20,616 DEBUG SenderThread:138 [sender.py:send():379] send: telemetry
61
+ 2024-04-11 01:01:20,616 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
62
+ 2024-04-11 01:01:20,616 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
63
+ 2024-04-11 01:01:20,617 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
64
+ 2024-04-11 01:01:21,508 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/requirements.txt
65
+ 2024-04-11 01:01:21,509 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
66
+ 2024-04-11 01:01:23,509 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
67
+ 2024-04-11 01:01:25,510 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
68
+ 2024-04-11 01:01:25,513 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-11 01:01:27,511 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
70
+ 2024-04-11 01:01:30,790 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
71
+ 2024-04-11 01:01:35,602 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
72
+ 2024-04-11 01:01:35,603 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
73
+ 2024-04-11 01:01:35,604 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-11 01:01:36,648 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-11 01:01:37,515 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/config.yaml
76
+ 2024-04-11 01:01:41,766 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
77
+ 2024-04-11 01:01:46,767 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
78
+ 2024-04-11 01:01:50,600 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
79
+ 2024-04-11 01:01:50,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
80
+ 2024-04-11 01:01:50,640 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
81
+ 2024-04-11 01:01:52,707 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
82
+ 2024-04-11 01:01:57,708 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
83
+ 2024-04-11 01:02:02,709 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
84
+ 2024-04-11 01:02:04,614 DEBUG SystemMonitor:138 [system_monitor.py:_start():172] Starting system metrics aggregation loop
85
+ 2024-04-11 01:02:04,616 DEBUG SenderThread:138 [sender.py:send():379] send: stats
86
+ 2024-04-11 01:02:05,600 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
87
+ 2024-04-11 01:02:05,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
88
+ 2024-04-11 01:02:05,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
89
+ 2024-04-11 01:02:08,651 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
90
+ 2024-04-11 01:02:13,651 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
91
+ 2024-04-11 01:02:18,652 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
92
+ 2024-04-11 01:02:20,600 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
93
+ 2024-04-11 01:02:20,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
94
+ 2024-04-11 01:02:20,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
95
+ 2024-04-11 01:02:23,671 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
96
+ 2024-04-11 01:02:28,672 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
97
+ 2024-04-11 01:02:33,673 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
98
+ 2024-04-11 01:02:34,617 DEBUG SenderThread:138 [sender.py:send():379] send: stats
99
+ 2024-04-11 01:02:35,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
100
+ 2024-04-11 01:02:35,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
101
+ 2024-04-11 01:02:35,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
102
+ 2024-04-11 01:02:39,669 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
103
+ 2024-04-11 01:02:44,669 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
104
+ 2024-04-11 01:02:49,670 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
105
+ 2024-04-11 01:02:50,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
106
+ 2024-04-11 01:02:50,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
107
+ 2024-04-11 01:02:50,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
108
+ 2024-04-11 01:02:54,683 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
109
+ 2024-04-11 01:02:59,684 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
110
+ 2024-04-11 01:03:04,618 DEBUG SenderThread:138 [sender.py:send():379] send: stats
111
+ 2024-04-11 01:03:05,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
112
+ 2024-04-11 01:03:05,601 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
113
+ 2024-04-11 01:03:05,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
114
+ 2024-04-11 01:03:05,668 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
115
+ 2024-04-11 01:03:10,669 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
116
+ 2024-04-11 01:03:15,670 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
117
+ 2024-04-11 01:03:20,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
118
+ 2024-04-11 01:03:20,602 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
119
+ 2024-04-11 01:03:20,641 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
120
+ 2024-04-11 01:03:21,658 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
121
+ 2024-04-11 01:03:26,659 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
122
+ 2024-04-11 01:03:31,660 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
123
+ 2024-04-11 01:03:34,619 DEBUG SenderThread:138 [sender.py:send():379] send: stats
124
+ 2024-04-11 01:03:35,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
125
+ 2024-04-11 01:03:35,602 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
126
+ 2024-04-11 01:03:35,642 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
127
+ 2024-04-11 01:03:36,661 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
128
+ 2024-04-11 01:03:37,017 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: partial_history
129
+ 2024-04-11 01:03:37,019 DEBUG SenderThread:138 [sender.py:send():379] send: metric
130
+ 2024-04-11 01:03:37,019 DEBUG SenderThread:138 [sender.py:send():379] send: metric
131
+ 2024-04-11 01:03:37,019 DEBUG SenderThread:138 [sender.py:send():379] send: metric
132
+ 2024-04-11 01:03:37,020 DEBUG SenderThread:138 [sender.py:send():379] send: metric
133
+ 2024-04-11 01:03:37,020 DEBUG SenderThread:138 [sender.py:send():379] send: history
134
+ 2024-04-11 01:03:37,020 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
135
+ 2024-04-11 01:03:37,020 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
136
+ 2024-04-11 01:03:37,559 INFO Thread-12 :138 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/wandb-summary.json
137
+ 2024-04-11 01:03:38,738 DEBUG SenderThread:138 [sender.py:send():379] send: telemetry
138
+ 2024-04-11 01:03:38,738 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
139
+ 2024-04-11 01:03:38,739 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: partial_history
140
+ 2024-04-11 01:03:38,741 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
141
+ 2024-04-11 01:03:38,741 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
142
+ 2024-04-11 01:03:38,741 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
143
+ 2024-04-11 01:03:38,742 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
144
+ 2024-04-11 01:03:38,742 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
145
+ 2024-04-11 01:03:38,742 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
146
+ 2024-04-11 01:03:38,743 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
147
+ 2024-04-11 01:03:38,743 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
148
+ 2024-04-11 01:03:38,743 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
149
+ 2024-04-11 01:03:38,744 DEBUG SenderThread:138 [sender.py:send():379] send: history
150
+ 2024-04-11 01:03:38,744 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: summary_record
151
+ 2024-04-11 01:03:38,744 INFO SenderThread:138 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
152
+ 2024-04-11 01:03:39,559 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/wandb-summary.json
153
+ 2024-04-11 01:03:39,560 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
154
+ 2024-04-11 01:03:41,560 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/output.log
155
+ 2024-04-11 01:03:41,871 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
156
+ 2024-04-11 01:03:42,561 INFO Thread-12 :138 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240411_010103-4b3fzolv/files/config.yaml
157
+ 2024-04-11 01:03:46,983 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
158
+ 2024-04-11 01:03:50,601 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: stop_status
159
+ 2024-04-11 01:03:50,602 DEBUG SenderThread:138 [sender.py:send_request():406] send_request: stop_status
160
+ 2024-04-11 01:03:50,604 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: internal_messages
161
+ 2024-04-11 01:03:52,705 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
162
+ 2024-04-11 01:03:57,706 DEBUG HandlerThread:138 [handler.py:handle_request():146] handle_request: status_report
wandb/run-20240411_010103-4b3fzolv/logs/debug.log ADDED
@@ -0,0 +1,32 @@
1
+ 2024-04-11 01:01:03,977 INFO MainThread:94 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Configure stats pid to 94
3
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_setup.py:_flush():76] Applying login settings: {'api_key': '***REDACTED***'}
10
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240411_010103-4b3fzolv/logs/debug.log
11
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240411_010103-4b3fzolv/logs/debug-internal.log
12
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7bfd48105450>
13
+ 2024-04-11 01:01:03,978 INFO MainThread:94 [wandb_init.py:init():567] calling init triggers
14
+ 2024-04-11 01:01:03,979 INFO MainThread:94 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
15
+ config: {}
16
+ 2024-04-11 01:01:03,979 INFO MainThread:94 [wandb_init.py:init():617] starting backend
17
+ 2024-04-11 01:01:03,979 INFO MainThread:94 [wandb_init.py:init():621] setting up manager
18
+ 2024-04-11 01:01:03,980 INFO MainThread:94 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
19
+ 2024-04-11 01:01:03,984 INFO MainThread:94 [wandb_init.py:init():629] backend started and connected
20
+ 2024-04-11 01:01:03,997 INFO MainThread:94 [wandb_run.py:_label_probe_notebook():1299] probe notebook
21
+ 2024-04-11 01:01:04,350 INFO MainThread:94 [wandb_init.py:init():721] updated telemetry
22
+ 2024-04-11 01:01:04,354 INFO MainThread:94 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
23
+ 2024-04-11 01:01:04,510 INFO MainThread:94 [wandb_run.py:_on_init():2344] communicating current version
24
+ 2024-04-11 01:01:04,593 INFO MainThread:94 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
25
+
26
+ 2024-04-11 01:01:04,593 INFO MainThread:94 [wandb_init.py:init():805] starting run threads in backend
27
+ 2024-04-11 01:01:20,600 INFO MainThread:94 [wandb_run.py:_console_start():2323] atexit reg
28
+ 2024-04-11 01:01:20,600 INFO MainThread:94 [wandb_run.py:_redirect():2178] redirect: wrap_raw
29
+ 2024-04-11 01:01:20,601 INFO MainThread:94 [wandb_run.py:_redirect():2243] Wrapping output streams.
30
+ 2024-04-11 01:01:20,601 INFO MainThread:94 [wandb_run.py:_redirect():2268] Redirects installed.
31
+ 2024-04-11 01:01:20,602 INFO MainThread:94 [wandb_init.py:init():848] run started, returning control to user process
32
+ 2024-04-11 01:01:20,608 INFO MainThread:94 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 50257, 'n_positions': 1024, 'n_embd': 1280, 'n_layer': 36, 'n_head': 20, 'n_inner': None, 'activation_function': 'gelu_new', 'resid_pdrop': 0.1, 'embd_pdrop': 0.1, 'attn_pdrop': 0.1, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'summary_type': 'cls_index', 'summary_use_proj': True, 'summary_activation': None, 'summary_first_dropout': 0.1, 'summary_proj_to_labels': True, 'scale_attn_weights': True, 'use_cache': False, 'scale_attn_by_inverse_layer_idx': False, 'reorder_and_upcast_attn': False, 'bos_token_id': 50256, 'eos_token_id': 50256, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': None, 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['GPT2LMHeadModel'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': None, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': {'text-generation': {'do_sample': True, 'max_length': 50}}, 'problem_type': None, '_name_or_path': 'openai-community/gpt2-large', 'transformers_version': '4.39.3', 'model_type': 'gpt2', 'n_ctx': 1024, 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.0001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 5, 'max_steps': 20, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr11_00-59-48_c072b7c9e487', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 
'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_8bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
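
The `quantization_config` recorded in the run config above corresponds roughly to the load-time setup sketched below (assuming the bitsandbytes/transformers versions pinned in requirements.txt; this is an illustration, not code from this repository):

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# 4-bit NF4 quantization, as recorded in the logged quantization_config.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=False,
)

model = AutoModelForCausalLM.from_pretrained(
    "openai-community/gpt2-large",
    quantization_config=bnb_config,
    device_map="auto",   # requires accelerate, which is pinned in requirements.txt
)
model.config.use_cache = False  # matches 'use_cache': False in the dump
```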
wandb/run-20240411_010103-4b3fzolv/run-4b3fzolv.wandb ADDED
Binary file (14.6 kB).