Femboyuwu2000 committed on
Commit
6db4657
1 Parent(s): 766fe0d

lierotica_llama2

Browse files
Files changed (44)
  1. .gitattributes +1 -0
  2. README.md +3 -3
  3. adapter_config.json +3 -3
  4. runs/Apr10_00-42-28_d91c9dc8354a/events.out.tfevents.1712710019.d91c9dc8354a.98.0 +3 -0
  5. runs/Apr10_00-49-34_d91c9dc8354a/events.out.tfevents.1712710295.d91c9dc8354a.248.0 +3 -0
  6. runs/Apr10_00-57-51_d91c9dc8354a/events.out.tfevents.1712710797.d91c9dc8354a.399.0 +3 -0
  7. runs/Apr10_01-00-47_d91c9dc8354a/events.out.tfevents.1712710968.d91c9dc8354a.483.0 +3 -0
  8. training_args.bin +1 -1
  9. wandb/debug-internal.log +0 -0
  10. wandb/debug.log +29 -29
  11. wandb/run-20240410_004732-5h6j3v5e/files/conda-environment.yaml +0 -0
  12. wandb/run-20240410_004732-5h6j3v5e/files/config.yaml +670 -0
  13. wandb/run-20240410_004732-5h6j3v5e/files/output.log +36 -0
  14. wandb/run-20240410_004732-5h6j3v5e/files/requirements.txt +864 -0
  15. wandb/run-20240410_004732-5h6j3v5e/files/wandb-metadata.json +66 -0
  16. wandb/run-20240410_004732-5h6j3v5e/files/wandb-summary.json +1 -0
  17. wandb/run-20240410_004732-5h6j3v5e/logs/debug-internal.log +217 -0
  18. wandb/run-20240410_004732-5h6j3v5e/logs/debug.log +34 -0
  19. wandb/run-20240410_004732-5h6j3v5e/run-5h6j3v5e.wandb +0 -0
  20. wandb/run-20240410_005137-yg20qnb4/files/conda-environment.yaml +0 -0
  21. wandb/run-20240410_005137-yg20qnb4/files/config.yaml +686 -0
  22. wandb/run-20240410_005137-yg20qnb4/files/output.log +38 -0
  23. wandb/run-20240410_005137-yg20qnb4/files/requirements.txt +864 -0
  24. wandb/run-20240410_005137-yg20qnb4/files/wandb-metadata.json +66 -0
  25. wandb/run-20240410_005137-yg20qnb4/files/wandb-summary.json +1 -0
  26. wandb/run-20240410_005137-yg20qnb4/logs/debug-internal.log +353 -0
  27. wandb/run-20240410_005137-yg20qnb4/logs/debug.log +33 -0
  28. wandb/run-20240410_005137-yg20qnb4/run-yg20qnb4.wandb +0 -0
  29. wandb/run-20240410_005959-52om3vq0/files/conda-environment.yaml +0 -0
  30. wandb/run-20240410_005959-52om3vq0/files/config.yaml +38 -0
  31. wandb/run-20240410_005959-52om3vq0/files/wandb-metadata.json +66 -0
  32. wandb/run-20240410_005959-52om3vq0/files/wandb-summary.json +1 -0
  33. wandb/run-20240410_005959-52om3vq0/logs/debug-internal.log +179 -0
  34. wandb/run-20240410_005959-52om3vq0/logs/debug.log +53 -0
  35. wandb/run-20240410_005959-52om3vq0/run-52om3vq0.wandb +0 -0
  36. wandb/run-20240410_010250-ft4a6i5j/files/conda-environment.yaml +0 -0
  37. wandb/run-20240410_010250-ft4a6i5j/files/config.yaml +686 -0
  38. wandb/run-20240410_010250-ft4a6i5j/files/output.log +334 -0
  39. wandb/run-20240410_010250-ft4a6i5j/files/requirements.txt +864 -0
  40. wandb/run-20240410_010250-ft4a6i5j/files/wandb-metadata.json +66 -0
  41. wandb/run-20240410_010250-ft4a6i5j/files/wandb-summary.json +1 -0
  42. wandb/run-20240410_010250-ft4a6i5j/logs/debug-internal.log +0 -0
  43. wandb/run-20240410_010250-ft4a6i5j/logs/debug.log +31 -0
  44. wandb/run-20240410_010250-ft4a6i5j/run-ft4a6i5j.wandb +3 -0
.gitattributes CHANGED
@@ -34,3 +34,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
  wandb/run-20240408_203054-ldiuneeg/run-ldiuneeg.wandb filter=lfs diff=lfs merge=lfs -text
+ wandb/run-20240410_010250-ft4a6i5j/run-ft4a6i5j.wandb filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -35,14 +35,14 @@ More information needed
  ### Training hyperparameters

  The following hyperparameters were used during training:
- - learning_rate: 1e-06
- - train_batch_size: 2
+ - learning_rate: 7e-06
+ - train_batch_size: 8
  - eval_batch_size: 8
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
  - lr_scheduler_warmup_ratio: 0.03
- - training_steps: 200
+ - training_steps: 15000

  ### Training results

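For reference, a minimal sketch (not code from this repo) of how the updated README values above could be expressed as 🤗 Transformers `TrainingArguments`; `output_dir`, `optim`, and `weight_decay` are assumptions taken from the wandb config logged further down, not from the README itself.

```python
# Hypothetical sketch only: maps the README hyperparameters onto TrainingArguments
# (transformers 4.39). output_dir / optim are assumptions from the wandb debug.log.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="/kaggle/working/",       # assumed
    learning_rate=7e-6,                  # was 1e-06
    per_device_train_batch_size=8,       # was 2
    per_device_eval_batch_size=8,
    seed=42,
    optim="paged_adamw_32bit",           # assumed from the logged config
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    max_steps=15000,                     # was 200
)
```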
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
+ "gate_proj",
  "v_proj",
  "o_proj",
+ "k_proj",
  "up_proj",
- "gate_proj",
  "q_proj",
- "down_proj",
- "k_proj"
+ "down_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
runs/Apr10_00-42-28_d91c9dc8354a/events.out.tfevents.1712710019.d91c9dc8354a.98.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3d65a7d2a7f4f10193841a6245f9de475423a625271db3dc306324490378e4f
+ size 4184
runs/Apr10_00-49-34_d91c9dc8354a/events.out.tfevents.1712710295.d91c9dc8354a.248.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef9cf3cadd0082f1e8739fc3c6cbbe7901eafbe9467f45c0265af7c3b48c8c5e
+ size 5365
runs/Apr10_00-57-51_d91c9dc8354a/events.out.tfevents.1712710797.d91c9dc8354a.399.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09613a110084c05e8088d5ec9357045d651d281c10b3bd0839999bf88470ab54
+ size 4184
runs/Apr10_01-00-47_d91c9dc8354a/events.out.tfevents.1712710968.d91c9dc8354a.483.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5459acd8a728881ec4e8d236e8c60118150d0737700f357b8ae09c3dd8903b08
+ size 37158
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:79476ba32ee1c672acba45c0b115976802852236319c0685413a57fd770928c9
+ oid sha256:147058c240b6504dffc71c5e3cf4e64c5649a0006b05271d541e3a054520c963
  size 4920
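training_args.bin is the serialized `TrainingArguments` object that `Trainer` writes next to checkpoints; a small sketch of inspecting it (the local path and the loaded fields are assumptions, not repo code):

```python
# Sketch: training_args.bin is a torch-serialized TrainingArguments object.
# Load it to compare the hyperparameters behind the two LFS oids above.
import torch

args = torch.load("training_args.bin")        # assumed local path
print(args.learning_rate, args.max_steps)     # e.g. 7e-06, 15000 after this commit
```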
wandb/debug-internal.log CHANGED
The diff for this file is too large to render. See raw diff
 
wandb/debug.log CHANGED
@@ -1,31 +1,31 @@
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Configure stats pid to 571
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_setup.py:_flush():76] Applying login settings: {}
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240409_221459-bis0oi0s/logs/debug.log
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240409_221459-bis0oi0s/logs/debug-internal.log
- 2024-04-09 22:14:59,278 INFO MainThread:571 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7c14ac5da110>
- 2024-04-09 22:14:59,279 INFO MainThread:571 [wandb_init.py:init():567] calling init triggers
- 2024-04-09 22:14:59,279 INFO MainThread:571 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
  config: {}
- 2024-04-09 22:14:59,279 INFO MainThread:571 [wandb_init.py:init():617] starting backend
- 2024-04-09 22:14:59,279 INFO MainThread:571 [wandb_init.py:init():621] setting up manager
- 2024-04-09 22:14:59,281 INFO MainThread:571 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
- 2024-04-09 22:14:59,282 INFO MainThread:571 [wandb_init.py:init():629] backend started and connected
- 2024-04-09 22:14:59,293 INFO MainThread:571 [wandb_run.py:_label_probe_notebook():1299] probe notebook
- 2024-04-09 22:14:59,558 INFO MainThread:571 [wandb_init.py:init():721] updated telemetry
- 2024-04-09 22:14:59,560 INFO MainThread:571 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
- 2024-04-09 22:14:59,786 INFO MainThread:571 [wandb_run.py:_on_init():2344] communicating current version
- 2024-04-09 22:14:59,879 INFO MainThread:571 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"

- 2024-04-09 22:14:59,879 INFO MainThread:571 [wandb_init.py:init():805] starting run threads in backend
- 2024-04-09 22:15:15,888 INFO MainThread:571 [wandb_run.py:_console_start():2323] atexit reg
- 2024-04-09 22:15:15,889 INFO MainThread:571 [wandb_run.py:_redirect():2178] redirect: wrap_raw
- 2024-04-09 22:15:15,891 INFO MainThread:571 [wandb_run.py:_redirect():2243] Wrapping output streams.
- 2024-04-09 22:15:15,891 INFO MainThread:571 [wandb_run.py:_redirect():2268] Redirects installed.
- 2024-04-09 22:15:15,892 INFO MainThread:571 [wandb_init.py:init():848] run started, returning control to user process
- 2024-04-09 22:15:15,897 INFO MainThread:571 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 1e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.1, 'num_train_epochs': 5, 'max_steps': 200, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr09_22-12-41_6e44b39f6877', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': 
False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
 
+ 2024-04-10 01:02:50,456 INFO MainThread:483 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Configure stats pid to 483
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Applying login settings: {}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240410_010250-ft4a6i5j/logs/debug.log
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240410_010250-ft4a6i5j/logs/debug-internal.log
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x78e1476822c0>
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():567] calling init triggers
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
  config: {}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():617] starting backend
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():621] setting up manager
+ 2024-04-10 01:02:50,459 INFO MainThread:483 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2024-04-10 01:02:50,460 INFO MainThread:483 [wandb_init.py:init():629] backend started and connected
+ 2024-04-10 01:02:50,473 INFO MainThread:483 [wandb_run.py:_label_probe_notebook():1299] probe notebook
+ 2024-04-10 01:02:50,810 INFO MainThread:483 [wandb_init.py:init():721] updated telemetry
+ 2024-04-10 01:02:50,813 INFO MainThread:483 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
+ 2024-04-10 01:02:51,049 INFO MainThread:483 [wandb_run.py:_on_init():2344] communicating current version
+ 2024-04-10 01:02:51,115 INFO MainThread:483 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"

+ 2024-04-10 01:02:51,115 INFO MainThread:483 [wandb_init.py:init():805] starting run threads in backend
+ 2024-04-10 01:03:07,089 INFO MainThread:483 [wandb_run.py:_console_start():2323] atexit reg
+ 2024-04-10 01:03:07,089 INFO MainThread:483 [wandb_run.py:_redirect():2178] redirect: wrap_raw
+ 2024-04-10 01:03:07,091 INFO MainThread:483 [wandb_run.py:_redirect():2243] Wrapping output streams.
+ 2024-04-10 01:03:07,091 INFO MainThread:483 [wandb_run.py:_redirect():2268] Redirects installed.
+ 2024-04-10 01:03:07,092 INFO MainThread:483 [wandb_init.py:init():848] run started, returning control to user process
+ 2024-04-10 01:03:07,097 INFO MainThread:483 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 7e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.07, 'num_train_epochs': 5, 'max_steps': 15000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr10_01-00-47_d91c9dc8354a', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 
'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
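The `quantization_config` captured in the log above corresponds to a 4-bit NF4 bitsandbytes setup; a hedged sketch of how such a config is typically constructed (the model id matches the logged `_name_or_path`; everything else mirrors the recorded values):

```python
# Hypothetical sketch reproducing the quantization_config values recorded in
# the wandb debug.log above (nf4, float16 compute, no double quantization).
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=False,
)

model = AutoModelForCausalLM.from_pretrained(
    "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T",
    quantization_config=bnb_config,
)
```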
wandb/run-20240410_004732-5h6j3v5e/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240410_004732-5h6j3v5e/files/config.yaml ADDED
@@ -0,0 +1,670 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712710052.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ 4: 3.10.13
48
+ 5: 0.16.5
49
+ 6: 4.39.3
50
+ 8:
51
+ - 1
52
+ - 2
53
+ - 5
54
+ 9:
55
+ 1: transformers_trainer
56
+ 13: linux-x86_64
57
+ m:
58
+ - 1: train/global_step
59
+ 6:
60
+ - 3
61
+ vocab_size:
62
+ desc: null
63
+ value: 32000
64
+ max_position_embeddings:
65
+ desc: null
66
+ value: 2048
67
+ hidden_size:
68
+ desc: null
69
+ value: 2048
70
+ intermediate_size:
71
+ desc: null
72
+ value: 5632
73
+ num_hidden_layers:
74
+ desc: null
75
+ value: 22
76
+ num_attention_heads:
77
+ desc: null
78
+ value: 32
79
+ num_key_value_heads:
80
+ desc: null
81
+ value: 4
82
+ hidden_act:
83
+ desc: null
84
+ value: silu
85
+ initializer_range:
86
+ desc: null
87
+ value: 0.02
88
+ rms_norm_eps:
89
+ desc: null
90
+ value: 1.0e-05
91
+ pretraining_tp:
92
+ desc: null
93
+ value: 1
94
+ use_cache:
95
+ desc: null
96
+ value: false
97
+ rope_theta:
98
+ desc: null
99
+ value: 10000.0
100
+ rope_scaling:
101
+ desc: null
102
+ value: null
103
+ attention_bias:
104
+ desc: null
105
+ value: false
106
+ attention_dropout:
107
+ desc: null
108
+ value: 0.0
109
+ return_dict:
110
+ desc: null
111
+ value: true
112
+ output_hidden_states:
113
+ desc: null
114
+ value: false
115
+ output_attentions:
116
+ desc: null
117
+ value: false
118
+ torchscript:
119
+ desc: null
120
+ value: false
121
+ torch_dtype:
122
+ desc: null
123
+ value: float32
124
+ use_bfloat16:
125
+ desc: null
126
+ value: false
127
+ tf_legacy_loss:
128
+ desc: null
129
+ value: false
130
+ pruned_heads:
131
+ desc: null
132
+ value: {}
133
+ tie_word_embeddings:
134
+ desc: null
135
+ value: false
136
+ chunk_size_feed_forward:
137
+ desc: null
138
+ value: 0
139
+ is_encoder_decoder:
140
+ desc: null
141
+ value: false
142
+ is_decoder:
143
+ desc: null
144
+ value: false
145
+ cross_attention_hidden_size:
146
+ desc: null
147
+ value: null
148
+ add_cross_attention:
149
+ desc: null
150
+ value: false
151
+ tie_encoder_decoder:
152
+ desc: null
153
+ value: false
154
+ max_length:
155
+ desc: null
156
+ value: 20
157
+ min_length:
158
+ desc: null
159
+ value: 0
160
+ do_sample:
161
+ desc: null
162
+ value: false
163
+ early_stopping:
164
+ desc: null
165
+ value: false
166
+ num_beams:
167
+ desc: null
168
+ value: 1
169
+ num_beam_groups:
170
+ desc: null
171
+ value: 1
172
+ diversity_penalty:
173
+ desc: null
174
+ value: 0.0
175
+ temperature:
176
+ desc: null
177
+ value: 1.0
178
+ top_k:
179
+ desc: null
180
+ value: 50
181
+ top_p:
182
+ desc: null
183
+ value: 1.0
184
+ typical_p:
185
+ desc: null
186
+ value: 1.0
187
+ repetition_penalty:
188
+ desc: null
189
+ value: 1.0
190
+ length_penalty:
191
+ desc: null
192
+ value: 1.0
193
+ no_repeat_ngram_size:
194
+ desc: null
195
+ value: 0
196
+ encoder_no_repeat_ngram_size:
197
+ desc: null
198
+ value: 0
199
+ bad_words_ids:
200
+ desc: null
201
+ value: null
202
+ num_return_sequences:
203
+ desc: null
204
+ value: 1
205
+ output_scores:
206
+ desc: null
207
+ value: false
208
+ return_dict_in_generate:
209
+ desc: null
210
+ value: false
211
+ forced_bos_token_id:
212
+ desc: null
213
+ value: null
214
+ forced_eos_token_id:
215
+ desc: null
216
+ value: null
217
+ remove_invalid_values:
218
+ desc: null
219
+ value: false
220
+ exponential_decay_length_penalty:
221
+ desc: null
222
+ value: null
223
+ suppress_tokens:
224
+ desc: null
225
+ value: null
226
+ begin_suppress_tokens:
227
+ desc: null
228
+ value: null
229
+ architectures:
230
+ desc: null
231
+ value:
232
+ - LlamaForCausalLM
233
+ finetuning_task:
234
+ desc: null
235
+ value: null
236
+ id2label:
237
+ desc: null
238
+ value:
239
+ '0': LABEL_0
240
+ '1': LABEL_1
241
+ label2id:
242
+ desc: null
243
+ value:
244
+ LABEL_0: 0
245
+ LABEL_1: 1
246
+ tokenizer_class:
247
+ desc: null
248
+ value: null
249
+ prefix:
250
+ desc: null
251
+ value: null
252
+ bos_token_id:
253
+ desc: null
254
+ value: 1
255
+ pad_token_id:
256
+ desc: null
257
+ value: null
258
+ eos_token_id:
259
+ desc: null
260
+ value: 2
261
+ sep_token_id:
262
+ desc: null
263
+ value: null
264
+ decoder_start_token_id:
265
+ desc: null
266
+ value: null
267
+ task_specific_params:
268
+ desc: null
269
+ value: null
270
+ problem_type:
271
+ desc: null
272
+ value: null
273
+ _name_or_path:
274
+ desc: null
275
+ value: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
276
+ transformers_version:
277
+ desc: null
278
+ value: 4.39.3
279
+ model_type:
280
+ desc: null
281
+ value: llama
282
+ quantization_config:
283
+ desc: null
284
+ value:
285
+ quant_method: QuantizationMethod.BITS_AND_BYTES
286
+ _load_in_8bit: false
287
+ _load_in_4bit: true
288
+ llm_int8_threshold: 6.0
289
+ llm_int8_skip_modules: null
290
+ llm_int8_enable_fp32_cpu_offload: false
291
+ llm_int8_has_fp16_weight: false
292
+ bnb_4bit_quant_type: nf4
293
+ bnb_4bit_use_double_quant: false
294
+ bnb_4bit_compute_dtype: float16
295
+ bnb_4bit_quant_storage: uint8
296
+ load_in_4bit: true
297
+ load_in_8bit: false
298
+ output_dir:
299
+ desc: null
300
+ value: /kaggle/working/
301
+ overwrite_output_dir:
302
+ desc: null
303
+ value: false
304
+ do_train:
305
+ desc: null
306
+ value: false
307
+ do_eval:
308
+ desc: null
309
+ value: false
310
+ do_predict:
311
+ desc: null
312
+ value: false
313
+ evaluation_strategy:
314
+ desc: null
315
+ value: 'no'
316
+ prediction_loss_only:
317
+ desc: null
318
+ value: false
319
+ per_device_train_batch_size:
320
+ desc: null
321
+ value: 8
322
+ per_device_eval_batch_size:
323
+ desc: null
324
+ value: 8
325
+ per_gpu_train_batch_size:
326
+ desc: null
327
+ value: null
328
+ per_gpu_eval_batch_size:
329
+ desc: null
330
+ value: null
331
+ gradient_accumulation_steps:
332
+ desc: null
333
+ value: 1
334
+ eval_accumulation_steps:
335
+ desc: null
336
+ value: null
337
+ eval_delay:
338
+ desc: null
339
+ value: 0
340
+ learning_rate:
341
+ desc: null
342
+ value: 7.0e-06
343
+ weight_decay:
344
+ desc: null
345
+ value: 0.001
346
+ adam_beta1:
347
+ desc: null
348
+ value: 0.9
349
+ adam_beta2:
350
+ desc: null
351
+ value: 0.999
352
+ adam_epsilon:
353
+ desc: null
354
+ value: 1.0e-08
355
+ max_grad_norm:
356
+ desc: null
357
+ value: 0.07
358
+ num_train_epochs:
359
+ desc: null
360
+ value: 5
361
+ max_steps:
362
+ desc: null
363
+ value: 11000
364
+ lr_scheduler_type:
365
+ desc: null
366
+ value: cosine
367
+ lr_scheduler_kwargs:
368
+ desc: null
369
+ value: {}
370
+ warmup_ratio:
371
+ desc: null
372
+ value: 0.03
373
+ warmup_steps:
374
+ desc: null
375
+ value: 0
376
+ log_level:
377
+ desc: null
378
+ value: passive
379
+ log_level_replica:
380
+ desc: null
381
+ value: warning
382
+ log_on_each_node:
383
+ desc: null
384
+ value: true
385
+ logging_dir:
386
+ desc: null
387
+ value: /kaggle/working/runs/Apr10_00-42-28_d91c9dc8354a
388
+ logging_strategy:
389
+ desc: null
390
+ value: steps
391
+ logging_first_step:
392
+ desc: null
393
+ value: false
394
+ logging_steps:
395
+ desc: null
396
+ value: 100
397
+ logging_nan_inf_filter:
398
+ desc: null
399
+ value: true
400
+ save_strategy:
401
+ desc: null
402
+ value: steps
403
+ save_steps:
404
+ desc: null
405
+ value: 100
406
+ save_total_limit:
407
+ desc: null
408
+ value: 1
409
+ save_safetensors:
410
+ desc: null
411
+ value: true
412
+ save_on_each_node:
413
+ desc: null
414
+ value: false
415
+ save_only_model:
416
+ desc: null
417
+ value: false
418
+ no_cuda:
419
+ desc: null
420
+ value: false
421
+ use_cpu:
422
+ desc: null
423
+ value: false
424
+ use_mps_device:
425
+ desc: null
426
+ value: false
427
+ seed:
428
+ desc: null
429
+ value: 42
430
+ data_seed:
431
+ desc: null
432
+ value: null
433
+ jit_mode_eval:
434
+ desc: null
435
+ value: false
436
+ use_ipex:
437
+ desc: null
438
+ value: false
439
+ bf16:
440
+ desc: null
441
+ value: false
442
+ fp16:
443
+ desc: null
444
+ value: false
445
+ fp16_opt_level:
446
+ desc: null
447
+ value: O1
448
+ half_precision_backend:
449
+ desc: null
450
+ value: auto
451
+ bf16_full_eval:
452
+ desc: null
453
+ value: false
454
+ fp16_full_eval:
455
+ desc: null
456
+ value: false
457
+ tf32:
458
+ desc: null
459
+ value: null
460
+ local_rank:
461
+ desc: null
462
+ value: 0
463
+ ddp_backend:
464
+ desc: null
465
+ value: null
466
+ tpu_num_cores:
467
+ desc: null
468
+ value: null
469
+ tpu_metrics_debug:
470
+ desc: null
471
+ value: false
472
+ debug:
473
+ desc: null
474
+ value: []
475
+ dataloader_drop_last:
476
+ desc: null
477
+ value: false
478
+ eval_steps:
479
+ desc: null
480
+ value: null
481
+ dataloader_num_workers:
482
+ desc: null
483
+ value: 8
484
+ dataloader_prefetch_factor:
485
+ desc: null
486
+ value: null
487
+ past_index:
488
+ desc: null
489
+ value: -1
490
+ run_name:
491
+ desc: null
492
+ value: /kaggle/working/
493
+ disable_tqdm:
494
+ desc: null
495
+ value: false
496
+ remove_unused_columns:
497
+ desc: null
498
+ value: true
499
+ label_names:
500
+ desc: null
501
+ value: null
502
+ load_best_model_at_end:
503
+ desc: null
504
+ value: false
505
+ metric_for_best_model:
506
+ desc: null
507
+ value: null
508
+ greater_is_better:
509
+ desc: null
510
+ value: null
511
+ ignore_data_skip:
512
+ desc: null
513
+ value: false
514
+ fsdp:
515
+ desc: null
516
+ value: []
517
+ fsdp_min_num_params:
518
+ desc: null
519
+ value: 0
520
+ fsdp_config:
521
+ desc: null
522
+ value:
523
+ min_num_params: 0
524
+ xla: false
525
+ xla_fsdp_v2: false
526
+ xla_fsdp_grad_ckpt: false
527
+ fsdp_transformer_layer_cls_to_wrap:
528
+ desc: null
529
+ value: null
530
+ accelerator_config:
531
+ desc: null
532
+ value:
533
+ split_batches: false
534
+ dispatch_batches: null
535
+ even_batches: true
536
+ use_seedable_sampler: true
537
+ deepspeed:
538
+ desc: null
539
+ value: null
540
+ label_smoothing_factor:
541
+ desc: null
542
+ value: 0.0
543
+ optim:
544
+ desc: null
545
+ value: paged_adamw_32bit
546
+ optim_args:
547
+ desc: null
548
+ value: null
549
+ adafactor:
550
+ desc: null
551
+ value: false
552
+ group_by_length:
553
+ desc: null
554
+ value: false
555
+ length_column_name:
556
+ desc: null
557
+ value: length
558
+ report_to:
559
+ desc: null
560
+ value:
561
+ - tensorboard
562
+ - wandb
563
+ ddp_find_unused_parameters:
564
+ desc: null
565
+ value: null
566
+ ddp_bucket_cap_mb:
567
+ desc: null
568
+ value: null
569
+ ddp_broadcast_buffers:
570
+ desc: null
571
+ value: null
572
+ dataloader_pin_memory:
573
+ desc: null
574
+ value: true
575
+ dataloader_persistent_workers:
576
+ desc: null
577
+ value: false
578
+ skip_memory_metrics:
579
+ desc: null
580
+ value: true
581
+ use_legacy_prediction_loop:
582
+ desc: null
583
+ value: false
584
+ push_to_hub:
585
+ desc: null
586
+ value: false
587
+ resume_from_checkpoint:
588
+ desc: null
589
+ value: null
590
+ hub_model_id:
591
+ desc: null
592
+ value: null
593
+ hub_strategy:
594
+ desc: null
595
+ value: every_save
596
+ hub_token:
597
+ desc: null
598
+ value: <HUB_TOKEN>
599
+ hub_private_repo:
600
+ desc: null
601
+ value: false
602
+ hub_always_push:
603
+ desc: null
604
+ value: false
605
+ gradient_checkpointing:
606
+ desc: null
607
+ value: true
608
+ gradient_checkpointing_kwargs:
609
+ desc: null
610
+ value: null
611
+ include_inputs_for_metrics:
612
+ desc: null
613
+ value: false
614
+ fp16_backend:
615
+ desc: null
616
+ value: auto
617
+ push_to_hub_model_id:
618
+ desc: null
619
+ value: null
620
+ push_to_hub_organization:
621
+ desc: null
622
+ value: null
623
+ push_to_hub_token:
624
+ desc: null
625
+ value: <PUSH_TO_HUB_TOKEN>
626
+ mp_parameters:
627
+ desc: null
628
+ value: ''
629
+ auto_find_batch_size:
630
+ desc: null
631
+ value: true
632
+ full_determinism:
633
+ desc: null
634
+ value: false
635
+ torchdynamo:
636
+ desc: null
637
+ value: null
638
+ ray_scope:
639
+ desc: null
640
+ value: last
641
+ ddp_timeout:
642
+ desc: null
643
+ value: 1800
644
+ torch_compile:
645
+ desc: null
646
+ value: false
647
+ torch_compile_backend:
648
+ desc: null
649
+ value: null
650
+ torch_compile_mode:
651
+ desc: null
652
+ value: null
653
+ dispatch_batches:
654
+ desc: null
655
+ value: null
656
+ split_batches:
657
+ desc: null
658
+ value: null
659
+ include_tokens_per_second:
660
+ desc: null
661
+ value: false
662
+ include_num_input_tokens_seen:
663
+ desc: null
664
+ value: false
665
+ neftune_noise_alpha:
666
+ desc: null
667
+ value: null
668
+ optim_target_modules:
669
+ desc: null
670
+ value: null
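The wandb `config.yaml` above stores every entry as a `desc`/`value` pair; a small sketch (using the file path listed in this commit) of flattening it back into a plain dict:

```python
# Sketch: recover plain key -> value pairs from the wandb config.yaml above,
# which wraps each entry in {desc: ..., value: ...}.
import yaml

with open("wandb/run-20240410_004732-5h6j3v5e/files/config.yaml") as f:
    raw = yaml.safe_load(f)

config = {k: v["value"] for k, v in raw.items()
          if isinstance(v, dict) and "value" in v}
print(config["learning_rate"], config["max_steps"])   # 7e-06, 11000 for this run
```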
wandb/run-20240410_004732-5h6j3v5e/files/output.log ADDED
@@ -0,0 +1,36 @@
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:557: UserWarning: This DataLoader will create 8 worker processes in total. Our suggested max number of worker in current system is 4, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
+ warnings.warn(_create_warning_msg(
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
+ warnings.warn(
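The warnings captured above (tokenizers fork warning, DataLoader worker count, the `use_reentrant` default) are the usual ones for this setup; a hedged sketch of the knobs commonly used to address them, not code from this repo's training script:

```python
# Hypothetical sketch only: common ways to quiet the three warnings logged above.
import os

# Silence the huggingface/tokenizers fork warning.
os.environ["TOKENIZERS_PARALLELISM"] = "false"

from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="/kaggle/working/",              # assumed
    dataloader_num_workers=4,                   # the host suggests at most 4 workers
    gradient_checkpointing=True,
    gradient_checkpointing_kwargs={"use_reentrant": False},  # make the default explicit
)
```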
wandb/run-20240410_004732-5h6j3v5e/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.0
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
655
+ rich==13.7.1
656
+ rmm==23.8.0
657
+ rope==1.13.0
658
+ rpds-py==0.16.2
659
+ rsa==4.9
660
+ ruamel-yaml-conda==0.15.100
661
+ ruamel.yaml.clib==0.2.7
662
+ ruamel.yaml==0.17.40
663
+ s2sphere==0.2.5
664
+ s3fs==2024.2.0
665
+ s3transfer==0.6.2
666
+ safetensors==0.4.2
667
+ scattertext==0.1.19
668
+ scikit-image==0.22.0
669
+ scikit-learn-intelex==2024.2.0
670
+ scikit-learn==1.2.2
671
+ scikit-multilearn==0.2.0
672
+ scikit-optimize==0.10.1
673
+ scikit-plot==0.3.7
674
+ scikit-surprise==1.1.3
675
+ scipy==1.11.4
676
+ scipy==1.12.0
677
+ seaborn==0.12.2
678
+ segment_anything==1.0
679
+ segregation==2.5
680
+ semver==3.0.2
681
+ sentencepiece==0.2.0
682
+ sentry-sdk==1.44.1
683
+ setproctitle==1.3.3
684
+ setuptools-git==1.2
685
+ setuptools-scm==8.0.4
686
+ setuptools==69.0.3
687
+ shap==0.44.1
688
+ shapely==2.0.3
689
+ shellingham==1.5.4
690
+ shtab==1.7.1
691
+ simpervisor==1.0.0
692
+ simplejson==3.19.2
693
+ six==1.16.0
694
+ sklearn-pandas==2.2.0
695
+ slicer==0.0.7
696
+ smart-open==6.4.0
697
+ smmap==5.0.1
698
+ sniffio==1.3.0
699
+ snowballstemmer==2.2.0
700
+ snuggs==1.4.7
701
+ sortedcontainers==2.4.0
702
+ soundfile==0.12.1
703
+ soupsieve==2.5
704
+ soxr==0.3.7
705
+ spacy-legacy==3.0.12
706
+ spacy-loggers==1.0.5
707
+ spacy==3.7.2
708
+ spaghetti==1.7.5.post1
709
+ spectral==0.23.1
710
+ spglm==1.1.0
711
+ sphinx-rtd-theme==0.2.4
712
+ spint==1.0.7
713
+ splot==1.1.5.post1
714
+ spopt==0.6.0
715
+ spreg==1.4.2
716
+ spvcm==0.3.0
717
+ sqlparse==0.4.4
718
+ squarify==0.4.3
719
+ srsly==2.4.8
720
+ stable-baselines3==2.1.0
721
+ stack-data==0.6.2
722
+ stack-data==0.6.3
723
+ stanio==0.5.0
724
+ starlette==0.32.0.post1
725
+ statsmodels==0.14.1
726
+ stemming==1.0.1
727
+ stop-words==2018.7.23
728
+ stopit==1.1.2
729
+ stumpy==1.12.0
730
+ sympy==1.12
731
+ tables==3.9.2
732
+ tabulate==0.9.0
733
+ tangled-up-in-unicode==0.2.0
734
+ tbb==2021.12.0
735
+ tblib==3.0.0
736
+ tenacity==8.2.3
737
+ tensorboard-data-server==0.7.2
738
+ tensorboard-plugin-profile==2.15.0
739
+ tensorboard==2.15.1
740
+ tensorboardX==2.6.2.2
741
+ tensorflow-cloud==0.1.16
742
+ tensorflow-datasets==4.9.4
743
+ tensorflow-decision-forests==1.8.1
744
+ tensorflow-estimator==2.15.0
745
+ tensorflow-hub==0.16.1
746
+ tensorflow-io-gcs-filesystem==0.35.0
747
+ tensorflow-io==0.35.0
748
+ tensorflow-metadata==0.14.0
749
+ tensorflow-probability==0.23.0
750
+ tensorflow-serving-api==2.14.1
751
+ tensorflow-text==2.15.0
752
+ tensorflow-transform==0.14.0
753
+ tensorflow==2.15.0
754
+ tensorstore==0.1.56
755
+ termcolor==2.4.0
756
+ terminado==0.18.0
757
+ testpath==0.6.0
758
+ text-unidecode==1.3
759
+ textblob==0.18.0.post0
760
+ texttable==1.7.0
761
+ tf_keras==2.15.1
762
+ tfp-nightly==0.24.0.dev0
763
+ thinc==8.2.2
764
+ threadpoolctl==3.2.0
765
+ tifffile==2023.12.9
766
+ timm==0.9.16
767
+ tinycss2==1.2.1
768
+ tobler==0.11.2
769
+ tokenizers==0.15.2
770
+ toml==0.10.2
771
+ tomli==2.0.1
772
+ tomlkit==0.12.4
773
+ toolz==0.12.1
774
+ torch==2.1.2
775
+ torchaudio==2.1.2
776
+ torchdata==0.7.1
777
+ torchinfo==1.8.0
778
+ torchmetrics==1.3.2
779
+ torchtext==0.16.2
780
+ torchvision==0.16.2
781
+ tornado==6.3.3
782
+ tqdm==4.66.1
783
+ traceml==1.0.8
784
+ traitlets==5.9.0
785
+ traittypes==0.2.1
786
+ transformers==4.39.3
787
+ treelite-runtime==3.2.0
788
+ treelite==3.2.0
789
+ trl==0.8.1
790
+ truststore==0.8.0
791
+ trx-python==0.2.9
792
+ tsfresh==0.20.2
793
+ typeguard==4.1.5
794
+ typer==0.9.0
795
+ typer==0.9.4
796
+ types-python-dateutil==2.8.19.20240106
797
+ typing-inspect==0.9.0
798
+ typing-utils==0.1.0
799
+ typing_extensions==4.9.0
800
+ tyro==0.8.3
801
+ tzdata==2023.4
802
+ uc-micro-py==1.0.3
803
+ ucx-py==0.33.0
804
+ ujson==5.9.0
805
+ umap-learn==0.5.5
806
+ unicodedata2==15.1.0
807
+ update-checker==0.18.0
808
+ uri-template==1.3.0
809
+ uritemplate==3.0.1
810
+ urllib3==1.26.18
811
+ urllib3==2.1.0
812
+ urwid==2.6.10
813
+ urwid_readline==0.14
814
+ uvicorn==0.25.0
815
+ uvloop==0.19.0
816
+ vaex-astro==0.9.3
817
+ vaex-core==4.17.1
818
+ vaex-hdf5==0.14.1
819
+ vaex-jupyter==0.8.2
820
+ vaex-ml==0.18.3
821
+ vaex-server==0.9.0
822
+ vaex-viz==0.5.4
823
+ vaex==4.17.0
824
+ vec_noise==1.1.4
825
+ vecstack==0.4.0
826
+ virtualenv==20.21.0
827
+ visions==0.7.5
828
+ vowpalwabbit==9.9.0
829
+ vtk==9.3.0
830
+ wandb==0.16.5
831
+ wasabi==1.1.2
832
+ watchfiles==0.21.0
833
+ wavio==0.0.8
834
+ wcwidth==0.2.13
835
+ weasel==0.3.4
836
+ webcolors==1.13
837
+ webencodings==0.5.1
838
+ websocket-client==1.7.0
839
+ websockets==12.0
840
+ wfdb==4.1.2
841
+ whatthepatch==1.0.5
842
+ wheel==0.42.0
843
+ widgetsnbextension==3.6.6
844
+ witwidget==1.8.1
845
+ woodwork==0.29.0
846
+ wordcloud==1.9.3
847
+ wordsegment==1.3.1
848
+ wrapt==1.14.1
849
+ xarray-einstats==0.7.0
850
+ xarray==2024.3.0
851
+ xgboost==2.0.3
852
+ xvfbwrapper==0.2.9
853
+ xxhash==3.4.1
854
+ xyzservices==2023.10.1
855
+ y-py==0.6.2
856
+ yapf==0.40.2
857
+ yarl==1.9.3
858
+ yarl==1.9.4
859
+ ydata-profiling==4.6.4
860
+ yellowbrick==1.5
861
+ ypy-websocket==0.8.4
862
+ zict==3.0.0
863
+ zipp==3.17.0
864
+ zstandard==0.22.0
wandb/run-20240410_004732-5h6j3v5e/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-10T00:47:33.216472",
5
+ "startedAt": "2024-04-10T00:47:32.315058",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "d91c9dc8354a",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.156,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.156,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.156,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.156,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.156,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5568.826065063477
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
wandb/run-20240410_004732-5h6j3v5e/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
1
+ {"_wandb": {"runtime": 53}}
wandb/run-20240410_004732-5h6j3v5e/logs/debug-internal.log ADDED
@@ -0,0 +1,217 @@
1
+ 2024-04-10 00:47:32,321 INFO StreamThr :144 [internal.py:wandb_internal():86] W&B internal server running at pid: 144, started at: 2024-04-10 00:47:32.321132
2
+ 2024-04-10 00:47:32,323 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-10 00:47:32,856 INFO WriterThread:144 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/run-5h6j3v5e.wandb
4
+ 2024-04-10 00:47:32,856 DEBUG SenderThread:144 [sender.py:send():379] send: header
5
+ 2024-04-10 00:47:32,859 DEBUG SenderThread:144 [sender.py:send():379] send: run
6
+ 2024-04-10 00:47:33,111 INFO SenderThread:144 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files
7
+ 2024-04-10 00:47:33,111 INFO SenderThread:144 [sender.py:_start_run_threads():1124] run started: 5h6j3v5e with start time 1712710052.322605
8
+ 2024-04-10 00:47:33,120 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-10 00:47:33,120 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-10 00:47:33,191 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-10 00:47:33,202 DEBUG HandlerThread:144 [system_info.py:__init__():26] System info init
12
+ 2024-04-10 00:47:33,202 DEBUG HandlerThread:144 [system_info.py:__init__():41] System info init done
13
+ 2024-04-10 00:47:33,202 INFO HandlerThread:144 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-10 00:47:33,202 INFO SystemMonitor:144 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-10 00:47:33,203 INFO HandlerThread:144 [system_monitor.py:probe():214] Collecting system info
16
+ 2024-04-10 00:47:33,203 INFO SystemMonitor:144 [interfaces.py:start():190] Started cpu monitoring
17
+ 2024-04-10 00:47:33,203 INFO SystemMonitor:144 [interfaces.py:start():190] Started disk monitoring
18
+ 2024-04-10 00:47:33,205 INFO SystemMonitor:144 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-10 00:47:33,206 INFO SystemMonitor:144 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-10 00:47:33,206 INFO SystemMonitor:144 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-10 00:47:33,216 DEBUG HandlerThread:144 [system_info.py:probe():150] Probing system
22
+ 2024-04-10 00:47:33,218 DEBUG HandlerThread:144 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-10 00:47:33,218 DEBUG HandlerThread:144 [system_info.py:probe():198] Probing system done
24
+ 2024-04-10 00:47:33,218 DEBUG HandlerThread:144 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-10T00:47:33.216472', 'startedAt': '2024-04-10T00:47:32.315058', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': 'd91c9dc8354a', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.156, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5568.826065063477}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-10 00:47:33,218 INFO HandlerThread:144 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-10 00:47:33,218 INFO HandlerThread:144 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-10 00:47:33,218 DEBUG HandlerThread:144 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-10 00:47:34,113 INFO Thread-12 :144 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/conda-environment.yaml
29
+ 2024-04-10 00:47:48,232 ERROR HandlerThread:144 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
+ Traceback (most recent call last):
31
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
+ subprocess.call(
33
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
34
+ return p.wait(timeout=timeout)
35
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
36
+ return self._wait(timeout=timeout)
37
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
+ raise TimeoutExpired(self.args, timeout)
39
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-10 00:47:48,236 DEBUG HandlerThread:144 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-10 00:47:48,236 INFO HandlerThread:144 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-10 00:47:48,244 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-10 00:47:48,244 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-10 00:47:48,244 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-10 00:47:48,244 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-10 00:47:48,244 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-10 00:47:48,244 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: keepalive
48
+ 2024-04-10 00:47:48,245 DEBUG SenderThread:144 [sender.py:send():379] send: files
49
+ 2024-04-10 00:47:48,245 INFO SenderThread:144 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
+ 2024-04-10 00:47:48,618 INFO wandb-upload_0:144 [upload_job.py:push():131] Uploaded file /tmp/tmp5skvkonhwandb/p3etykrr-wandb-metadata.json
51
+ 2024-04-10 00:47:49,116 INFO Thread-12 :144 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/wandb-metadata.json
52
+ 2024-04-10 00:47:49,191 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: python_packages
53
+ 2024-04-10 00:47:49,192 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: python_packages
54
+ 2024-04-10 00:47:49,195 DEBUG SenderThread:144 [sender.py:send():379] send: telemetry
55
+ 2024-04-10 00:47:49,205 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: stop_status
56
+ 2024-04-10 00:47:49,206 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: internal_messages
57
+ 2024-04-10 00:47:49,208 DEBUG SenderThread:144 [sender.py:send():379] send: config
58
+ 2024-04-10 00:47:49,209 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: stop_status
59
+ 2024-04-10 00:47:49,367 DEBUG SenderThread:144 [sender.py:send():379] send: metric
60
+ 2024-04-10 00:47:49,368 DEBUG SenderThread:144 [sender.py:send():379] send: telemetry
61
+ 2024-04-10 00:47:49,369 DEBUG SenderThread:144 [sender.py:send():379] send: metric
62
+ 2024-04-10 00:47:49,369 WARNING SenderThread:144 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
63
+ 2024-04-10 00:47:49,369 DEBUG SenderThread:144 [sender.py:send():379] send: telemetry
64
+ 2024-04-10 00:47:50,116 INFO Thread-12 :144 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/requirements.txt
65
+ 2024-04-10 00:47:50,117 INFO Thread-12 :144 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/output.log
66
+ 2024-04-10 00:47:52,117 INFO Thread-12 :144 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/output.log
67
+ 2024-04-10 00:47:53,371 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
68
+ 2024-04-10 00:47:58,372 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-10 00:48:03,378 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
70
+ 2024-04-10 00:48:04,121 INFO Thread-12 :144 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/config.yaml
71
+ 2024-04-10 00:48:04,194 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: stop_status
72
+ 2024-04-10 00:48:04,194 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: internal_messages
73
+ 2024-04-10 00:48:04,195 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-10 00:48:09,324 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-10 00:48:14,324 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
76
+ 2024-04-10 00:48:19,192 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: stop_status
77
+ 2024-04-10 00:48:19,193 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: stop_status
78
+ 2024-04-10 00:48:19,233 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: internal_messages
79
+ 2024-04-10 00:48:19,344 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
80
+ 2024-04-10 00:48:24,345 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
81
+ 2024-04-10 00:48:27,062 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: pause
82
+ 2024-04-10 00:48:27,062 INFO HandlerThread:144 [handler.py:handle_request_pause():708] stopping system metrics thread
83
+ 2024-04-10 00:48:27,062 INFO HandlerThread:144 [system_monitor.py:finish():203] Stopping system monitor
84
+ 2024-04-10 00:48:27,062 DEBUG SystemMonitor:144 [system_monitor.py:_start():172] Starting system metrics aggregation loop
85
+ 2024-04-10 00:48:27,062 DEBUG SystemMonitor:144 [system_monitor.py:_start():179] Finished system metrics aggregation loop
86
+ 2024-04-10 00:48:27,062 DEBUG SystemMonitor:144 [system_monitor.py:_start():183] Publishing last batch of metrics
87
+ 2024-04-10 00:48:27,064 INFO HandlerThread:144 [interfaces.py:finish():202] Joined cpu monitor
88
+ 2024-04-10 00:48:27,064 INFO HandlerThread:144 [interfaces.py:finish():202] Joined disk monitor
89
+ 2024-04-10 00:48:27,074 INFO HandlerThread:144 [interfaces.py:finish():202] Joined gpu monitor
90
+ 2024-04-10 00:48:27,075 INFO HandlerThread:144 [interfaces.py:finish():202] Joined memory monitor
91
+ 2024-04-10 00:48:27,075 INFO HandlerThread:144 [interfaces.py:finish():202] Joined network monitor
92
+ 2024-04-10 00:48:27,075 DEBUG SenderThread:144 [sender.py:send():379] send: stats
93
+ 2024-04-10 00:48:30,076 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
94
+ 2024-04-10 00:48:31,228 DEBUG SenderThread:144 [sender.py:send():379] send: exit
95
+ 2024-04-10 00:48:31,228 INFO SenderThread:144 [sender.py:send_exit():586] handling exit code: 0
96
+ 2024-04-10 00:48:31,228 INFO SenderThread:144 [sender.py:send_exit():588] handling runtime: 53
97
+ 2024-04-10 00:48:31,230 INFO SenderThread:144 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
98
+ 2024-04-10 00:48:31,230 INFO SenderThread:144 [sender.py:send_exit():594] send defer
99
+ 2024-04-10 00:48:31,230 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
100
+ 2024-04-10 00:48:31,230 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 0
101
+ 2024-04-10 00:48:31,230 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
102
+ 2024-04-10 00:48:31,231 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 0
103
+ 2024-04-10 00:48:31,231 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 1
104
+ 2024-04-10 00:48:31,231 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
105
+ 2024-04-10 00:48:31,231 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 1
106
+ 2024-04-10 00:48:31,231 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
107
+ 2024-04-10 00:48:31,231 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 1
108
+ 2024-04-10 00:48:31,231 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 2
109
+ 2024-04-10 00:48:31,231 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
110
+ 2024-04-10 00:48:31,231 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 2
111
+ 2024-04-10 00:48:31,231 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
112
+ 2024-04-10 00:48:31,231 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 2
113
+ 2024-04-10 00:48:31,231 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 3
114
+ 2024-04-10 00:48:31,232 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
115
+ 2024-04-10 00:48:31,232 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 3
116
+ 2024-04-10 00:48:31,232 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
117
+ 2024-04-10 00:48:31,232 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 3
118
+ 2024-04-10 00:48:31,232 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 4
119
+ 2024-04-10 00:48:31,232 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
120
+ 2024-04-10 00:48:31,232 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 4
121
+ 2024-04-10 00:48:31,232 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
122
+ 2024-04-10 00:48:31,232 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 4
123
+ 2024-04-10 00:48:31,232 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 5
124
+ 2024-04-10 00:48:31,232 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
125
+ 2024-04-10 00:48:31,232 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 5
126
+ 2024-04-10 00:48:31,233 DEBUG SenderThread:144 [sender.py:send():379] send: summary
127
+ 2024-04-10 00:48:31,233 INFO SenderThread:144 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
128
+ 2024-04-10 00:48:31,233 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
129
+ 2024-04-10 00:48:31,233 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 5
130
+ 2024-04-10 00:48:31,233 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 6
131
+ 2024-04-10 00:48:31,233 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
132
+ 2024-04-10 00:48:31,233 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 6
133
+ 2024-04-10 00:48:31,234 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
134
+ 2024-04-10 00:48:31,234 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 6
135
+ 2024-04-10 00:48:31,234 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 7
136
+ 2024-04-10 00:48:31,234 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: status_report
137
+ 2024-04-10 00:48:31,234 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
138
+ 2024-04-10 00:48:31,234 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 7
139
+ 2024-04-10 00:48:31,234 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
140
+ 2024-04-10 00:48:31,234 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 7
141
+ 2024-04-10 00:48:31,434 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 8
142
+ 2024-04-10 00:48:31,434 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
143
+ 2024-04-10 00:48:31,434 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 8
144
+ 2024-04-10 00:48:31,434 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
145
+ 2024-04-10 00:48:31,434 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 8
146
+ 2024-04-10 00:48:31,434 INFO SenderThread:144 [job_builder.py:build():318] Attempting to build job artifact
147
+ 2024-04-10 00:48:31,436 INFO SenderThread:144 [job_builder.py:_get_source_type():466] no source found
148
+ 2024-04-10 00:48:31,436 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 9
149
+ 2024-04-10 00:48:31,436 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
150
+ 2024-04-10 00:48:31,436 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 9
151
+ 2024-04-10 00:48:31,437 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
152
+ 2024-04-10 00:48:31,437 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 9
153
+ 2024-04-10 00:48:31,437 INFO SenderThread:144 [dir_watcher.py:finish():358] shutting down directory watcher
154
+ 2024-04-10 00:48:32,127 INFO SenderThread:144 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/output.log
155
+ 2024-04-10 00:48:32,127 INFO SenderThread:144 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/wandb-summary.json
156
+ 2024-04-10 00:48:32,127 INFO SenderThread:144 [dir_watcher.py:finish():388] scan: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files
157
+ 2024-04-10 00:48:32,128 INFO SenderThread:144 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/wandb-summary.json wandb-summary.json
158
+ 2024-04-10 00:48:32,128 INFO SenderThread:144 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/conda-environment.yaml conda-environment.yaml
159
+ 2024-04-10 00:48:32,131 INFO SenderThread:144 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/wandb-metadata.json wandb-metadata.json
160
+ 2024-04-10 00:48:32,131 INFO SenderThread:144 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/requirements.txt requirements.txt
161
+ 2024-04-10 00:48:32,132 INFO SenderThread:144 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/output.log output.log
162
+ 2024-04-10 00:48:32,132 INFO SenderThread:144 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/config.yaml config.yaml
163
+ 2024-04-10 00:48:32,132 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 10
164
+ 2024-04-10 00:48:32,132 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
165
+ 2024-04-10 00:48:32,135 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 10
166
+ 2024-04-10 00:48:32,139 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
167
+ 2024-04-10 00:48:32,139 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 10
168
+ 2024-04-10 00:48:32,139 INFO SenderThread:144 [file_pusher.py:finish():172] shutting down file pusher
169
+ 2024-04-10 00:48:32,233 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: poll_exit
170
+ 2024-04-10 00:48:32,235 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: poll_exit
171
+ 2024-04-10 00:48:32,360 INFO wandb-upload_0:144 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/wandb-summary.json
172
+ 2024-04-10 00:48:32,410 INFO wandb-upload_2:144 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/output.log
173
+ 2024-04-10 00:48:32,428 INFO wandb-upload_1:144 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/requirements.txt
174
+ 2024-04-10 00:48:32,444 INFO wandb-upload_3:144 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/files/config.yaml
175
+ 2024-04-10 00:48:32,644 INFO Thread-11 (_thread_body):144 [sender.py:transition_state():614] send defer: 11
176
+ 2024-04-10 00:48:32,644 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
177
+ 2024-04-10 00:48:32,644 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 11
178
+ 2024-04-10 00:48:32,645 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
179
+ 2024-04-10 00:48:32,645 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 11
180
+ 2024-04-10 00:48:32,645 INFO SenderThread:144 [file_pusher.py:join():178] waiting for file pusher
181
+ 2024-04-10 00:48:32,646 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 12
182
+ 2024-04-10 00:48:32,647 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
183
+ 2024-04-10 00:48:32,647 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 12
184
+ 2024-04-10 00:48:32,647 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
185
+ 2024-04-10 00:48:32,647 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 12
186
+ 2024-04-10 00:48:32,647 INFO SenderThread:144 [file_stream.py:finish():614] file stream finish called
187
+ 2024-04-10 00:48:32,837 INFO SenderThread:144 [file_stream.py:finish():618] file stream finish is done
188
+ 2024-04-10 00:48:32,837 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 13
189
+ 2024-04-10 00:48:32,838 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
190
+ 2024-04-10 00:48:32,838 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 13
191
+ 2024-04-10 00:48:32,838 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
192
+ 2024-04-10 00:48:32,838 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 13
193
+ 2024-04-10 00:48:32,838 INFO SenderThread:144 [sender.py:transition_state():614] send defer: 14
194
+ 2024-04-10 00:48:32,838 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: defer
195
+ 2024-04-10 00:48:32,838 INFO HandlerThread:144 [handler.py:handle_request_defer():172] handle defer: 14
196
+ 2024-04-10 00:48:32,839 DEBUG SenderThread:144 [sender.py:send():379] send: final
197
+ 2024-04-10 00:48:32,839 DEBUG SenderThread:144 [sender.py:send():379] send: footer
198
+ 2024-04-10 00:48:32,839 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: defer
199
+ 2024-04-10 00:48:32,839 INFO SenderThread:144 [sender.py:send_request_defer():610] handle sender defer: 14
200
+ 2024-04-10 00:48:32,840 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: poll_exit
201
+ 2024-04-10 00:48:32,840 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: poll_exit
202
+ 2024-04-10 00:48:32,841 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: poll_exit
203
+ 2024-04-10 00:48:32,841 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: poll_exit
204
+ 2024-04-10 00:48:32,841 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: server_info
205
+ 2024-04-10 00:48:32,842 DEBUG SenderThread:144 [sender.py:send_request():406] send_request: server_info
206
+ 2024-04-10 00:48:32,844 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: get_summary
207
+ 2024-04-10 00:48:32,845 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: sampled_history
208
+ 2024-04-10 00:48:32,845 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: internal_messages
209
+ 2024-04-10 00:48:32,908 INFO MainThread:144 [wandb_run.py:_footer_history_summary_info():3920] rendering history
210
+ 2024-04-10 00:48:32,908 INFO MainThread:144 [wandb_run.py:_footer_history_summary_info():3952] rendering summary
211
+ 2024-04-10 00:48:32,908 INFO MainThread:144 [wandb_run.py:_footer_sync_info():3879] logging synced files
212
+ 2024-04-10 00:48:32,908 DEBUG HandlerThread:144 [handler.py:handle_request():146] handle_request: shutdown
213
+ 2024-04-10 00:48:32,909 INFO HandlerThread:144 [handler.py:finish():866] shutting down handler
214
+ 2024-04-10 00:48:33,842 INFO WriterThread:144 [datastore.py:close():296] close: /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/run-5h6j3v5e.wandb
215
+ 2024-04-10 00:48:33,908 INFO SenderThread:144 [sender.py:finish():1546] shutting down sender
216
+ 2024-04-10 00:48:33,908 INFO SenderThread:144 [file_pusher.py:finish():172] shutting down file pusher
217
+ 2024-04-10 00:48:33,908 INFO SenderThread:144 [file_pusher.py:join():178] waiting for file pusher
wandb/run-20240410_004732-5h6j3v5e/logs/debug.log ADDED
@@ -0,0 +1,34 @@
1
+ 2024-04-10 00:47:32,316 INFO MainThread:98 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-10 00:47:32,316 INFO MainThread:98 [wandb_setup.py:_flush():76] Configure stats pid to 98
3
+ 2024-04-10 00:47:32,316 INFO MainThread:98 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_setup.py:_flush():76] Applying login settings: {'api_key': '***REDACTED***'}
10
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/logs/debug.log
11
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240410_004732-5h6j3v5e/logs/debug-internal.log
12
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7ba9a17eea40>
13
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_init.py:init():567] calling init triggers
14
+ 2024-04-10 00:47:32,317 INFO MainThread:98 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
15
+ config: {}
16
+ 2024-04-10 00:47:32,318 INFO MainThread:98 [wandb_init.py:init():617] starting backend
17
+ 2024-04-10 00:47:32,318 INFO MainThread:98 [wandb_init.py:init():621] setting up manager
18
+ 2024-04-10 00:47:32,319 INFO MainThread:98 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
19
+ 2024-04-10 00:47:32,322 INFO MainThread:98 [wandb_init.py:init():629] backend started and connected
20
+ 2024-04-10 00:47:32,334 INFO MainThread:98 [wandb_run.py:_label_probe_notebook():1299] probe notebook
21
+ 2024-04-10 00:47:32,855 INFO MainThread:98 [wandb_init.py:init():721] updated telemetry
22
+ 2024-04-10 00:47:32,858 INFO MainThread:98 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
23
+ 2024-04-10 00:47:33,119 INFO MainThread:98 [wandb_run.py:_on_init():2344] communicating current version
24
+ 2024-04-10 00:47:33,185 INFO MainThread:98 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
25
+
26
+ 2024-04-10 00:47:33,185 INFO MainThread:98 [wandb_init.py:init():805] starting run threads in backend
27
+ 2024-04-10 00:47:49,192 INFO MainThread:98 [wandb_run.py:_console_start():2323] atexit reg
28
+ 2024-04-10 00:47:49,192 INFO MainThread:98 [wandb_run.py:_redirect():2178] redirect: wrap_raw
29
+ 2024-04-10 00:47:49,194 INFO MainThread:98 [wandb_run.py:_redirect():2243] Wrapping output streams.
30
+ 2024-04-10 00:47:49,194 INFO MainThread:98 [wandb_run.py:_redirect():2268] Redirects installed.
31
+ 2024-04-10 00:47:49,195 INFO MainThread:98 [wandb_init.py:init():848] run started, returning control to user process
32
+ 2024-04-10 00:47:49,201 INFO MainThread:98 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 7e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.07, 'num_train_epochs': 5, 'max_steps': 11000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr10_00-42-28_d91c9dc8354a', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 
'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
33
+ 2024-04-10 00:48:27,061 INFO MainThread:98 [jupyter.py:save_ipynb():373] not saving jupyter notebook
34
+ 2024-04-10 00:48:27,061 INFO MainThread:98 [wandb_init.py:_pause_backend():438] pausing backend
wandb/run-20240410_004732-5h6j3v5e/run-5h6j3v5e.wandb ADDED
Binary file (12.5 kB). View file
wandb/run-20240410_005137-yg20qnb4/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240410_005137-yg20qnb4/files/config.yaml ADDED
@@ -0,0 +1,686 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712710297.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ 4: 3.10.13
48
+ 5: 0.16.5
49
+ 6: 4.39.3
50
+ 8:
51
+ - 1
52
+ - 2
53
+ - 5
54
+ 9:
55
+ 1: transformers_trainer
56
+ 13: linux-x86_64
57
+ m:
58
+ - 1: train/global_step
59
+ 6:
60
+ - 3
61
+ - 1: train/loss
62
+ 5: 1
63
+ 6:
64
+ - 1
65
+ - 1: train/grad_norm
66
+ 5: 1
67
+ 6:
68
+ - 1
69
+ - 1: train/learning_rate
70
+ 5: 1
71
+ 6:
72
+ - 1
73
+ - 1: train/epoch
74
+ 5: 1
75
+ 6:
76
+ - 1
77
+ vocab_size:
78
+ desc: null
79
+ value: 32000
80
+ max_position_embeddings:
81
+ desc: null
82
+ value: 2048
83
+ hidden_size:
84
+ desc: null
85
+ value: 2048
86
+ intermediate_size:
87
+ desc: null
88
+ value: 5632
89
+ num_hidden_layers:
90
+ desc: null
91
+ value: 22
92
+ num_attention_heads:
93
+ desc: null
94
+ value: 32
95
+ num_key_value_heads:
96
+ desc: null
97
+ value: 4
98
+ hidden_act:
99
+ desc: null
100
+ value: silu
101
+ initializer_range:
102
+ desc: null
103
+ value: 0.02
104
+ rms_norm_eps:
105
+ desc: null
106
+ value: 1.0e-05
107
+ pretraining_tp:
108
+ desc: null
109
+ value: 1
110
+ use_cache:
111
+ desc: null
112
+ value: false
113
+ rope_theta:
114
+ desc: null
115
+ value: 10000.0
116
+ rope_scaling:
117
+ desc: null
118
+ value: null
119
+ attention_bias:
120
+ desc: null
121
+ value: false
122
+ attention_dropout:
123
+ desc: null
124
+ value: 0.0
125
+ return_dict:
126
+ desc: null
127
+ value: true
128
+ output_hidden_states:
129
+ desc: null
130
+ value: false
131
+ output_attentions:
132
+ desc: null
133
+ value: false
134
+ torchscript:
135
+ desc: null
136
+ value: false
137
+ torch_dtype:
138
+ desc: null
139
+ value: float32
140
+ use_bfloat16:
141
+ desc: null
142
+ value: false
143
+ tf_legacy_loss:
144
+ desc: null
145
+ value: false
146
+ pruned_heads:
147
+ desc: null
148
+ value: {}
149
+ tie_word_embeddings:
150
+ desc: null
151
+ value: false
152
+ chunk_size_feed_forward:
153
+ desc: null
154
+ value: 0
155
+ is_encoder_decoder:
156
+ desc: null
157
+ value: false
158
+ is_decoder:
159
+ desc: null
160
+ value: false
161
+ cross_attention_hidden_size:
162
+ desc: null
163
+ value: null
164
+ add_cross_attention:
165
+ desc: null
166
+ value: false
167
+ tie_encoder_decoder:
168
+ desc: null
169
+ value: false
170
+ max_length:
171
+ desc: null
172
+ value: 20
173
+ min_length:
174
+ desc: null
175
+ value: 0
176
+ do_sample:
177
+ desc: null
178
+ value: false
179
+ early_stopping:
180
+ desc: null
181
+ value: false
182
+ num_beams:
183
+ desc: null
184
+ value: 1
185
+ num_beam_groups:
186
+ desc: null
187
+ value: 1
188
+ diversity_penalty:
189
+ desc: null
190
+ value: 0.0
191
+ temperature:
192
+ desc: null
193
+ value: 1.0
194
+ top_k:
195
+ desc: null
196
+ value: 50
197
+ top_p:
198
+ desc: null
199
+ value: 1.0
200
+ typical_p:
201
+ desc: null
202
+ value: 1.0
203
+ repetition_penalty:
204
+ desc: null
205
+ value: 1.0
206
+ length_penalty:
207
+ desc: null
208
+ value: 1.0
209
+ no_repeat_ngram_size:
210
+ desc: null
211
+ value: 0
212
+ encoder_no_repeat_ngram_size:
213
+ desc: null
214
+ value: 0
215
+ bad_words_ids:
216
+ desc: null
217
+ value: null
218
+ num_return_sequences:
219
+ desc: null
220
+ value: 1
221
+ output_scores:
222
+ desc: null
223
+ value: false
224
+ return_dict_in_generate:
225
+ desc: null
226
+ value: false
227
+ forced_bos_token_id:
228
+ desc: null
229
+ value: null
230
+ forced_eos_token_id:
231
+ desc: null
232
+ value: null
233
+ remove_invalid_values:
234
+ desc: null
235
+ value: false
236
+ exponential_decay_length_penalty:
237
+ desc: null
238
+ value: null
239
+ suppress_tokens:
240
+ desc: null
241
+ value: null
242
+ begin_suppress_tokens:
243
+ desc: null
244
+ value: null
245
+ architectures:
246
+ desc: null
247
+ value:
248
+ - LlamaForCausalLM
249
+ finetuning_task:
250
+ desc: null
251
+ value: null
252
+ id2label:
253
+ desc: null
254
+ value:
255
+ '0': LABEL_0
256
+ '1': LABEL_1
257
+ label2id:
258
+ desc: null
259
+ value:
260
+ LABEL_0: 0
261
+ LABEL_1: 1
262
+ tokenizer_class:
263
+ desc: null
264
+ value: null
265
+ prefix:
266
+ desc: null
267
+ value: null
268
+ bos_token_id:
269
+ desc: null
270
+ value: 1
271
+ pad_token_id:
272
+ desc: null
273
+ value: null
274
+ eos_token_id:
275
+ desc: null
276
+ value: 2
277
+ sep_token_id:
278
+ desc: null
279
+ value: null
280
+ decoder_start_token_id:
281
+ desc: null
282
+ value: null
283
+ task_specific_params:
284
+ desc: null
285
+ value: null
286
+ problem_type:
287
+ desc: null
288
+ value: null
289
+ _name_or_path:
290
+ desc: null
291
+ value: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
292
+ transformers_version:
293
+ desc: null
294
+ value: 4.39.3
295
+ model_type:
296
+ desc: null
297
+ value: llama
298
+ quantization_config:
299
+ desc: null
300
+ value:
301
+ quant_method: QuantizationMethod.BITS_AND_BYTES
302
+ _load_in_8bit: false
303
+ _load_in_4bit: true
304
+ llm_int8_threshold: 6.0
305
+ llm_int8_skip_modules: null
306
+ llm_int8_enable_fp32_cpu_offload: false
307
+ llm_int8_has_fp16_weight: false
308
+ bnb_4bit_quant_type: nf4
309
+ bnb_4bit_use_double_quant: false
310
+ bnb_4bit_compute_dtype: float16
311
+ bnb_4bit_quant_storage: uint8
312
+ load_in_4bit: true
313
+ load_in_8bit: false
314
+ output_dir:
315
+ desc: null
316
+ value: /kaggle/working/
317
+ overwrite_output_dir:
318
+ desc: null
319
+ value: false
320
+ do_train:
321
+ desc: null
322
+ value: false
323
+ do_eval:
324
+ desc: null
325
+ value: false
326
+ do_predict:
327
+ desc: null
328
+ value: false
329
+ evaluation_strategy:
330
+ desc: null
331
+ value: 'no'
332
+ prediction_loss_only:
333
+ desc: null
334
+ value: false
335
+ per_device_train_batch_size:
336
+ desc: null
337
+ value: 8
338
+ per_device_eval_batch_size:
339
+ desc: null
340
+ value: 8
341
+ per_gpu_train_batch_size:
342
+ desc: null
343
+ value: null
344
+ per_gpu_eval_batch_size:
345
+ desc: null
346
+ value: null
347
+ gradient_accumulation_steps:
348
+ desc: null
349
+ value: 1
350
+ eval_accumulation_steps:
351
+ desc: null
352
+ value: null
353
+ eval_delay:
354
+ desc: null
355
+ value: 0
356
+ learning_rate:
357
+ desc: null
358
+ value: 7.0e-06
359
+ weight_decay:
360
+ desc: null
361
+ value: 0.001
362
+ adam_beta1:
363
+ desc: null
364
+ value: 0.9
365
+ adam_beta2:
366
+ desc: null
367
+ value: 0.999
368
+ adam_epsilon:
369
+ desc: null
370
+ value: 1.0e-08
371
+ max_grad_norm:
372
+ desc: null
373
+ value: 0.07
374
+ num_train_epochs:
375
+ desc: null
376
+ value: 5
377
+ max_steps:
378
+ desc: null
379
+ value: 20000
380
+ lr_scheduler_type:
381
+ desc: null
382
+ value: cosine
383
+ lr_scheduler_kwargs:
384
+ desc: null
385
+ value: {}
386
+ warmup_ratio:
387
+ desc: null
388
+ value: 0.03
389
+ warmup_steps:
390
+ desc: null
391
+ value: 0
392
+ log_level:
393
+ desc: null
394
+ value: passive
395
+ log_level_replica:
396
+ desc: null
397
+ value: warning
398
+ log_on_each_node:
399
+ desc: null
400
+ value: true
401
+ logging_dir:
402
+ desc: null
403
+ value: /kaggle/working/runs/Apr10_00-49-34_d91c9dc8354a
404
+ logging_strategy:
405
+ desc: null
406
+ value: steps
407
+ logging_first_step:
408
+ desc: null
409
+ value: false
410
+ logging_steps:
411
+ desc: null
412
+ value: 100
413
+ logging_nan_inf_filter:
414
+ desc: null
415
+ value: true
416
+ save_strategy:
417
+ desc: null
418
+ value: steps
419
+ save_steps:
420
+ desc: null
421
+ value: 100
422
+ save_total_limit:
423
+ desc: null
424
+ value: 1
425
+ save_safetensors:
426
+ desc: null
427
+ value: true
428
+ save_on_each_node:
429
+ desc: null
430
+ value: false
431
+ save_only_model:
432
+ desc: null
433
+ value: false
434
+ no_cuda:
435
+ desc: null
436
+ value: false
437
+ use_cpu:
438
+ desc: null
439
+ value: false
440
+ use_mps_device:
441
+ desc: null
442
+ value: false
443
+ seed:
444
+ desc: null
445
+ value: 42
446
+ data_seed:
447
+ desc: null
448
+ value: null
449
+ jit_mode_eval:
450
+ desc: null
451
+ value: false
452
+ use_ipex:
453
+ desc: null
454
+ value: false
455
+ bf16:
456
+ desc: null
457
+ value: false
458
+ fp16:
459
+ desc: null
460
+ value: false
461
+ fp16_opt_level:
462
+ desc: null
463
+ value: O1
464
+ half_precision_backend:
465
+ desc: null
466
+ value: auto
467
+ bf16_full_eval:
468
+ desc: null
469
+ value: false
470
+ fp16_full_eval:
471
+ desc: null
472
+ value: false
473
+ tf32:
474
+ desc: null
475
+ value: null
476
+ local_rank:
477
+ desc: null
478
+ value: 0
479
+ ddp_backend:
480
+ desc: null
481
+ value: null
482
+ tpu_num_cores:
483
+ desc: null
484
+ value: null
485
+ tpu_metrics_debug:
486
+ desc: null
487
+ value: false
488
+ debug:
489
+ desc: null
490
+ value: []
491
+ dataloader_drop_last:
492
+ desc: null
493
+ value: false
494
+ eval_steps:
495
+ desc: null
496
+ value: null
497
+ dataloader_num_workers:
498
+ desc: null
499
+ value: 8
500
+ dataloader_prefetch_factor:
501
+ desc: null
502
+ value: null
503
+ past_index:
504
+ desc: null
505
+ value: -1
506
+ run_name:
507
+ desc: null
508
+ value: /kaggle/working/
509
+ disable_tqdm:
510
+ desc: null
511
+ value: false
512
+ remove_unused_columns:
513
+ desc: null
514
+ value: true
515
+ label_names:
516
+ desc: null
517
+ value: null
518
+ load_best_model_at_end:
519
+ desc: null
520
+ value: false
521
+ metric_for_best_model:
522
+ desc: null
523
+ value: null
524
+ greater_is_better:
525
+ desc: null
526
+ value: null
527
+ ignore_data_skip:
528
+ desc: null
529
+ value: false
530
+ fsdp:
531
+ desc: null
532
+ value: []
533
+ fsdp_min_num_params:
534
+ desc: null
535
+ value: 0
536
+ fsdp_config:
537
+ desc: null
538
+ value:
539
+ min_num_params: 0
540
+ xla: false
541
+ xla_fsdp_v2: false
542
+ xla_fsdp_grad_ckpt: false
543
+ fsdp_transformer_layer_cls_to_wrap:
544
+ desc: null
545
+ value: null
546
+ accelerator_config:
547
+ desc: null
548
+ value:
549
+ split_batches: false
550
+ dispatch_batches: null
551
+ even_batches: true
552
+ use_seedable_sampler: true
553
+ deepspeed:
554
+ desc: null
555
+ value: null
556
+ label_smoothing_factor:
557
+ desc: null
558
+ value: 0.0
559
+ optim:
560
+ desc: null
561
+ value: paged_adamw_32bit
562
+ optim_args:
563
+ desc: null
564
+ value: null
565
+ adafactor:
566
+ desc: null
567
+ value: false
568
+ group_by_length:
569
+ desc: null
570
+ value: false
571
+ length_column_name:
572
+ desc: null
573
+ value: length
574
+ report_to:
575
+ desc: null
576
+ value:
577
+ - tensorboard
578
+ - wandb
579
+ ddp_find_unused_parameters:
580
+ desc: null
581
+ value: null
582
+ ddp_bucket_cap_mb:
583
+ desc: null
584
+ value: null
585
+ ddp_broadcast_buffers:
586
+ desc: null
587
+ value: null
588
+ dataloader_pin_memory:
589
+ desc: null
590
+ value: true
591
+ dataloader_persistent_workers:
592
+ desc: null
593
+ value: false
594
+ skip_memory_metrics:
595
+ desc: null
596
+ value: true
597
+ use_legacy_prediction_loop:
598
+ desc: null
599
+ value: false
600
+ push_to_hub:
601
+ desc: null
602
+ value: false
603
+ resume_from_checkpoint:
604
+ desc: null
605
+ value: null
606
+ hub_model_id:
607
+ desc: null
608
+ value: null
609
+ hub_strategy:
610
+ desc: null
611
+ value: every_save
612
+ hub_token:
613
+ desc: null
614
+ value: <HUB_TOKEN>
615
+ hub_private_repo:
616
+ desc: null
617
+ value: false
618
+ hub_always_push:
619
+ desc: null
620
+ value: false
621
+ gradient_checkpointing:
622
+ desc: null
623
+ value: true
624
+ gradient_checkpointing_kwargs:
625
+ desc: null
626
+ value: null
627
+ include_inputs_for_metrics:
628
+ desc: null
629
+ value: false
630
+ fp16_backend:
631
+ desc: null
632
+ value: auto
633
+ push_to_hub_model_id:
634
+ desc: null
635
+ value: null
636
+ push_to_hub_organization:
637
+ desc: null
638
+ value: null
639
+ push_to_hub_token:
640
+ desc: null
641
+ value: <PUSH_TO_HUB_TOKEN>
642
+ mp_parameters:
643
+ desc: null
644
+ value: ''
645
+ auto_find_batch_size:
646
+ desc: null
647
+ value: true
648
+ full_determinism:
649
+ desc: null
650
+ value: false
651
+ torchdynamo:
652
+ desc: null
653
+ value: null
654
+ ray_scope:
655
+ desc: null
656
+ value: last
657
+ ddp_timeout:
658
+ desc: null
659
+ value: 1800
660
+ torch_compile:
661
+ desc: null
662
+ value: false
663
+ torch_compile_backend:
664
+ desc: null
665
+ value: null
666
+ torch_compile_mode:
667
+ desc: null
668
+ value: null
669
+ dispatch_batches:
670
+ desc: null
671
+ value: null
672
+ split_batches:
673
+ desc: null
674
+ value: null
675
+ include_tokens_per_second:
676
+ desc: null
677
+ value: false
678
+ include_num_input_tokens_seen:
679
+ desc: null
680
+ value: false
681
+ neftune_noise_alpha:
682
+ desc: null
683
+ value: null
684
+ optim_target_modules:
685
+ desc: null
686
+ value: null
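The config.yaml above records both the bitsandbytes quantization settings and the HF Trainer arguments for this run. For reference only, a minimal Python sketch follows, reconstructed from the logged values and assuming the standard transformers/bitsandbytes APIs; the training notebook itself (kaggle.ipynb) is not part of this commit, so this is not the author's code.

import torch
from transformers import BitsAndBytesConfig, TrainingArguments

# 4-bit NF4 quantization with float16 compute, mirroring quantization_config above.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=False,
    bnb_4bit_compute_dtype=torch.float16,
)

# Trainer arguments with values copied from the logged config.yaml.
training_args = TrainingArguments(
    output_dir="/kaggle/working/",
    per_device_train_batch_size=8,
    gradient_accumulation_steps=1,
    learning_rate=7e-6,
    weight_decay=0.001,
    max_grad_norm=0.07,
    num_train_epochs=5,
    max_steps=20000,                  # max_steps takes precedence over num_train_epochs
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    logging_steps=100,
    save_steps=100,
    save_total_limit=1,
    optim="paged_adamw_32bit",
    gradient_checkpointing=True,
    dataloader_num_workers=8,
    report_to=["tensorboard", "wandb"],
)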
wandb/run-20240410_005137-yg20qnb4/files/output.log ADDED
@@ -0,0 +1,38 @@
1
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:557: UserWarning: This DataLoader will create 8 worker processes in total. Our suggested max number of worker in current system is 4, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
2
+ warnings.warn(_create_warning_msg(
3
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
4
+ To disable this warning, you can either:
5
+ - Avoid using `tokenizers` before the fork if possible
6
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
7
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
8
+ To disable this warning, you can either:
9
+ - Avoid using `tokenizers` before the fork if possible
10
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
11
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
12
+ To disable this warning, you can either:
13
+ - Avoid using `tokenizers` before the fork if possible
14
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
15
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
16
+ To disable this warning, you can either:
17
+ - Avoid using `tokenizers` before the fork if possible
18
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
19
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
20
+ To disable this warning, you can either:
21
+ - Avoid using `tokenizers` before the fork if possible
22
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
23
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
24
+ To disable this warning, you can either:
25
+ - Avoid using `tokenizers` before the fork if possible
26
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
27
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
28
+ To disable this warning, you can either:
29
+ - Avoid using `tokenizers` before the fork if possible
30
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
31
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
32
+ To disable this warning, you can either:
33
+ - Avoid using `tokenizers` before the fork if possible
34
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
35
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
36
+ warnings.warn(
37
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
38
+ warnings.warn(
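The log above repeats two library warnings: the tokenizers fork warning triggered by the 8 DataLoader workers, and the torch.utils.checkpoint use_reentrant deprecation triggered by gradient checkpointing. A hedged sketch of the usual mitigations follows; the notebook may handle these differently or simply ignore them.

import os

# Silence "The current process just got forked..." by disabling tokenizer
# parallelism before DataLoader workers are spawned.
os.environ["TOKENIZERS_PARALLELISM"] = "false"

# Address the use_reentrant deprecation by passing the flag explicitly through
# TrainingArguments; gradient_checkpointing_kwargs is available in recent
# transformers releases, including the 4.39.3 logged here.
extra_trainer_kwargs = dict(
    gradient_checkpointing=True,
    gradient_checkpointing_kwargs={"use_reentrant": False},
    dataloader_num_workers=4,  # the DataLoader warning suggests at most 4 workers on this 4-CPU host
)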
wandb/run-20240410_005137-yg20qnb4/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.0
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
655
+ rich==13.7.1
656
+ rmm==23.8.0
657
+ rope==1.13.0
658
+ rpds-py==0.16.2
659
+ rsa==4.9
660
+ ruamel-yaml-conda==0.15.100
661
+ ruamel.yaml.clib==0.2.7
662
+ ruamel.yaml==0.17.40
663
+ s2sphere==0.2.5
664
+ s3fs==2024.2.0
665
+ s3transfer==0.6.2
666
+ safetensors==0.4.2
667
+ scattertext==0.1.19
668
+ scikit-image==0.22.0
669
+ scikit-learn-intelex==2024.2.0
670
+ scikit-learn==1.2.2
671
+ scikit-multilearn==0.2.0
672
+ scikit-optimize==0.10.1
673
+ scikit-plot==0.3.7
674
+ scikit-surprise==1.1.3
675
+ scipy==1.11.4
676
+ scipy==1.12.0
677
+ seaborn==0.12.2
678
+ segment_anything==1.0
679
+ segregation==2.5
680
+ semver==3.0.2
681
+ sentencepiece==0.2.0
682
+ sentry-sdk==1.44.1
683
+ setproctitle==1.3.3
684
+ setuptools-git==1.2
685
+ setuptools-scm==8.0.4
686
+ setuptools==69.0.3
687
+ shap==0.44.1
688
+ shapely==2.0.3
689
+ shellingham==1.5.4
690
+ shtab==1.7.1
691
+ simpervisor==1.0.0
692
+ simplejson==3.19.2
693
+ six==1.16.0
694
+ sklearn-pandas==2.2.0
695
+ slicer==0.0.7
696
+ smart-open==6.4.0
697
+ smmap==5.0.1
698
+ sniffio==1.3.0
699
+ snowballstemmer==2.2.0
700
+ snuggs==1.4.7
701
+ sortedcontainers==2.4.0
702
+ soundfile==0.12.1
703
+ soupsieve==2.5
704
+ soxr==0.3.7
705
+ spacy-legacy==3.0.12
706
+ spacy-loggers==1.0.5
707
+ spacy==3.7.2
708
+ spaghetti==1.7.5.post1
709
+ spectral==0.23.1
710
+ spglm==1.1.0
711
+ sphinx-rtd-theme==0.2.4
712
+ spint==1.0.7
713
+ splot==1.1.5.post1
714
+ spopt==0.6.0
715
+ spreg==1.4.2
716
+ spvcm==0.3.0
717
+ sqlparse==0.4.4
718
+ squarify==0.4.3
719
+ srsly==2.4.8
720
+ stable-baselines3==2.1.0
721
+ stack-data==0.6.2
722
+ stack-data==0.6.3
723
+ stanio==0.5.0
724
+ starlette==0.32.0.post1
725
+ statsmodels==0.14.1
726
+ stemming==1.0.1
727
+ stop-words==2018.7.23
728
+ stopit==1.1.2
729
+ stumpy==1.12.0
730
+ sympy==1.12
731
+ tables==3.9.2
732
+ tabulate==0.9.0
733
+ tangled-up-in-unicode==0.2.0
734
+ tbb==2021.12.0
735
+ tblib==3.0.0
736
+ tenacity==8.2.3
737
+ tensorboard-data-server==0.7.2
738
+ tensorboard-plugin-profile==2.15.0
739
+ tensorboard==2.15.1
740
+ tensorboardX==2.6.2.2
741
+ tensorflow-cloud==0.1.16
742
+ tensorflow-datasets==4.9.4
743
+ tensorflow-decision-forests==1.8.1
744
+ tensorflow-estimator==2.15.0
745
+ tensorflow-hub==0.16.1
746
+ tensorflow-io-gcs-filesystem==0.35.0
747
+ tensorflow-io==0.35.0
748
+ tensorflow-metadata==0.14.0
749
+ tensorflow-probability==0.23.0
750
+ tensorflow-serving-api==2.14.1
751
+ tensorflow-text==2.15.0
752
+ tensorflow-transform==0.14.0
753
+ tensorflow==2.15.0
754
+ tensorstore==0.1.56
755
+ termcolor==2.4.0
756
+ terminado==0.18.0
757
+ testpath==0.6.0
758
+ text-unidecode==1.3
759
+ textblob==0.18.0.post0
760
+ texttable==1.7.0
761
+ tf_keras==2.15.1
762
+ tfp-nightly==0.24.0.dev0
763
+ thinc==8.2.2
764
+ threadpoolctl==3.2.0
765
+ tifffile==2023.12.9
766
+ timm==0.9.16
767
+ tinycss2==1.2.1
768
+ tobler==0.11.2
769
+ tokenizers==0.15.2
770
+ toml==0.10.2
771
+ tomli==2.0.1
772
+ tomlkit==0.12.4
773
+ toolz==0.12.1
774
+ torch==2.1.2
775
+ torchaudio==2.1.2
776
+ torchdata==0.7.1
777
+ torchinfo==1.8.0
778
+ torchmetrics==1.3.2
779
+ torchtext==0.16.2
780
+ torchvision==0.16.2
781
+ tornado==6.3.3
782
+ tqdm==4.66.1
783
+ traceml==1.0.8
784
+ traitlets==5.9.0
785
+ traittypes==0.2.1
786
+ transformers==4.39.3
787
+ treelite-runtime==3.2.0
788
+ treelite==3.2.0
789
+ trl==0.8.1
790
+ truststore==0.8.0
791
+ trx-python==0.2.9
792
+ tsfresh==0.20.2
793
+ typeguard==4.1.5
794
+ typer==0.9.0
795
+ typer==0.9.4
796
+ types-python-dateutil==2.8.19.20240106
797
+ typing-inspect==0.9.0
798
+ typing-utils==0.1.0
799
+ typing_extensions==4.9.0
800
+ tyro==0.8.3
801
+ tzdata==2023.4
802
+ uc-micro-py==1.0.3
803
+ ucx-py==0.33.0
804
+ ujson==5.9.0
805
+ umap-learn==0.5.5
806
+ unicodedata2==15.1.0
807
+ update-checker==0.18.0
808
+ uri-template==1.3.0
809
+ uritemplate==3.0.1
810
+ urllib3==1.26.18
811
+ urllib3==2.1.0
812
+ urwid==2.6.10
813
+ urwid_readline==0.14
814
+ uvicorn==0.25.0
815
+ uvloop==0.19.0
816
+ vaex-astro==0.9.3
817
+ vaex-core==4.17.1
818
+ vaex-hdf5==0.14.1
819
+ vaex-jupyter==0.8.2
820
+ vaex-ml==0.18.3
821
+ vaex-server==0.9.0
822
+ vaex-viz==0.5.4
823
+ vaex==4.17.0
824
+ vec_noise==1.1.4
825
+ vecstack==0.4.0
826
+ virtualenv==20.21.0
827
+ visions==0.7.5
828
+ vowpalwabbit==9.9.0
829
+ vtk==9.3.0
830
+ wandb==0.16.5
831
+ wasabi==1.1.2
832
+ watchfiles==0.21.0
833
+ wavio==0.0.8
834
+ wcwidth==0.2.13
835
+ weasel==0.3.4
836
+ webcolors==1.13
837
+ webencodings==0.5.1
838
+ websocket-client==1.7.0
839
+ websockets==12.0
840
+ wfdb==4.1.2
841
+ whatthepatch==1.0.5
842
+ wheel==0.42.0
843
+ widgetsnbextension==3.6.6
844
+ witwidget==1.8.1
845
+ woodwork==0.29.0
846
+ wordcloud==1.9.3
847
+ wordsegment==1.3.1
848
+ wrapt==1.14.1
849
+ xarray-einstats==0.7.0
850
+ xarray==2024.3.0
851
+ xgboost==2.0.3
852
+ xvfbwrapper==0.2.9
853
+ xxhash==3.4.1
854
+ xyzservices==2023.10.1
855
+ y-py==0.6.2
856
+ yapf==0.40.2
857
+ yarl==1.9.3
858
+ yarl==1.9.4
859
+ ydata-profiling==4.6.4
860
+ yellowbrick==1.5
861
+ ypy-websocket==0.8.4
862
+ zict==3.0.0
863
+ zipp==3.17.0
864
+ zstandard==0.22.0
wandb/run-20240410_005137-yg20qnb4/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-10T00:51:38.343172",
5
+ "startedAt": "2024-04-10T00:51:37.649166",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "d91c9dc8354a",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.156,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.156,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.156,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.156,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.156,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5569.163803100586
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
wandb/run-20240410_005137-yg20qnb4/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
1
+ {"train/loss": 2.7614, "train/grad_norm": 0.0, "train/learning_rate": 1.1666666666666666e-06, "train/epoch": 0.0, "train/global_step": 100, "_timestamp": 1712710567.6870174, "_runtime": 270.0323574542999, "_step": 0, "_wandb": {"runtime": 331}}
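The single-line summary above records the last logged training metrics (global step 100, loss 2.7614, learning rate ~1.17e-06). A trivial sketch, assuming only the Python standard library, of reading it back from the path recorded in this commit:

import json

with open("wandb/run-20240410_005137-yg20qnb4/files/wandb-summary.json") as f:
    summary = json.load(f)

print(summary["train/loss"], summary["train/global_step"])  # 2.7614 100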
wandb/run-20240410_005137-yg20qnb4/logs/debug-internal.log ADDED
@@ -0,0 +1,353 @@
1
+ 2024-04-10 00:51:37,655 INFO StreamThr :295 [internal.py:wandb_internal():86] W&B internal server running at pid: 295, started at: 2024-04-10 00:51:37.654780
2
+ 2024-04-10 00:51:37,657 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-10 00:51:38,045 INFO WriterThread:295 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/run-yg20qnb4.wandb
4
+ 2024-04-10 00:51:38,045 DEBUG SenderThread:295 [sender.py:send():379] send: header
5
+ 2024-04-10 00:51:38,048 DEBUG SenderThread:295 [sender.py:send():379] send: run
6
+ 2024-04-10 00:51:38,244 INFO SenderThread:295 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files
7
+ 2024-04-10 00:51:38,244 INFO SenderThread:295 [sender.py:_start_run_threads():1124] run started: yg20qnb4 with start time 1712710297.65466
8
+ 2024-04-10 00:51:38,252 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-10 00:51:38,252 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-10 00:51:38,316 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-10 00:51:38,328 DEBUG HandlerThread:295 [system_info.py:__init__():26] System info init
12
+ 2024-04-10 00:51:38,328 DEBUG HandlerThread:295 [system_info.py:__init__():41] System info init done
13
+ 2024-04-10 00:51:38,328 INFO HandlerThread:295 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-10 00:51:38,328 INFO SystemMonitor:295 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-10 00:51:38,328 INFO HandlerThread:295 [system_monitor.py:probe():214] Collecting system info
16
+ 2024-04-10 00:51:38,329 INFO SystemMonitor:295 [interfaces.py:start():190] Started cpu monitoring
17
+ 2024-04-10 00:51:38,329 INFO SystemMonitor:295 [interfaces.py:start():190] Started disk monitoring
18
+ 2024-04-10 00:51:38,330 INFO SystemMonitor:295 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-10 00:51:38,331 INFO SystemMonitor:295 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-10 00:51:38,332 INFO SystemMonitor:295 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-10 00:51:38,343 DEBUG HandlerThread:295 [system_info.py:probe():150] Probing system
22
+ 2024-04-10 00:51:38,345 DEBUG HandlerThread:295 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-10 00:51:38,345 DEBUG HandlerThread:295 [system_info.py:probe():198] Probing system done
24
+ 2024-04-10 00:51:38,345 DEBUG HandlerThread:295 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-10T00:51:38.343172', 'startedAt': '2024-04-10T00:51:37.649166', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': 'd91c9dc8354a', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.156, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5569.163803100586}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-10 00:51:38,345 INFO HandlerThread:295 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-10 00:51:38,345 INFO HandlerThread:295 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-10 00:51:38,345 DEBUG HandlerThread:295 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-10 00:51:39,246 INFO Thread-12 :295 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/conda-environment.yaml
29
+ 2024-04-10 00:51:53,359 ERROR HandlerThread:295 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
+ Traceback (most recent call last):
31
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
+ subprocess.call(
33
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
34
+ return p.wait(timeout=timeout)
35
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
36
+ return self._wait(timeout=timeout)
37
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
+ raise TimeoutExpired(self.args, timeout)
39
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-10 00:51:53,359 DEBUG HandlerThread:295 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-10 00:51:53,360 INFO HandlerThread:295 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-10 00:51:53,365 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-10 00:51:53,365 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-10 00:51:53,365 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-10 00:51:53,366 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-10 00:51:53,366 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-10 00:51:53,366 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: keepalive
48
+ 2024-04-10 00:51:53,366 DEBUG SenderThread:295 [sender.py:send():379] send: files
49
+ 2024-04-10 00:51:53,367 INFO SenderThread:295 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
+ 2024-04-10 00:51:53,725 INFO wandb-upload_0:295 [upload_job.py:push():131] Uploaded file /tmp/tmpvu1p8ejawandb/9o6rhbsf-wandb-metadata.json
51
+ 2024-04-10 00:51:54,249 INFO Thread-12 :295 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/wandb-metadata.json
52
+ 2024-04-10 00:51:54,295 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: python_packages
53
+ 2024-04-10 00:51:54,295 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: python_packages
54
+ 2024-04-10 00:51:54,300 DEBUG SenderThread:295 [sender.py:send():379] send: telemetry
55
+ 2024-04-10 00:51:54,309 DEBUG SenderThread:295 [sender.py:send():379] send: config
56
+ 2024-04-10 00:51:54,311 DEBUG SenderThread:295 [sender.py:send():379] send: metric
57
+ 2024-04-10 00:51:54,312 DEBUG SenderThread:295 [sender.py:send():379] send: telemetry
58
+ 2024-04-10 00:51:54,312 DEBUG SenderThread:295 [sender.py:send():379] send: metric
59
+ 2024-04-10 00:51:54,313 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
60
+ 2024-04-10 00:51:54,314 WARNING SenderThread:295 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
61
+ 2024-04-10 00:51:54,314 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
62
+ 2024-04-10 00:51:54,314 DEBUG SenderThread:295 [sender.py:send():379] send: telemetry
63
+ 2024-04-10 00:51:54,316 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
64
+ 2024-04-10 00:51:55,249 INFO Thread-12 :295 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/requirements.txt
65
+ 2024-04-10 00:51:55,250 INFO Thread-12 :295 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/output.log
66
+ 2024-04-10 00:51:57,250 INFO Thread-12 :295 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/output.log
67
+ 2024-04-10 00:51:58,461 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
68
+ 2024-04-10 00:52:03,461 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-10 00:52:08,467 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
70
+ 2024-04-10 00:52:09,254 INFO Thread-12 :295 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/config.yaml
71
+ 2024-04-10 00:52:09,297 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
72
+ 2024-04-10 00:52:09,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
73
+ 2024-04-10 00:52:09,298 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-10 00:52:14,444 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-10 00:52:19,444 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
76
+ 2024-04-10 00:52:24,296 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
77
+ 2024-04-10 00:52:24,297 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
78
+ 2024-04-10 00:52:24,337 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
79
+ 2024-04-10 00:52:25,427 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
80
+ 2024-04-10 00:52:30,428 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
81
+ 2024-04-10 00:52:35,429 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
82
+ 2024-04-10 00:52:38,332 DEBUG SystemMonitor:295 [system_monitor.py:_start():172] Starting system metrics aggregation loop
83
+ 2024-04-10 00:52:38,333 DEBUG SenderThread:295 [sender.py:send():379] send: stats
84
+ 2024-04-10 00:52:39,296 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
85
+ 2024-04-10 00:52:39,296 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
86
+ 2024-04-10 00:52:39,337 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
87
+ 2024-04-10 00:52:41,396 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
88
+ 2024-04-10 00:52:46,397 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
89
+ 2024-04-10 00:52:51,398 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
90
+ 2024-04-10 00:52:54,296 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
91
+ 2024-04-10 00:52:54,297 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
92
+ 2024-04-10 00:52:54,337 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
93
+ 2024-04-10 00:52:56,461 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
94
+ 2024-04-10 00:53:01,462 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
95
+ 2024-04-10 00:53:06,463 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
96
+ 2024-04-10 00:53:08,334 DEBUG SenderThread:295 [sender.py:send():379] send: stats
97
+ 2024-04-10 00:53:09,296 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
98
+ 2024-04-10 00:53:09,297 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
99
+ 2024-04-10 00:53:09,337 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
100
+ 2024-04-10 00:53:12,403 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
101
+ 2024-04-10 00:53:17,404 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
102
+ 2024-04-10 00:53:22,405 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
103
+ 2024-04-10 00:53:24,296 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
104
+ 2024-04-10 00:53:24,297 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
105
+ 2024-04-10 00:53:24,337 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
106
+ 2024-04-10 00:53:28,394 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
107
+ 2024-04-10 00:53:33,394 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
108
+ 2024-04-10 00:53:38,335 DEBUG SenderThread:295 [sender.py:send():379] send: stats
109
+ 2024-04-10 00:53:39,297 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
110
+ 2024-04-10 00:53:39,298 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
111
+ 2024-04-10 00:53:39,338 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
112
+ 2024-04-10 00:53:39,421 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
113
+ 2024-04-10 00:53:44,422 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
114
+ 2024-04-10 00:53:49,423 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
115
+ 2024-04-10 00:53:54,297 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
116
+ 2024-04-10 00:53:54,297 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
117
+ 2024-04-10 00:53:54,338 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
118
+ 2024-04-10 00:53:54,465 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
119
+ 2024-04-10 00:53:59,465 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
120
+ 2024-04-10 00:54:04,466 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
121
+ 2024-04-10 00:54:08,336 DEBUG SenderThread:295 [sender.py:send():379] send: stats
122
+ 2024-04-10 00:54:09,299 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
123
+ 2024-04-10 00:54:09,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
124
+ 2024-04-10 00:54:09,340 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
125
+ 2024-04-10 00:54:09,483 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
126
+ 2024-04-10 00:54:14,484 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
127
+ 2024-04-10 00:54:19,485 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
128
+ 2024-04-10 00:54:24,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
129
+ 2024-04-10 00:54:24,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
130
+ 2024-04-10 00:54:24,339 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
131
+ 2024-04-10 00:54:25,392 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
132
+ 2024-04-10 00:54:30,392 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
133
+ 2024-04-10 00:54:35,393 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
134
+ 2024-04-10 00:54:38,337 DEBUG SenderThread:295 [sender.py:send():379] send: stats
135
+ 2024-04-10 00:54:39,299 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
136
+ 2024-04-10 00:54:39,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
137
+ 2024-04-10 00:54:39,340 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
138
+ 2024-04-10 00:54:40,426 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
139
+ 2024-04-10 00:54:45,427 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
140
+ 2024-04-10 00:54:50,428 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
141
+ 2024-04-10 00:54:54,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
142
+ 2024-04-10 00:54:54,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
143
+ 2024-04-10 00:54:54,339 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
144
+ 2024-04-10 00:54:55,464 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
145
+ 2024-04-10 00:55:00,465 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
146
+ 2024-04-10 00:55:05,466 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
147
+ 2024-04-10 00:55:08,338 DEBUG SenderThread:295 [sender.py:send():379] send: stats
148
+ 2024-04-10 00:55:09,299 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
149
+ 2024-04-10 00:55:09,300 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
150
+ 2024-04-10 00:55:09,341 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
151
+ 2024-04-10 00:55:11,386 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
152
+ 2024-04-10 00:55:16,387 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
153
+ 2024-04-10 00:55:21,388 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
154
+ 2024-04-10 00:55:24,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
155
+ 2024-04-10 00:55:24,298 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
156
+ 2024-04-10 00:55:24,339 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
157
+ 2024-04-10 00:55:26,413 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
158
+ 2024-04-10 00:55:31,414 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
159
+ 2024-04-10 00:55:36,414 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
160
+ 2024-04-10 00:55:38,339 DEBUG SenderThread:295 [sender.py:send():379] send: stats
161
+ 2024-04-10 00:55:39,299 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
162
+ 2024-04-10 00:55:39,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
163
+ 2024-04-10 00:55:39,340 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
164
+ 2024-04-10 00:55:42,402 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
165
+ 2024-04-10 00:55:47,403 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
166
+ 2024-04-10 00:55:52,404 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
167
+ 2024-04-10 00:55:54,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
168
+ 2024-04-10 00:55:54,298 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
169
+ 2024-04-10 00:55:54,339 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
170
+ 2024-04-10 00:55:58,378 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
171
+ 2024-04-10 00:56:03,379 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
172
+ 2024-04-10 00:56:07,687 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: partial_history
173
+ 2024-04-10 00:56:07,689 DEBUG SenderThread:295 [sender.py:send():379] send: metric
174
+ 2024-04-10 00:56:07,689 DEBUG SenderThread:295 [sender.py:send():379] send: metric
175
+ 2024-04-10 00:56:07,689 DEBUG SenderThread:295 [sender.py:send():379] send: metric
176
+ 2024-04-10 00:56:07,689 DEBUG SenderThread:295 [sender.py:send():379] send: metric
177
+ 2024-04-10 00:56:07,689 DEBUG SenderThread:295 [sender.py:send():379] send: history
178
+ 2024-04-10 00:56:07,689 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: summary_record
179
+ 2024-04-10 00:56:07,691 INFO SenderThread:295 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
180
+ 2024-04-10 00:56:08,340 DEBUG SenderThread:295 [sender.py:send():379] send: stats
181
+ 2024-04-10 00:56:08,344 INFO Thread-12 :295 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/wandb-summary.json
182
+ 2024-04-10 00:56:08,421 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
183
+ 2024-04-10 00:56:09,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
184
+ 2024-04-10 00:56:09,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
185
+ 2024-04-10 00:56:09,301 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
186
+ 2024-04-10 00:56:11,345 INFO Thread-12 :295 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/output.log
187
+ 2024-04-10 00:56:13,432 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
188
+ 2024-04-10 00:56:18,438 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
189
+ 2024-04-10 00:56:19,348 INFO Thread-12 :295 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/config.yaml
190
+ 2024-04-10 00:56:23,577 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
191
+ 2024-04-10 00:56:24,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
192
+ 2024-04-10 00:56:24,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
193
+ 2024-04-10 00:56:24,339 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
194
+ 2024-04-10 00:56:29,379 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
195
+ 2024-04-10 00:56:34,380 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
196
+ 2024-04-10 00:56:38,341 DEBUG SenderThread:295 [sender.py:send():379] send: stats
197
+ 2024-04-10 00:56:39,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
198
+ 2024-04-10 00:56:39,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
199
+ 2024-04-10 00:56:39,339 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
200
+ 2024-04-10 00:56:39,392 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
201
+ 2024-04-10 00:56:44,393 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
202
+ 2024-04-10 00:56:49,394 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
203
+ 2024-04-10 00:56:54,298 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
204
+ 2024-04-10 00:56:54,299 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
205
+ 2024-04-10 00:56:54,339 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
206
+ 2024-04-10 00:56:54,458 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
207
+ 2024-04-10 00:56:59,459 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
208
+ 2024-04-10 00:57:04,460 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
209
+ 2024-04-10 00:57:08,343 DEBUG SenderThread:295 [sender.py:send():379] send: stats
210
+ 2024-04-10 00:57:09,510 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: stop_status
211
+ 2024-04-10 00:57:09,511 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: stop_status
212
+ 2024-04-10 00:57:09,551 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
213
+ 2024-04-10 00:57:09,623 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
214
+ 2024-04-10 00:57:09,988 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: pause
215
+ 2024-04-10 00:57:09,988 INFO HandlerThread:295 [handler.py:handle_request_pause():708] stopping system metrics thread
216
+ 2024-04-10 00:57:09,988 INFO HandlerThread:295 [system_monitor.py:finish():203] Stopping system monitor
217
+ 2024-04-10 00:57:09,989 DEBUG SystemMonitor:295 [system_monitor.py:_start():179] Finished system metrics aggregation loop
218
+ 2024-04-10 00:57:09,989 INFO HandlerThread:295 [interfaces.py:finish():202] Joined cpu monitor
219
+ 2024-04-10 00:57:09,989 DEBUG SystemMonitor:295 [system_monitor.py:_start():183] Publishing last batch of metrics
220
+ 2024-04-10 00:57:09,989 INFO HandlerThread:295 [interfaces.py:finish():202] Joined disk monitor
221
+ 2024-04-10 00:57:09,999 INFO HandlerThread:295 [interfaces.py:finish():202] Joined gpu monitor
222
+ 2024-04-10 00:57:10,000 INFO HandlerThread:295 [interfaces.py:finish():202] Joined memory monitor
223
+ 2024-04-10 00:57:10,000 INFO HandlerThread:295 [interfaces.py:finish():202] Joined network monitor
224
+ 2024-04-10 00:57:10,000 DEBUG SenderThread:295 [sender.py:send():379] send: stats
225
+ 2024-04-10 00:57:14,966 DEBUG SenderThread:295 [sender.py:send():379] send: exit
226
+ 2024-04-10 00:57:14,966 INFO SenderThread:295 [sender.py:send_exit():586] handling exit code: 0
227
+ 2024-04-10 00:57:14,967 INFO SenderThread:295 [sender.py:send_exit():588] handling runtime: 331
228
+ 2024-04-10 00:57:14,967 INFO SenderThread:295 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
229
+ 2024-04-10 00:57:14,967 INFO SenderThread:295 [sender.py:send_exit():594] send defer
230
+ 2024-04-10 00:57:14,968 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
231
+ 2024-04-10 00:57:14,968 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 0
232
+ 2024-04-10 00:57:14,968 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
233
+ 2024-04-10 00:57:14,968 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
234
+ 2024-04-10 00:57:14,968 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 0
235
+ 2024-04-10 00:57:14,968 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 1
236
+ 2024-04-10 00:57:14,968 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
237
+ 2024-04-10 00:57:14,968 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 1
238
+ 2024-04-10 00:57:14,969 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
239
+ 2024-04-10 00:57:14,969 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 1
240
+ 2024-04-10 00:57:14,969 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 2
241
+ 2024-04-10 00:57:14,969 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
242
+ 2024-04-10 00:57:14,969 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 2
243
+ 2024-04-10 00:57:14,969 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
244
+ 2024-04-10 00:57:14,969 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 2
245
+ 2024-04-10 00:57:14,969 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 3
246
+ 2024-04-10 00:57:14,969 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
247
+ 2024-04-10 00:57:14,969 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 3
248
+ 2024-04-10 00:57:14,970 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
249
+ 2024-04-10 00:57:14,970 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 3
250
+ 2024-04-10 00:57:14,970 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 4
251
+ 2024-04-10 00:57:14,970 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
252
+ 2024-04-10 00:57:14,970 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 4
253
+ 2024-04-10 00:57:14,970 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
254
+ 2024-04-10 00:57:14,970 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 4
255
+ 2024-04-10 00:57:14,970 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 5
256
+ 2024-04-10 00:57:14,970 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
257
+ 2024-04-10 00:57:14,971 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 5
258
+ 2024-04-10 00:57:14,971 DEBUG SenderThread:295 [sender.py:send():379] send: summary
259
+ 2024-04-10 00:57:14,971 INFO SenderThread:295 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
260
+ 2024-04-10 00:57:14,972 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
261
+ 2024-04-10 00:57:14,972 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 5
262
+ 2024-04-10 00:57:14,972 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 6
263
+ 2024-04-10 00:57:14,972 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
264
+ 2024-04-10 00:57:14,972 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 6
265
+ 2024-04-10 00:57:14,972 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
266
+ 2024-04-10 00:57:14,972 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 6
267
+ 2024-04-10 00:57:14,972 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 7
268
+ 2024-04-10 00:57:14,972 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: status_report
269
+ 2024-04-10 00:57:14,973 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
270
+ 2024-04-10 00:57:14,973 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 7
271
+ 2024-04-10 00:57:14,973 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
272
+ 2024-04-10 00:57:14,973 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 7
273
+ 2024-04-10 00:57:15,368 INFO Thread-12 :295 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/wandb-summary.json
274
+ 2024-04-10 00:57:15,966 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: poll_exit
275
+ 2024-04-10 00:57:16,656 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 8
276
+ 2024-04-10 00:57:16,656 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: poll_exit
277
+ 2024-04-10 00:57:16,657 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
278
+ 2024-04-10 00:57:16,657 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 8
279
+ 2024-04-10 00:57:16,657 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
280
+ 2024-04-10 00:57:16,657 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 8
281
+ 2024-04-10 00:57:16,657 INFO SenderThread:295 [job_builder.py:build():318] Attempting to build job artifact
282
+ 2024-04-10 00:57:16,659 INFO SenderThread:295 [job_builder.py:_get_source_type():466] no source found
283
+ 2024-04-10 00:57:16,659 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 9
284
+ 2024-04-10 00:57:16,659 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
285
+ 2024-04-10 00:57:16,659 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 9
286
+ 2024-04-10 00:57:16,659 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
287
+ 2024-04-10 00:57:16,660 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 9
288
+ 2024-04-10 00:57:16,660 INFO SenderThread:295 [dir_watcher.py:finish():358] shutting down directory watcher
289
+ 2024-04-10 00:57:16,967 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: poll_exit
290
+ 2024-04-10 00:57:17,370 INFO SenderThread:295 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/output.log
291
+ 2024-04-10 00:57:17,370 INFO SenderThread:295 [dir_watcher.py:finish():388] scan: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files
292
+ 2024-04-10 00:57:17,370 INFO SenderThread:295 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/wandb-summary.json wandb-summary.json
293
+ 2024-04-10 00:57:17,371 INFO SenderThread:295 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/conda-environment.yaml conda-environment.yaml
294
+ 2024-04-10 00:57:17,375 INFO SenderThread:295 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/wandb-metadata.json wandb-metadata.json
295
+ 2024-04-10 00:57:17,375 INFO SenderThread:295 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/requirements.txt requirements.txt
296
+ 2024-04-10 00:57:17,376 INFO SenderThread:295 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/output.log output.log
297
+ 2024-04-10 00:57:17,379 INFO SenderThread:295 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/config.yaml config.yaml
298
+ 2024-04-10 00:57:17,380 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 10
299
+ 2024-04-10 00:57:17,384 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: poll_exit
300
+ 2024-04-10 00:57:17,385 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
301
+ 2024-04-10 00:57:17,385 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 10
302
+ 2024-04-10 00:57:17,388 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
303
+ 2024-04-10 00:57:17,389 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 10
304
+ 2024-04-10 00:57:17,390 INFO SenderThread:295 [file_pusher.py:finish():172] shutting down file pusher
305
+ 2024-04-10 00:57:17,621 INFO wandb-upload_0:295 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/wandb-summary.json
306
+ 2024-04-10 00:57:17,641 INFO wandb-upload_2:295 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/output.log
307
+ 2024-04-10 00:57:17,657 INFO wandb-upload_1:295 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/requirements.txt
308
+ 2024-04-10 00:57:17,666 INFO wandb-upload_3:295 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_005137-yg20qnb4/files/config.yaml
309
+ 2024-04-10 00:57:17,867 INFO Thread-11 (_thread_body):295 [sender.py:transition_state():614] send defer: 11
310
+ 2024-04-10 00:57:17,867 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
311
+ 2024-04-10 00:57:17,867 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 11
312
+ 2024-04-10 00:57:17,867 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
313
+ 2024-04-10 00:57:17,867 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 11
314
+ 2024-04-10 00:57:17,868 INFO SenderThread:295 [file_pusher.py:join():178] waiting for file pusher
315
+ 2024-04-10 00:57:17,868 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 12
316
+ 2024-04-10 00:57:17,868 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
317
+ 2024-04-10 00:57:17,868 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 12
318
+ 2024-04-10 00:57:17,868 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
319
+ 2024-04-10 00:57:17,869 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 12
320
+ 2024-04-10 00:57:17,869 INFO SenderThread:295 [file_stream.py:finish():614] file stream finish called
321
+ 2024-04-10 00:57:17,969 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: poll_exit
322
+ 2024-04-10 00:57:18,068 INFO SenderThread:295 [file_stream.py:finish():618] file stream finish is done
323
+ 2024-04-10 00:57:18,068 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 13
324
+ 2024-04-10 00:57:18,069 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: poll_exit
325
+ 2024-04-10 00:57:18,069 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
326
+ 2024-04-10 00:57:18,069 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 13
327
+ 2024-04-10 00:57:18,069 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
328
+ 2024-04-10 00:57:18,069 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 13
329
+ 2024-04-10 00:57:18,070 INFO SenderThread:295 [sender.py:transition_state():614] send defer: 14
330
+ 2024-04-10 00:57:18,070 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: defer
331
+ 2024-04-10 00:57:18,070 INFO HandlerThread:295 [handler.py:handle_request_defer():172] handle defer: 14
332
+ 2024-04-10 00:57:18,070 DEBUG SenderThread:295 [sender.py:send():379] send: final
333
+ 2024-04-10 00:57:18,071 DEBUG SenderThread:295 [sender.py:send():379] send: footer
334
+ 2024-04-10 00:57:18,071 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: defer
335
+ 2024-04-10 00:57:18,071 INFO SenderThread:295 [sender.py:send_request_defer():610] handle sender defer: 14
336
+ 2024-04-10 00:57:18,071 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: poll_exit
337
+ 2024-04-10 00:57:18,072 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: poll_exit
338
+ 2024-04-10 00:57:18,072 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: poll_exit
339
+ 2024-04-10 00:57:18,073 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: poll_exit
340
+ 2024-04-10 00:57:18,073 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: server_info
341
+ 2024-04-10 00:57:18,073 DEBUG SenderThread:295 [sender.py:send_request():406] send_request: server_info
342
+ 2024-04-10 00:57:18,076 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: get_summary
343
+ 2024-04-10 00:57:18,077 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: sampled_history
344
+ 2024-04-10 00:57:18,078 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: internal_messages
345
+ 2024-04-10 00:57:18,141 INFO MainThread:295 [wandb_run.py:_footer_history_summary_info():3920] rendering history
346
+ 2024-04-10 00:57:18,141 INFO MainThread:295 [wandb_run.py:_footer_history_summary_info():3952] rendering summary
347
+ 2024-04-10 00:57:18,141 INFO MainThread:295 [wandb_run.py:_footer_sync_info():3879] logging synced files
348
+ 2024-04-10 00:57:18,142 DEBUG HandlerThread:295 [handler.py:handle_request():146] handle_request: shutdown
349
+ 2024-04-10 00:57:18,142 INFO HandlerThread:295 [handler.py:finish():866] shutting down handler
350
+ 2024-04-10 00:57:19,074 INFO WriterThread:295 [datastore.py:close():296] close: /kaggle/working/wandb/run-20240410_005137-yg20qnb4/run-yg20qnb4.wandb
351
+ 2024-04-10 00:57:19,141 INFO SenderThread:295 [sender.py:finish():1546] shutting down sender
352
+ 2024-04-10 00:57:19,141 INFO SenderThread:295 [file_pusher.py:finish():172] shutting down file pusher
353
+ 2024-04-10 00:57:19,141 INFO SenderThread:295 [file_pusher.py:join():178] waiting for file pusher
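Editor's note: the "send defer: 0" through "handle sender defer: 14" lines above are wandb's staged shutdown handshake between HandlerThread and SenderThread (stop monitors, write the summary, push files, close the datastore). The snippet below is a toy Python sketch only, not wandb's implementation; it merely illustrates why each stage appears as a matched pair before the next stage starts.

```python
# Toy sketch only, not wandb's code: each stage is acknowledged before the
# next begins, mirroring the "send defer: N" / "handle sender defer: N"
# pairs for N = 0..14 in the log above.
def run_staged_shutdown(stages):
    for n, stage in enumerate(stages):
        print(f"send defer: {n}")
        stage()
        print(f"handle sender defer: {n}")


run_staged_shutdown([
    lambda: None,                                   # flush pending records
    lambda: print("  stop system monitor"),         # defer 2 in the real log
    lambda: print("  write wandb-summary.json"),    # defer 5
    lambda: print("  shut down dir watcher / file pusher"),  # defer 9-11
])
```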
wandb/run-20240410_005137-yg20qnb4/logs/debug.log ADDED
@@ -0,0 +1,33 @@
1
+ 2024-04-10 00:51:37,650 INFO MainThread:248 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-10 00:51:37,650 INFO MainThread:248 [wandb_setup.py:_flush():76] Configure stats pid to 248
3
+ 2024-04-10 00:51:37,650 INFO MainThread:248 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240410_005137-yg20qnb4/logs/debug.log
10
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240410_005137-yg20qnb4/logs/debug-internal.log
11
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7a7f6437ded0>
12
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_init.py:init():567] calling init triggers
13
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
14
+ config: {}
15
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_init.py:init():617] starting backend
16
+ 2024-04-10 00:51:37,651 INFO MainThread:248 [wandb_init.py:init():621] setting up manager
17
+ 2024-04-10 00:51:37,653 INFO MainThread:248 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
18
+ 2024-04-10 00:51:37,654 INFO MainThread:248 [wandb_init.py:init():629] backend started and connected
19
+ 2024-04-10 00:51:37,666 INFO MainThread:248 [wandb_run.py:_label_probe_notebook():1299] probe notebook
20
+ 2024-04-10 00:51:38,044 INFO MainThread:248 [wandb_init.py:init():721] updated telemetry
21
+ 2024-04-10 00:51:38,047 INFO MainThread:248 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
22
+ 2024-04-10 00:51:38,252 INFO MainThread:248 [wandb_run.py:_on_init():2344] communicating current version
23
+ 2024-04-10 00:51:38,311 INFO MainThread:248 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
24
+
25
+ 2024-04-10 00:51:38,311 INFO MainThread:248 [wandb_init.py:init():805] starting run threads in backend
26
+ 2024-04-10 00:51:54,297 INFO MainThread:248 [wandb_run.py:_console_start():2323] atexit reg
27
+ 2024-04-10 00:51:54,297 INFO MainThread:248 [wandb_run.py:_redirect():2178] redirect: wrap_raw
28
+ 2024-04-10 00:51:54,299 INFO MainThread:248 [wandb_run.py:_redirect():2243] Wrapping output streams.
29
+ 2024-04-10 00:51:54,299 INFO MainThread:248 [wandb_run.py:_redirect():2268] Redirects installed.
30
+ 2024-04-10 00:51:54,300 INFO MainThread:248 [wandb_init.py:init():848] run started, returning control to user process
31
+ 2024-04-10 00:51:54,305 INFO MainThread:248 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 7e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.07, 'num_train_epochs': 5, 'max_steps': 20000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr10_00-49-34_d91c9dc8354a', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 
'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
32
+ 2024-04-10 00:57:09,987 INFO MainThread:248 [jupyter.py:save_ipynb():373] not saving jupyter notebook
33
+ 2024-04-10 00:57:09,988 INFO MainThread:248 [wandb_init.py:_pause_backend():438] pausing backend
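Editor's note: the config_cb dump above records the model, 4-bit quantization, and TrainingArguments state for this run. The sketch below is a hedged reconstruction of an equivalent setup in code; every hyperparameter value is copied from the logged dict, `device_map` is an assumption, and the dataset/PEFT wiring is omitted because it is not visible in this log.

```python
# Sketch reconstructed from the config_cb dict logged above; not the original
# training script. Only values present in the log are used unless noted.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig, TrainingArguments

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # 'load_in_4bit': True
    bnb_4bit_quant_type="nf4",             # 'bnb_4bit_quant_type': 'nf4'
    bnb_4bit_compute_dtype=torch.float16,  # 'bnb_4bit_compute_dtype': 'float16'
    bnb_4bit_use_double_quant=False,
)

model = AutoModelForCausalLM.from_pretrained(
    "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T",
    quantization_config=bnb_config,
    device_map="auto",                     # assumption: not recorded in the log
)
model.config.use_cache = False             # 'use_cache': False

training_args = TrainingArguments(
    output_dir="/kaggle/working/",
    per_device_train_batch_size=8,
    learning_rate=7e-6,
    weight_decay=0.001,
    max_grad_norm=0.07,
    num_train_epochs=5,
    max_steps=20000,
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    optim="paged_adamw_32bit",
    logging_steps=100,
    save_strategy="steps",
    save_steps=100,
    save_total_limit=1,
    gradient_checkpointing=True,
    dataloader_num_workers=8,
    auto_find_batch_size=True,
    report_to=["tensorboard", "wandb"],
)
```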
wandb/run-20240410_005137-yg20qnb4/run-yg20qnb4.wandb ADDED
Binary file (21.6 kB)
 
wandb/run-20240410_005959-52om3vq0/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240410_005959-52om3vq0/files/config.yaml ADDED
@@ -0,0 +1,38 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712710799.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 3:
30
+ - 23
31
+ 4: 3.10.13
32
+ 5: 0.16.5
33
+ 6: 4.39.3
34
+ 8:
35
+ - 1
36
+ - 2
37
+ - 5
38
+ 13: linux-x86_64
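Editor's note: the `_wandb` block above is the run's telemetry header; the numeric keys are internal wandb feature codes, while the readable fields record the Python, CLI, and framework versions. If only those versions are needed, a small PyYAML read is enough; a minimal sketch, assuming this run's config.yaml path:

```python
# Minimal sketch: pull the recorded versions out of a wandb run's config.yaml.
import yaml

path = "wandb/run-20240410_005959-52om3vq0/files/config.yaml"
with open(path) as f:
    cfg = yaml.safe_load(f)

meta = cfg["_wandb"]["value"]
print(meta["python_version"])                           # 3.10.13
print(meta["cli_version"])                              # 0.16.5
print(meta["framework"], meta["huggingface_version"])   # huggingface 4.39.3
```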
wandb/run-20240410_005959-52om3vq0/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-10T01:00:00.075285",
5
+ "startedAt": "2024-04-10T00:59:59.340959",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "d91c9dc8354a",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.156,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.156,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.156,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.156,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.156,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5569.50146484375
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
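Editor's note: wandb-metadata.json above comes from wandb's system probe at run start (CPU, disk, GPU, and memory on the Kaggle worker). A rough equivalent of those fields can be gathered with psutil plus an nvidia-smi query, as sketched below; the use of nvidia-smi on PATH is an assumption about the environment, and the units are approximate rather than wandb's exact reporting.

```python
# Rough, hedged equivalent of the fields seen in wandb-metadata.json.
# psutil covers CPU/disk/memory; GPU names come from nvidia-smi (assumption:
# it is on PATH, as on Kaggle T4 instances).
import json
import subprocess

import psutil

meta = {
    "cpu_count": psutil.cpu_count(logical=False),
    "cpu_count_logical": psutil.cpu_count(logical=True),
    "cpu_freq": psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None,
    "disk_gb_total": psutil.disk_usage("/").total / 2**30,
    "memory_gb_total": psutil.virtual_memory().total / 2**30,
}

try:
    out = subprocess.check_output(
        ["nvidia-smi", "--query-gpu=name,memory.total", "--format=csv,noheader"],
        text=True,
    )
    meta["gpu_devices"] = [line.strip() for line in out.splitlines() if line.strip()]
except (OSError, subprocess.CalledProcessError):
    meta["gpu_devices"] = []

print(json.dumps(meta, indent=2))
```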
wandb/run-20240410_005959-52om3vq0/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
1
+ {"_wandb": {"runtime": 17}}
wandb/run-20240410_005959-52om3vq0/logs/debug-internal.log ADDED
@@ -0,0 +1,179 @@
1
+ 2024-04-10 00:59:59,347 INFO StreamThr :446 [internal.py:wandb_internal():86] W&B internal server running at pid: 446, started at: 2024-04-10 00:59:59.347095
2
+ 2024-04-10 00:59:59,349 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-10 00:59:59,733 INFO WriterThread:446 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240410_005959-52om3vq0/run-52om3vq0.wandb
4
+ 2024-04-10 00:59:59,733 DEBUG SenderThread:446 [sender.py:send():379] send: header
5
+ 2024-04-10 00:59:59,736 DEBUG SenderThread:446 [sender.py:send():379] send: run
6
+ 2024-04-10 00:59:59,972 INFO SenderThread:446 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files
7
+ 2024-04-10 00:59:59,972 INFO SenderThread:446 [sender.py:_start_run_threads():1124] run started: 52om3vq0 with start time 1712710799.346898
8
+ 2024-04-10 00:59:59,980 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-10 00:59:59,980 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-10 01:00:00,050 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-10 01:00:00,061 DEBUG HandlerThread:446 [system_info.py:__init__():26] System info init
12
+ 2024-04-10 01:00:00,061 DEBUG HandlerThread:446 [system_info.py:__init__():41] System info init done
13
+ 2024-04-10 01:00:00,061 INFO HandlerThread:446 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-10 01:00:00,061 INFO SystemMonitor:446 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-10 01:00:00,061 INFO SystemMonitor:446 [interfaces.py:start():190] Started cpu monitoring
16
+ 2024-04-10 01:00:00,062 INFO SystemMonitor:446 [interfaces.py:start():190] Started disk monitoring
17
+ 2024-04-10 01:00:00,062 INFO HandlerThread:446 [system_monitor.py:probe():214] Collecting system info
18
+ 2024-04-10 01:00:00,063 INFO SystemMonitor:446 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-10 01:00:00,064 INFO SystemMonitor:446 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-10 01:00:00,065 INFO SystemMonitor:446 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-10 01:00:00,075 DEBUG HandlerThread:446 [system_info.py:probe():150] Probing system
22
+ 2024-04-10 01:00:00,077 DEBUG HandlerThread:446 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-10 01:00:00,077 DEBUG HandlerThread:446 [system_info.py:probe():198] Probing system done
24
+ 2024-04-10 01:00:00,077 DEBUG HandlerThread:446 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-10T01:00:00.075285', 'startedAt': '2024-04-10T00:59:59.340959', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': 'd91c9dc8354a', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.156, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}, {'current': 2000.156, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5569.50146484375}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-10 01:00:00,077 INFO HandlerThread:446 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-10 01:00:00,077 INFO HandlerThread:446 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-10 01:00:00,077 DEBUG HandlerThread:446 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-10 01:00:00,974 INFO Thread-12 :446 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/conda-environment.yaml
29
+ 2024-04-10 01:00:15,091 ERROR HandlerThread:446 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
+ Traceback (most recent call last):
31
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
+ subprocess.call(
33
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
34
+ return p.wait(timeout=timeout)
35
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
36
+ return self._wait(timeout=timeout)
37
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
+ raise TimeoutExpired(self.args, timeout)
39
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-10 01:00:15,092 DEBUG HandlerThread:446 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-10 01:00:15,092 INFO HandlerThread:446 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-10 01:00:15,097 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-10 01:00:15,097 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-10 01:00:15,098 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-10 01:00:15,098 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-10 01:00:15,098 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-10 01:00:15,098 DEBUG SenderThread:446 [sender.py:send():379] send: files
48
+ 2024-04-10 01:00:15,098 INFO SenderThread:446 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
49
+ 2024-04-10 01:00:15,388 INFO wandb-upload_0:446 [upload_job.py:push():131] Uploaded file /tmp/tmp0ozzi1ivwandb/9tvoe406-wandb-metadata.json
50
+ 2024-04-10 01:00:15,977 INFO Thread-12 :446 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/wandb-metadata.json
51
+ 2024-04-10 01:00:17,470 DEBUG SenderThread:446 [sender.py:send():379] send: exit
52
+ 2024-04-10 01:00:17,470 INFO SenderThread:446 [sender.py:send_exit():586] handling exit code: 0
53
+ 2024-04-10 01:00:17,470 INFO SenderThread:446 [sender.py:send_exit():588] handling runtime: 17
54
+ 2024-04-10 01:00:17,472 INFO SenderThread:446 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
55
+ 2024-04-10 01:00:17,472 INFO SenderThread:446 [sender.py:send_exit():594] send defer
56
+ 2024-04-10 01:00:17,472 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
57
+ 2024-04-10 01:00:17,472 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 0
58
+ 2024-04-10 01:00:17,472 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
59
+ 2024-04-10 01:00:17,472 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 0
60
+ 2024-04-10 01:00:17,473 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 1
61
+ 2024-04-10 01:00:17,473 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
62
+ 2024-04-10 01:00:17,473 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 1
63
+ 2024-04-10 01:00:17,473 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
64
+ 2024-04-10 01:00:17,473 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 1
65
+ 2024-04-10 01:00:17,473 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 2
66
+ 2024-04-10 01:00:17,473 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
67
+ 2024-04-10 01:00:17,473 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 2
68
+ 2024-04-10 01:00:17,473 INFO HandlerThread:446 [system_monitor.py:finish():203] Stopping system monitor
69
+ 2024-04-10 01:00:17,474 DEBUG SystemMonitor:446 [system_monitor.py:_start():172] Starting system metrics aggregation loop
70
+ 2024-04-10 01:00:17,474 DEBUG SystemMonitor:446 [system_monitor.py:_start():179] Finished system metrics aggregation loop
71
+ 2024-04-10 01:00:17,474 DEBUG SystemMonitor:446 [system_monitor.py:_start():183] Publishing last batch of metrics
72
+ 2024-04-10 01:00:17,474 INFO HandlerThread:446 [interfaces.py:finish():202] Joined cpu monitor
73
+ 2024-04-10 01:00:17,475 INFO HandlerThread:446 [interfaces.py:finish():202] Joined disk monitor
74
+ 2024-04-10 01:00:17,489 INFO HandlerThread:446 [interfaces.py:finish():202] Joined gpu monitor
75
+ 2024-04-10 01:00:17,489 INFO HandlerThread:446 [interfaces.py:finish():202] Joined memory monitor
76
+ 2024-04-10 01:00:17,489 INFO HandlerThread:446 [interfaces.py:finish():202] Joined network monitor
77
+ 2024-04-10 01:00:17,490 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
78
+ 2024-04-10 01:00:17,490 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 2
79
+ 2024-04-10 01:00:17,490 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 3
80
+ 2024-04-10 01:00:17,490 DEBUG SenderThread:446 [sender.py:send():379] send: stats
81
+ 2024-04-10 01:00:17,490 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
82
+ 2024-04-10 01:00:17,490 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 3
83
+ 2024-04-10 01:00:17,490 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
84
+ 2024-04-10 01:00:17,491 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 3
85
+ 2024-04-10 01:00:17,491 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 4
86
+ 2024-04-10 01:00:17,491 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
87
+ 2024-04-10 01:00:17,491 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 4
88
+ 2024-04-10 01:00:17,491 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
89
+ 2024-04-10 01:00:17,491 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 4
90
+ 2024-04-10 01:00:17,491 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 5
91
+ 2024-04-10 01:00:17,491 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
92
+ 2024-04-10 01:00:17,491 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 5
93
+ 2024-04-10 01:00:17,492 DEBUG SenderThread:446 [sender.py:send():379] send: summary
94
+ 2024-04-10 01:00:17,492 INFO SenderThread:446 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
95
+ 2024-04-10 01:00:17,492 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
96
+ 2024-04-10 01:00:17,492 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 5
97
+ 2024-04-10 01:00:17,492 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 6
98
+ 2024-04-10 01:00:17,492 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
99
+ 2024-04-10 01:00:17,492 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 6
100
+ 2024-04-10 01:00:17,493 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
101
+ 2024-04-10 01:00:17,493 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 6
102
+ 2024-04-10 01:00:17,493 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 7
103
+ 2024-04-10 01:00:17,493 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: status_report
104
+ 2024-04-10 01:00:17,493 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
105
+ 2024-04-10 01:00:17,493 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 7
106
+ 2024-04-10 01:00:17,493 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
107
+ 2024-04-10 01:00:17,493 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 7
108
+ 2024-04-10 01:00:17,494 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 8
109
+ 2024-04-10 01:00:17,494 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
110
+ 2024-04-10 01:00:17,494 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 8
111
+ 2024-04-10 01:00:17,494 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
112
+ 2024-04-10 01:00:17,494 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 8
113
+ 2024-04-10 01:00:17,494 INFO SenderThread:446 [job_builder.py:build():318] Attempting to build job artifact
114
+ 2024-04-10 01:00:17,494 WARNING SenderThread:446 [job_builder.py:_log_if_verbose():210] No requirements.txt found, not creating job artifact. See https://docs.wandb.ai/guides/launch/create-job
115
+ 2024-04-10 01:00:17,494 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 9
116
+ 2024-04-10 01:00:17,494 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
117
+ 2024-04-10 01:00:17,495 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 9
118
+ 2024-04-10 01:00:17,495 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
119
+ 2024-04-10 01:00:17,495 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 9
120
+ 2024-04-10 01:00:17,495 INFO SenderThread:446 [dir_watcher.py:finish():358] shutting down directory watcher
121
+ 2024-04-10 01:00:17,978 INFO SenderThread:446 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/wandb-summary.json
122
+ 2024-04-10 01:00:17,979 INFO SenderThread:446 [dir_watcher.py:finish():388] scan: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files
123
+ 2024-04-10 01:00:17,979 INFO SenderThread:446 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/wandb-summary.json wandb-summary.json
124
+ 2024-04-10 01:00:17,979 INFO SenderThread:446 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/conda-environment.yaml conda-environment.yaml
125
+ 2024-04-10 01:00:17,979 INFO SenderThread:446 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/wandb-metadata.json wandb-metadata.json
126
+ 2024-04-10 01:00:17,982 INFO SenderThread:446 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/config.yaml config.yaml
127
+ 2024-04-10 01:00:17,983 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 10
128
+ 2024-04-10 01:00:17,986 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
129
+ 2024-04-10 01:00:17,987 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 10
130
+ 2024-04-10 01:00:17,988 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
131
+ 2024-04-10 01:00:17,988 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 10
132
+ 2024-04-10 01:00:17,988 INFO SenderThread:446 [file_pusher.py:finish():172] shutting down file pusher
133
+ 2024-04-10 01:00:18,197 INFO wandb-upload_0:446 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/wandb-summary.json
134
+ 2024-04-10 01:00:18,241 INFO wandb-upload_1:446 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240410_005959-52om3vq0/files/config.yaml
135
+ 2024-04-10 01:00:18,442 INFO Thread-11 (_thread_body):446 [sender.py:transition_state():614] send defer: 11
136
+ 2024-04-10 01:00:18,442 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
137
+ 2024-04-10 01:00:18,442 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 11
138
+ 2024-04-10 01:00:18,442 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
139
+ 2024-04-10 01:00:18,442 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 11
140
+ 2024-04-10 01:00:18,442 INFO SenderThread:446 [file_pusher.py:join():178] waiting for file pusher
141
+ 2024-04-10 01:00:18,443 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 12
142
+ 2024-04-10 01:00:18,443 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
143
+ 2024-04-10 01:00:18,443 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 12
144
+ 2024-04-10 01:00:18,443 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
145
+ 2024-04-10 01:00:18,443 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 12
146
+ 2024-04-10 01:00:18,443 INFO SenderThread:446 [file_stream.py:finish():614] file stream finish called
147
+ 2024-04-10 01:00:18,469 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: poll_exit
148
+ 2024-04-10 01:00:18,628 INFO SenderThread:446 [file_stream.py:finish():618] file stream finish is done
149
+ 2024-04-10 01:00:18,628 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 13
150
+ 2024-04-10 01:00:18,628 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: poll_exit
151
+ 2024-04-10 01:00:18,628 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
152
+ 2024-04-10 01:00:18,628 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 13
153
+ 2024-04-10 01:00:18,629 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
154
+ 2024-04-10 01:00:18,629 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 13
155
+ 2024-04-10 01:00:18,629 INFO SenderThread:446 [sender.py:transition_state():614] send defer: 14
156
+ 2024-04-10 01:00:18,629 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: defer
157
+ 2024-04-10 01:00:18,629 INFO HandlerThread:446 [handler.py:handle_request_defer():172] handle defer: 14
158
+ 2024-04-10 01:00:18,629 DEBUG SenderThread:446 [sender.py:send():379] send: final
159
+ 2024-04-10 01:00:18,629 DEBUG SenderThread:446 [sender.py:send():379] send: footer
160
+ 2024-04-10 01:00:18,629 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: defer
161
+ 2024-04-10 01:00:18,629 INFO SenderThread:446 [sender.py:send_request_defer():610] handle sender defer: 14
162
+ 2024-04-10 01:00:18,631 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: poll_exit
163
+ 2024-04-10 01:00:18,631 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: poll_exit
164
+ 2024-04-10 01:00:18,631 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: poll_exit
165
+ 2024-04-10 01:00:18,632 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: poll_exit
166
+ 2024-04-10 01:00:18,632 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: server_info
167
+ 2024-04-10 01:00:18,632 DEBUG SenderThread:446 [sender.py:send_request():406] send_request: server_info
168
+ 2024-04-10 01:00:18,635 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: get_summary
169
+ 2024-04-10 01:00:18,636 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: sampled_history
170
+ 2024-04-10 01:00:18,636 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: internal_messages
171
+ 2024-04-10 01:00:18,691 INFO MainThread:446 [wandb_run.py:_footer_history_summary_info():3920] rendering history
172
+ 2024-04-10 01:00:18,691 INFO MainThread:446 [wandb_run.py:_footer_history_summary_info():3952] rendering summary
173
+ 2024-04-10 01:00:18,691 INFO MainThread:446 [wandb_run.py:_footer_sync_info():3879] logging synced files
174
+ 2024-04-10 01:00:18,692 DEBUG HandlerThread:446 [handler.py:handle_request():146] handle_request: shutdown
175
+ 2024-04-10 01:00:18,692 INFO HandlerThread:446 [handler.py:finish():866] shutting down handler
176
+ 2024-04-10 01:00:19,633 INFO WriterThread:446 [datastore.py:close():296] close: /kaggle/working/wandb/run-20240410_005959-52om3vq0/run-52om3vq0.wandb
177
+ 2024-04-10 01:00:19,691 INFO SenderThread:446 [sender.py:finish():1546] shutting down sender
178
+ 2024-04-10 01:00:19,691 INFO SenderThread:446 [file_pusher.py:finish():172] shutting down file pusher
179
+ 2024-04-10 01:00:19,691 INFO SenderThread:446 [file_pusher.py:join():178] waiting for file pusher
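Editor's note: the ERROR at 01:00:15 shows that `conda env export` exceeded wandb's 15-second budget, so conda-environment.yaml stays empty for this run, and the later warning about a missing requirements.txt means no job artifact is built either. The timeout itself is ordinary `subprocess` behaviour; the generic sketch below reproduces the failure mode, with only the 15-second value taken from the log.

```python
# Generic illustration of the failure mode logged above: a slow subprocess
# call with a timeout raises subprocess.TimeoutExpired, which wandb catches
# and reports as "Error saving conda packages".
import subprocess

try:
    subprocess.call(["conda", "env", "export"], timeout=15)
except subprocess.TimeoutExpired as exc:
    print(f"Command {exc.cmd!r} timed out after {exc.timeout} seconds")
except FileNotFoundError:
    print("conda is not installed on this machine")
```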
wandb/run-20240410_005959-52om3vq0/logs/debug.log ADDED
@@ -0,0 +1,53 @@
1
+ 2024-04-10 00:59:59,342 INFO MainThread:399 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-10 00:59:59,342 INFO MainThread:399 [wandb_setup.py:_flush():76] Configure stats pid to 399
3
+ 2024-04-10 00:59:59,342 INFO MainThread:399 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-10 00:59:59,342 INFO MainThread:399 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240410_005959-52om3vq0/logs/debug.log
10
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240410_005959-52om3vq0/logs/debug-internal.log
11
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7d1efce690f0>
12
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_init.py:init():567] calling init triggers
13
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
14
+ config: {}
15
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_init.py:init():617] starting backend
16
+ 2024-04-10 00:59:59,343 INFO MainThread:399 [wandb_init.py:init():621] setting up manager
17
+ 2024-04-10 00:59:59,345 INFO MainThread:399 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
18
+ 2024-04-10 00:59:59,346 INFO MainThread:399 [wandb_init.py:init():629] backend started and connected
19
+ 2024-04-10 00:59:59,359 INFO MainThread:399 [wandb_run.py:_label_probe_notebook():1299] probe notebook
20
+ 2024-04-10 00:59:59,732 INFO MainThread:399 [wandb_init.py:init():721] updated telemetry
21
+ 2024-04-10 00:59:59,735 INFO MainThread:399 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
22
+ 2024-04-10 00:59:59,979 INFO MainThread:399 [wandb_run.py:_on_init():2344] communicating current version
23
+ 2024-04-10 01:00:00,044 INFO MainThread:399 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
24
+
25
+ 2024-04-10 01:00:00,044 INFO MainThread:399 [wandb_init.py:init():805] starting run threads in backend
26
+ 2024-04-10 01:00:11,161 ERROR MainThread:399 [wandb_init.py:init():1215] error
27
+ Traceback (most recent call last):
28
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 1187, in init
29
+ run = wi.init()
30
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 818, in init
31
+ run_start_result = run_start_handle.wait(timeout=30)
32
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/lib/mailbox.py", line 283, in wait
33
+ found, abandoned = self._slot._get_and_clear(timeout=wait_timeout)
34
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/lib/mailbox.py", line 130, in _get_and_clear
35
+ if self._wait(timeout=timeout):
36
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/lib/mailbox.py", line 126, in _wait
37
+ return self._event.wait(timeout=timeout)
38
+ File "/opt/conda/lib/python3.10/threading.py", line 607, in wait
39
+ signaled = self._cond.wait(timeout)
40
+ File "/opt/conda/lib/python3.10/threading.py", line 324, in wait
41
+ gotit = waiter.acquire(True, timeout)
42
+ KeyboardInterrupt
43
+
44
+ During handling of the above exception, another exception occurred:
45
+
46
+ Traceback (most recent call last):
47
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 1195, in init
48
+ getcaller()
49
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 855, in getcaller
50
+ src, line, func, stack = logger.findCaller(stack_info=True)
51
+ File "/root/.local/lib/python3.10/site-packages/log.py", line 45, in findCaller
52
+ traceback.print_stack(f, file=sio)
53
+ NameError: name 'traceback' is not defined
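Note on the traceback above: the run was interrupted (KeyboardInterrupt) while wandb.init() was waiting on its backend, and the secondary NameError comes from a locally patched /root/.local/lib/python3.10/site-packages/log.py whose findCaller calls traceback.print_stack without ever importing traceback. Below is a minimal sketch of what a fixed override could look like; it assumes the patch mirrors logging.Logger.findCaller, so the class name and frame walking here are illustrative, not the contents of the actual patched file.

```python
# Hypothetical sketch of a fixed findCaller override (the real patched log.py is
# not part of this commit); the key point is the `import traceback` whose absence
# produces the "NameError: name 'traceback' is not defined" seen above.
import io
import logging
import sys
import traceback  # the missing import


class PatchedLogger(logging.Logger):
    def findCaller(self, stack_info=False, stacklevel=1):
        """Return (filename, lineno, funcname, stackinfo) for the logging caller."""
        f = sys._getframe(1)
        rv = "(unknown file)", 0, "(unknown function)", None
        while f is not None:
            co = f.f_code
            # Skip frames that belong to the logging machinery itself.
            if co.co_filename == logging._srcfile:
                f = f.f_back
                continue
            sinfo = None
            if stack_info:
                sio = io.StringIO()
                sio.write("Stack (most recent call last):\n")
                traceback.print_stack(f, file=sio)  # works once traceback is imported
                sinfo = sio.getvalue().rstrip("\n")
            rv = co.co_filename, f.f_lineno, co.co_name, sinfo
            break
        return rv


if __name__ == "__main__":
    logging.setLoggerClass(PatchedLogger)
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("demo").info("caller lookup works", stack_info=True)
```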
wandb/run-20240410_005959-52om3vq0/run-52om3vq0.wandb ADDED
Binary file (494 Bytes).
 
wandb/run-20240410_010250-ft4a6i5j/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240410_010250-ft4a6i5j/files/config.yaml ADDED
@@ -0,0 +1,686 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712710970.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ 4: 3.10.13
48
+ 5: 0.16.5
49
+ 6: 4.39.3
50
+ 8:
51
+ - 1
52
+ - 2
53
+ - 5
54
+ 9:
55
+ 1: transformers_trainer
56
+ 13: linux-x86_64
57
+ m:
58
+ - 1: train/global_step
59
+ 6:
60
+ - 3
61
+ - 1: train/loss
62
+ 5: 1
63
+ 6:
64
+ - 1
65
+ - 1: train/grad_norm
66
+ 5: 1
67
+ 6:
68
+ - 1
69
+ - 1: train/learning_rate
70
+ 5: 1
71
+ 6:
72
+ - 1
73
+ - 1: train/epoch
74
+ 5: 1
75
+ 6:
76
+ - 1
77
+ vocab_size:
78
+ desc: null
79
+ value: 32000
80
+ max_position_embeddings:
81
+ desc: null
82
+ value: 2048
83
+ hidden_size:
84
+ desc: null
85
+ value: 2048
86
+ intermediate_size:
87
+ desc: null
88
+ value: 5632
89
+ num_hidden_layers:
90
+ desc: null
91
+ value: 22
92
+ num_attention_heads:
93
+ desc: null
94
+ value: 32
95
+ num_key_value_heads:
96
+ desc: null
97
+ value: 4
98
+ hidden_act:
99
+ desc: null
100
+ value: silu
101
+ initializer_range:
102
+ desc: null
103
+ value: 0.02
104
+ rms_norm_eps:
105
+ desc: null
106
+ value: 1.0e-05
107
+ pretraining_tp:
108
+ desc: null
109
+ value: 1
110
+ use_cache:
111
+ desc: null
112
+ value: false
113
+ rope_theta:
114
+ desc: null
115
+ value: 10000.0
116
+ rope_scaling:
117
+ desc: null
118
+ value: null
119
+ attention_bias:
120
+ desc: null
121
+ value: false
122
+ attention_dropout:
123
+ desc: null
124
+ value: 0.0
125
+ return_dict:
126
+ desc: null
127
+ value: true
128
+ output_hidden_states:
129
+ desc: null
130
+ value: false
131
+ output_attentions:
132
+ desc: null
133
+ value: false
134
+ torchscript:
135
+ desc: null
136
+ value: false
137
+ torch_dtype:
138
+ desc: null
139
+ value: float32
140
+ use_bfloat16:
141
+ desc: null
142
+ value: false
143
+ tf_legacy_loss:
144
+ desc: null
145
+ value: false
146
+ pruned_heads:
147
+ desc: null
148
+ value: {}
149
+ tie_word_embeddings:
150
+ desc: null
151
+ value: false
152
+ chunk_size_feed_forward:
153
+ desc: null
154
+ value: 0
155
+ is_encoder_decoder:
156
+ desc: null
157
+ value: false
158
+ is_decoder:
159
+ desc: null
160
+ value: false
161
+ cross_attention_hidden_size:
162
+ desc: null
163
+ value: null
164
+ add_cross_attention:
165
+ desc: null
166
+ value: false
167
+ tie_encoder_decoder:
168
+ desc: null
169
+ value: false
170
+ max_length:
171
+ desc: null
172
+ value: 20
173
+ min_length:
174
+ desc: null
175
+ value: 0
176
+ do_sample:
177
+ desc: null
178
+ value: false
179
+ early_stopping:
180
+ desc: null
181
+ value: false
182
+ num_beams:
183
+ desc: null
184
+ value: 1
185
+ num_beam_groups:
186
+ desc: null
187
+ value: 1
188
+ diversity_penalty:
189
+ desc: null
190
+ value: 0.0
191
+ temperature:
192
+ desc: null
193
+ value: 1.0
194
+ top_k:
195
+ desc: null
196
+ value: 50
197
+ top_p:
198
+ desc: null
199
+ value: 1.0
200
+ typical_p:
201
+ desc: null
202
+ value: 1.0
203
+ repetition_penalty:
204
+ desc: null
205
+ value: 1.0
206
+ length_penalty:
207
+ desc: null
208
+ value: 1.0
209
+ no_repeat_ngram_size:
210
+ desc: null
211
+ value: 0
212
+ encoder_no_repeat_ngram_size:
213
+ desc: null
214
+ value: 0
215
+ bad_words_ids:
216
+ desc: null
217
+ value: null
218
+ num_return_sequences:
219
+ desc: null
220
+ value: 1
221
+ output_scores:
222
+ desc: null
223
+ value: false
224
+ return_dict_in_generate:
225
+ desc: null
226
+ value: false
227
+ forced_bos_token_id:
228
+ desc: null
229
+ value: null
230
+ forced_eos_token_id:
231
+ desc: null
232
+ value: null
233
+ remove_invalid_values:
234
+ desc: null
235
+ value: false
236
+ exponential_decay_length_penalty:
237
+ desc: null
238
+ value: null
239
+ suppress_tokens:
240
+ desc: null
241
+ value: null
242
+ begin_suppress_tokens:
243
+ desc: null
244
+ value: null
245
+ architectures:
246
+ desc: null
247
+ value:
248
+ - LlamaForCausalLM
249
+ finetuning_task:
250
+ desc: null
251
+ value: null
252
+ id2label:
253
+ desc: null
254
+ value:
255
+ '0': LABEL_0
256
+ '1': LABEL_1
257
+ label2id:
258
+ desc: null
259
+ value:
260
+ LABEL_0: 0
261
+ LABEL_1: 1
262
+ tokenizer_class:
263
+ desc: null
264
+ value: null
265
+ prefix:
266
+ desc: null
267
+ value: null
268
+ bos_token_id:
269
+ desc: null
270
+ value: 1
271
+ pad_token_id:
272
+ desc: null
273
+ value: null
274
+ eos_token_id:
275
+ desc: null
276
+ value: 2
277
+ sep_token_id:
278
+ desc: null
279
+ value: null
280
+ decoder_start_token_id:
281
+ desc: null
282
+ value: null
283
+ task_specific_params:
284
+ desc: null
285
+ value: null
286
+ problem_type:
287
+ desc: null
288
+ value: null
289
+ _name_or_path:
290
+ desc: null
291
+ value: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
292
+ transformers_version:
293
+ desc: null
294
+ value: 4.39.3
295
+ model_type:
296
+ desc: null
297
+ value: llama
298
+ quantization_config:
299
+ desc: null
300
+ value:
301
+ quant_method: QuantizationMethod.BITS_AND_BYTES
302
+ _load_in_8bit: false
303
+ _load_in_4bit: true
304
+ llm_int8_threshold: 6.0
305
+ llm_int8_skip_modules: null
306
+ llm_int8_enable_fp32_cpu_offload: false
307
+ llm_int8_has_fp16_weight: false
308
+ bnb_4bit_quant_type: nf4
309
+ bnb_4bit_use_double_quant: false
310
+ bnb_4bit_compute_dtype: float16
311
+ bnb_4bit_quant_storage: uint8
312
+ load_in_4bit: true
313
+ load_in_8bit: false
314
+ output_dir:
315
+ desc: null
316
+ value: /kaggle/working/
317
+ overwrite_output_dir:
318
+ desc: null
319
+ value: false
320
+ do_train:
321
+ desc: null
322
+ value: false
323
+ do_eval:
324
+ desc: null
325
+ value: false
326
+ do_predict:
327
+ desc: null
328
+ value: false
329
+ evaluation_strategy:
330
+ desc: null
331
+ value: 'no'
332
+ prediction_loss_only:
333
+ desc: null
334
+ value: false
335
+ per_device_train_batch_size:
336
+ desc: null
337
+ value: 8
338
+ per_device_eval_batch_size:
339
+ desc: null
340
+ value: 8
341
+ per_gpu_train_batch_size:
342
+ desc: null
343
+ value: null
344
+ per_gpu_eval_batch_size:
345
+ desc: null
346
+ value: null
347
+ gradient_accumulation_steps:
348
+ desc: null
349
+ value: 1
350
+ eval_accumulation_steps:
351
+ desc: null
352
+ value: null
353
+ eval_delay:
354
+ desc: null
355
+ value: 0
356
+ learning_rate:
357
+ desc: null
358
+ value: 7.0e-06
359
+ weight_decay:
360
+ desc: null
361
+ value: 0.001
362
+ adam_beta1:
363
+ desc: null
364
+ value: 0.9
365
+ adam_beta2:
366
+ desc: null
367
+ value: 0.999
368
+ adam_epsilon:
369
+ desc: null
370
+ value: 1.0e-08
371
+ max_grad_norm:
372
+ desc: null
373
+ value: 0.07
374
+ num_train_epochs:
375
+ desc: null
376
+ value: 5
377
+ max_steps:
378
+ desc: null
379
+ value: 15000
380
+ lr_scheduler_type:
381
+ desc: null
382
+ value: cosine
383
+ lr_scheduler_kwargs:
384
+ desc: null
385
+ value: {}
386
+ warmup_ratio:
387
+ desc: null
388
+ value: 0.03
389
+ warmup_steps:
390
+ desc: null
391
+ value: 0
392
+ log_level:
393
+ desc: null
394
+ value: passive
395
+ log_level_replica:
396
+ desc: null
397
+ value: warning
398
+ log_on_each_node:
399
+ desc: null
400
+ value: true
401
+ logging_dir:
402
+ desc: null
403
+ value: /kaggle/working/runs/Apr10_01-00-47_d91c9dc8354a
404
+ logging_strategy:
405
+ desc: null
406
+ value: steps
407
+ logging_first_step:
408
+ desc: null
409
+ value: false
410
+ logging_steps:
411
+ desc: null
412
+ value: 100
413
+ logging_nan_inf_filter:
414
+ desc: null
415
+ value: true
416
+ save_strategy:
417
+ desc: null
418
+ value: steps
419
+ save_steps:
420
+ desc: null
421
+ value: 100
422
+ save_total_limit:
423
+ desc: null
424
+ value: 1
425
+ save_safetensors:
426
+ desc: null
427
+ value: true
428
+ save_on_each_node:
429
+ desc: null
430
+ value: false
431
+ save_only_model:
432
+ desc: null
433
+ value: false
434
+ no_cuda:
435
+ desc: null
436
+ value: false
437
+ use_cpu:
438
+ desc: null
439
+ value: false
440
+ use_mps_device:
441
+ desc: null
442
+ value: false
443
+ seed:
444
+ desc: null
445
+ value: 42
446
+ data_seed:
447
+ desc: null
448
+ value: null
449
+ jit_mode_eval:
450
+ desc: null
451
+ value: false
452
+ use_ipex:
453
+ desc: null
454
+ value: false
455
+ bf16:
456
+ desc: null
457
+ value: false
458
+ fp16:
459
+ desc: null
460
+ value: false
461
+ fp16_opt_level:
462
+ desc: null
463
+ value: O1
464
+ half_precision_backend:
465
+ desc: null
466
+ value: auto
467
+ bf16_full_eval:
468
+ desc: null
469
+ value: false
470
+ fp16_full_eval:
471
+ desc: null
472
+ value: false
473
+ tf32:
474
+ desc: null
475
+ value: null
476
+ local_rank:
477
+ desc: null
478
+ value: 0
479
+ ddp_backend:
480
+ desc: null
481
+ value: null
482
+ tpu_num_cores:
483
+ desc: null
484
+ value: null
485
+ tpu_metrics_debug:
486
+ desc: null
487
+ value: false
488
+ debug:
489
+ desc: null
490
+ value: []
491
+ dataloader_drop_last:
492
+ desc: null
493
+ value: false
494
+ eval_steps:
495
+ desc: null
496
+ value: null
497
+ dataloader_num_workers:
498
+ desc: null
499
+ value: 8
500
+ dataloader_prefetch_factor:
501
+ desc: null
502
+ value: null
503
+ past_index:
504
+ desc: null
505
+ value: -1
506
+ run_name:
507
+ desc: null
508
+ value: /kaggle/working/
509
+ disable_tqdm:
510
+ desc: null
511
+ value: false
512
+ remove_unused_columns:
513
+ desc: null
514
+ value: true
515
+ label_names:
516
+ desc: null
517
+ value: null
518
+ load_best_model_at_end:
519
+ desc: null
520
+ value: false
521
+ metric_for_best_model:
522
+ desc: null
523
+ value: null
524
+ greater_is_better:
525
+ desc: null
526
+ value: null
527
+ ignore_data_skip:
528
+ desc: null
529
+ value: false
530
+ fsdp:
531
+ desc: null
532
+ value: []
533
+ fsdp_min_num_params:
534
+ desc: null
535
+ value: 0
536
+ fsdp_config:
537
+ desc: null
538
+ value:
539
+ min_num_params: 0
540
+ xla: false
541
+ xla_fsdp_v2: false
542
+ xla_fsdp_grad_ckpt: false
543
+ fsdp_transformer_layer_cls_to_wrap:
544
+ desc: null
545
+ value: null
546
+ accelerator_config:
547
+ desc: null
548
+ value:
549
+ split_batches: false
550
+ dispatch_batches: null
551
+ even_batches: true
552
+ use_seedable_sampler: true
553
+ deepspeed:
554
+ desc: null
555
+ value: null
556
+ label_smoothing_factor:
557
+ desc: null
558
+ value: 0.0
559
+ optim:
560
+ desc: null
561
+ value: paged_adamw_32bit
562
+ optim_args:
563
+ desc: null
564
+ value: null
565
+ adafactor:
566
+ desc: null
567
+ value: false
568
+ group_by_length:
569
+ desc: null
570
+ value: false
571
+ length_column_name:
572
+ desc: null
573
+ value: length
574
+ report_to:
575
+ desc: null
576
+ value:
577
+ - tensorboard
578
+ - wandb
579
+ ddp_find_unused_parameters:
580
+ desc: null
581
+ value: null
582
+ ddp_bucket_cap_mb:
583
+ desc: null
584
+ value: null
585
+ ddp_broadcast_buffers:
586
+ desc: null
587
+ value: null
588
+ dataloader_pin_memory:
589
+ desc: null
590
+ value: true
591
+ dataloader_persistent_workers:
592
+ desc: null
593
+ value: false
594
+ skip_memory_metrics:
595
+ desc: null
596
+ value: true
597
+ use_legacy_prediction_loop:
598
+ desc: null
599
+ value: false
600
+ push_to_hub:
601
+ desc: null
602
+ value: false
603
+ resume_from_checkpoint:
604
+ desc: null
605
+ value: null
606
+ hub_model_id:
607
+ desc: null
608
+ value: null
609
+ hub_strategy:
610
+ desc: null
611
+ value: every_save
612
+ hub_token:
613
+ desc: null
614
+ value: <HUB_TOKEN>
615
+ hub_private_repo:
616
+ desc: null
617
+ value: false
618
+ hub_always_push:
619
+ desc: null
620
+ value: false
621
+ gradient_checkpointing:
622
+ desc: null
623
+ value: true
624
+ gradient_checkpointing_kwargs:
625
+ desc: null
626
+ value: null
627
+ include_inputs_for_metrics:
628
+ desc: null
629
+ value: false
630
+ fp16_backend:
631
+ desc: null
632
+ value: auto
633
+ push_to_hub_model_id:
634
+ desc: null
635
+ value: null
636
+ push_to_hub_organization:
637
+ desc: null
638
+ value: null
639
+ push_to_hub_token:
640
+ desc: null
641
+ value: <PUSH_TO_HUB_TOKEN>
642
+ mp_parameters:
643
+ desc: null
644
+ value: ''
645
+ auto_find_batch_size:
646
+ desc: null
647
+ value: true
648
+ full_determinism:
649
+ desc: null
650
+ value: false
651
+ torchdynamo:
652
+ desc: null
653
+ value: null
654
+ ray_scope:
655
+ desc: null
656
+ value: last
657
+ ddp_timeout:
658
+ desc: null
659
+ value: 1800
660
+ torch_compile:
661
+ desc: null
662
+ value: false
663
+ torch_compile_backend:
664
+ desc: null
665
+ value: null
666
+ torch_compile_mode:
667
+ desc: null
668
+ value: null
669
+ dispatch_batches:
670
+ desc: null
671
+ value: null
672
+ split_batches:
673
+ desc: null
674
+ value: null
675
+ include_tokens_per_second:
676
+ desc: null
677
+ value: false
678
+ include_num_input_tokens_seen:
679
+ desc: null
680
+ value: false
681
+ neftune_noise_alpha:
682
+ desc: null
683
+ value: null
684
+ optim_target_modules:
685
+ desc: null
686
+ value: null
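For readability, the run configuration recorded above (runs/Apr10_01-00-47) can be approximated in code. This is a sketch only: the TinyLlama checkpoint, 4-bit NF4 quantization, paged_adamw_32bit optimizer, 7e-6 cosine schedule, max_steps 15000 and the other literal values are taken from the logged config, while the variable names and the surrounding wiring (dataset, PEFT adapter, Trainer) are assumptions not recorded here.

```python
# Minimal sketch reconstructing the settings logged in the config above.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig, TrainingArguments

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # _load_in_4bit: true
    bnb_4bit_quant_type="nf4",             # bnb_4bit_quant_type: nf4
    bnb_4bit_compute_dtype=torch.float16,  # bnb_4bit_compute_dtype: float16
    bnb_4bit_use_double_quant=False,       # bnb_4bit_use_double_quant: false
)

model = AutoModelForCausalLM.from_pretrained(
    "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T",  # _name_or_path
    quantization_config=bnb_config,
)
model.config.use_cache = False  # use_cache: false (needed with gradient checkpointing)

training_args = TrainingArguments(
    output_dir="/kaggle/working/",
    per_device_train_batch_size=8,
    gradient_accumulation_steps=1,
    learning_rate=7e-6,
    weight_decay=0.001,
    max_grad_norm=0.07,
    num_train_epochs=5,
    max_steps=15000,
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    logging_steps=100,
    save_strategy="steps",
    save_steps=100,
    save_total_limit=1,
    optim="paged_adamw_32bit",
    gradient_checkpointing=True,
    dataloader_num_workers=8,
    auto_find_batch_size=True,
    report_to=["tensorboard", "wandb"],
)
```

The dataloader_num_workers=8 value here is also what triggers the DataLoader worker warning at the top of the output.log that follows, where the environment suggests at most 4 workers.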
wandb/run-20240410_010250-ft4a6i5j/files/output.log ADDED
@@ -0,0 +1,334 @@
1
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:557: UserWarning: This DataLoader will create 8 worker processes in total. Our suggested max number of worker in current system is 4, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
2
+ warnings.warn(_create_warning_msg(
3
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
4
+ To disable this warning, you can either:
5
+ - Avoid using `tokenizers` before the fork if possible
6
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
7
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
8
+ To disable this warning, you can either:
9
+ - Avoid using `tokenizers` before the fork if possible
10
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
11
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
12
+ To disable this warning, you can either:
13
+ - Avoid using `tokenizers` before the fork if possible
14
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
15
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
16
+ To disable this warning, you can either:
17
+ - Avoid using `tokenizers` before the fork if possible
18
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
19
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
20
+ To disable this warning, you can either:
21
+ - Avoid using `tokenizers` before the fork if possible
22
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
23
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
24
+ To disable this warning, you can either:
25
+ - Avoid using `tokenizers` before the fork if possible
26
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
27
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
28
+ To disable this warning, you can either:
29
+ - Avoid using `tokenizers` before the fork if possible
30
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
31
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
32
+ To disable this warning, you can either:
33
+ - Avoid using `tokenizers` before the fork if possible
34
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
35
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
36
+ warnings.warn(
37
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
38
+ warnings.warn(
39
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
40
+ warnings.warn(
41
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
42
+ warnings.warn(
43
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
44
+ warnings.warn(
45
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
46
+ warnings.warn(
47
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
48
+ warnings.warn(
49
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
50
+ warnings.warn(
51
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
52
+ warnings.warn(
53
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
54
+ warnings.warn(
55
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
56
+ warnings.warn(
57
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
58
+ warnings.warn(
59
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
60
+ warnings.warn(
61
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
62
+ warnings.warn(
63
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
64
+ warnings.warn(
65
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
66
+ warnings.warn(
67
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
68
+ warnings.warn(
69
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
70
+ warnings.warn(
71
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
72
+ warnings.warn(
73
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
74
+ warnings.warn(
75
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
76
+ warnings.warn(
77
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
78
+ warnings.warn(
79
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
80
+ warnings.warn(
81
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
82
+ warnings.warn(
83
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
84
+ warnings.warn(
85
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
86
+ warnings.warn(
87
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
88
+ warnings.warn(
89
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
90
+ warnings.warn(
91
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
92
+ warnings.warn(
93
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
94
+ warnings.warn(
95
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
96
+ warnings.warn(
97
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
98
+ warnings.warn(
99
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
100
+ warnings.warn(
101
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
102
+ warnings.warn(
103
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
104
+ warnings.warn(
105
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
106
+ warnings.warn(
107
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
108
+ warnings.warn(
109
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
110
+ warnings.warn(
111
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
112
+ warnings.warn(
113
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
114
+ warnings.warn(
115
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
116
+ warnings.warn(
117
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
118
+ warnings.warn(
119
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
120
+ warnings.warn(
121
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
122
+ warnings.warn(
123
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
124
+ warnings.warn(
125
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
126
+ warnings.warn(
127
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
128
+ warnings.warn(
129
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
130
+ warnings.warn(
131
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
132
+ warnings.warn(
133
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
134
+ warnings.warn(
135
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
136
+ warnings.warn(
137
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
138
+ warnings.warn(
139
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
140
+ warnings.warn(
141
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
142
+ warnings.warn(
143
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
144
+ warnings.warn(
145
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
146
+ warnings.warn(
147
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
148
+ warnings.warn(
149
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
150
+ warnings.warn(
151
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
152
+ warnings.warn(
153
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
154
+ warnings.warn(
155
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
156
+ warnings.warn(
157
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
158
+ warnings.warn(
159
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
160
+ warnings.warn(
161
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
162
+ warnings.warn(
163
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
164
+ warnings.warn(
165
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
166
+ warnings.warn(
167
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
168
+ warnings.warn(
169
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
170
+ warnings.warn(
171
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
172
+ warnings.warn(
173
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
174
+ warnings.warn(
175
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
176
+ warnings.warn(
177
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
178
+ warnings.warn(
179
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
180
+ warnings.warn(
181
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
+ warnings.warn(
+ wandb: Network error (ReadTimeout), entering retry loop.
+ [the use_reentrant UserWarning above repeats identically, both before and after the wandb retry message, for the remainder of output.log; repetitions omitted]
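The repeated UserWarning above comes from gradient checkpointing invoking torch.utils.checkpoint without an explicit use_reentrant choice. Below is a minimal sketch of how the warning can be silenced, assuming the torch==2.1.2 and transformers==4.39.3 pins in the requirements.txt diff further down; it is illustrative only and not part of the committed notebook.

```python
# Minimal sketch (not from the committed training code): choose the
# checkpointing variant explicitly instead of relying on the default.
import torch
from torch.utils.checkpoint import checkpoint

def block(x):
    # stand-in for a transformer block; any differentiable function works here
    return torch.relu(x @ x.T)

x = torch.randn(8, 8, requires_grad=True)

# Passing use_reentrant explicitly avoids the UserWarning seen in output.log;
# the non-reentrant variant is the one PyTorch recommends going forward.
y = checkpoint(block, x, use_reentrant=False)
y.sum().backward()

# When gradient checkpointing is enabled through Hugging Face transformers
# (as in this run), the same choice can be forwarded like this (argument name
# assumed from transformers >= 4.36, e.g. the 4.39.3 pinned below):
# model.gradient_checkpointing_enable(gradient_checkpointing_kwargs={"use_reentrant": False})
```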
wandb/run-20240410_010250-ft4a6i5j/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.0
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
655
+ rich==13.7.1
656
+ rmm==23.8.0
657
+ rope==1.13.0
658
+ rpds-py==0.16.2
659
+ rsa==4.9
660
+ ruamel-yaml-conda==0.15.100
661
+ ruamel.yaml.clib==0.2.7
662
+ ruamel.yaml==0.17.40
663
+ s2sphere==0.2.5
664
+ s3fs==2024.2.0
665
+ s3transfer==0.6.2
666
+ safetensors==0.4.2
667
+ scattertext==0.1.19
668
+ scikit-image==0.22.0
669
+ scikit-learn-intelex==2024.2.0
670
+ scikit-learn==1.2.2
671
+ scikit-multilearn==0.2.0
672
+ scikit-optimize==0.10.1
673
+ scikit-plot==0.3.7
674
+ scikit-surprise==1.1.3
675
+ scipy==1.11.4
676
+ scipy==1.12.0
677
+ seaborn==0.12.2
678
+ segment_anything==1.0
679
+ segregation==2.5
680
+ semver==3.0.2
681
+ sentencepiece==0.2.0
682
+ sentry-sdk==1.44.1
683
+ setproctitle==1.3.3
684
+ setuptools-git==1.2
685
+ setuptools-scm==8.0.4
686
+ setuptools==69.0.3
687
+ shap==0.44.1
688
+ shapely==2.0.3
689
+ shellingham==1.5.4
690
+ shtab==1.7.1
691
+ simpervisor==1.0.0
692
+ simplejson==3.19.2
693
+ six==1.16.0
694
+ sklearn-pandas==2.2.0
695
+ slicer==0.0.7
696
+ smart-open==6.4.0
697
+ smmap==5.0.1
698
+ sniffio==1.3.0
699
+ snowballstemmer==2.2.0
700
+ snuggs==1.4.7
701
+ sortedcontainers==2.4.0
702
+ soundfile==0.12.1
703
+ soupsieve==2.5
704
+ soxr==0.3.7
705
+ spacy-legacy==3.0.12
706
+ spacy-loggers==1.0.5
707
+ spacy==3.7.2
708
+ spaghetti==1.7.5.post1
709
+ spectral==0.23.1
710
+ spglm==1.1.0
711
+ sphinx-rtd-theme==0.2.4
712
+ spint==1.0.7
713
+ splot==1.1.5.post1
714
+ spopt==0.6.0
715
+ spreg==1.4.2
716
+ spvcm==0.3.0
717
+ sqlparse==0.4.4
718
+ squarify==0.4.3
719
+ srsly==2.4.8
720
+ stable-baselines3==2.1.0
721
+ stack-data==0.6.2
722
+ stack-data==0.6.3
723
+ stanio==0.5.0
724
+ starlette==0.32.0.post1
725
+ statsmodels==0.14.1
726
+ stemming==1.0.1
727
+ stop-words==2018.7.23
728
+ stopit==1.1.2
729
+ stumpy==1.12.0
730
+ sympy==1.12
731
+ tables==3.9.2
732
+ tabulate==0.9.0
733
+ tangled-up-in-unicode==0.2.0
734
+ tbb==2021.12.0
735
+ tblib==3.0.0
736
+ tenacity==8.2.3
737
+ tensorboard-data-server==0.7.2
738
+ tensorboard-plugin-profile==2.15.0
739
+ tensorboard==2.15.1
740
+ tensorboardX==2.6.2.2
741
+ tensorflow-cloud==0.1.16
742
+ tensorflow-datasets==4.9.4
743
+ tensorflow-decision-forests==1.8.1
744
+ tensorflow-estimator==2.15.0
745
+ tensorflow-hub==0.16.1
746
+ tensorflow-io-gcs-filesystem==0.35.0
747
+ tensorflow-io==0.35.0
748
+ tensorflow-metadata==0.14.0
749
+ tensorflow-probability==0.23.0
750
+ tensorflow-serving-api==2.14.1
751
+ tensorflow-text==2.15.0
752
+ tensorflow-transform==0.14.0
753
+ tensorflow==2.15.0
754
+ tensorstore==0.1.56
755
+ termcolor==2.4.0
756
+ terminado==0.18.0
757
+ testpath==0.6.0
758
+ text-unidecode==1.3
759
+ textblob==0.18.0.post0
760
+ texttable==1.7.0
761
+ tf_keras==2.15.1
762
+ tfp-nightly==0.24.0.dev0
763
+ thinc==8.2.2
764
+ threadpoolctl==3.2.0
765
+ tifffile==2023.12.9
766
+ timm==0.9.16
767
+ tinycss2==1.2.1
768
+ tobler==0.11.2
769
+ tokenizers==0.15.2
770
+ toml==0.10.2
771
+ tomli==2.0.1
772
+ tomlkit==0.12.4
773
+ toolz==0.12.1
774
+ torch==2.1.2
775
+ torchaudio==2.1.2
776
+ torchdata==0.7.1
777
+ torchinfo==1.8.0
778
+ torchmetrics==1.3.2
779
+ torchtext==0.16.2
780
+ torchvision==0.16.2
781
+ tornado==6.3.3
782
+ tqdm==4.66.1
783
+ traceml==1.0.8
784
+ traitlets==5.9.0
785
+ traittypes==0.2.1
786
+ transformers==4.39.3
787
+ treelite-runtime==3.2.0
788
+ treelite==3.2.0
789
+ trl==0.8.1
790
+ truststore==0.8.0
791
+ trx-python==0.2.9
792
+ tsfresh==0.20.2
793
+ typeguard==4.1.5
794
+ typer==0.9.0
795
+ typer==0.9.4
796
+ types-python-dateutil==2.8.19.20240106
797
+ typing-inspect==0.9.0
798
+ typing-utils==0.1.0
799
+ typing_extensions==4.9.0
800
+ tyro==0.8.3
801
+ tzdata==2023.4
802
+ uc-micro-py==1.0.3
803
+ ucx-py==0.33.0
804
+ ujson==5.9.0
805
+ umap-learn==0.5.5
806
+ unicodedata2==15.1.0
807
+ update-checker==0.18.0
808
+ uri-template==1.3.0
809
+ uritemplate==3.0.1
810
+ urllib3==1.26.18
811
+ urllib3==2.1.0
812
+ urwid==2.6.10
813
+ urwid_readline==0.14
814
+ uvicorn==0.25.0
815
+ uvloop==0.19.0
816
+ vaex-astro==0.9.3
817
+ vaex-core==4.17.1
818
+ vaex-hdf5==0.14.1
819
+ vaex-jupyter==0.8.2
820
+ vaex-ml==0.18.3
821
+ vaex-server==0.9.0
822
+ vaex-viz==0.5.4
823
+ vaex==4.17.0
824
+ vec_noise==1.1.4
825
+ vecstack==0.4.0
826
+ virtualenv==20.21.0
827
+ visions==0.7.5
828
+ vowpalwabbit==9.9.0
829
+ vtk==9.3.0
830
+ wandb==0.16.5
831
+ wasabi==1.1.2
832
+ watchfiles==0.21.0
833
+ wavio==0.0.8
834
+ wcwidth==0.2.13
835
+ weasel==0.3.4
836
+ webcolors==1.13
837
+ webencodings==0.5.1
838
+ websocket-client==1.7.0
839
+ websockets==12.0
840
+ wfdb==4.1.2
841
+ whatthepatch==1.0.5
842
+ wheel==0.42.0
843
+ widgetsnbextension==3.6.6
844
+ witwidget==1.8.1
845
+ woodwork==0.29.0
846
+ wordcloud==1.9.3
847
+ wordsegment==1.3.1
848
+ wrapt==1.14.1
849
+ xarray-einstats==0.7.0
850
+ xarray==2024.3.0
851
+ xgboost==2.0.3
852
+ xvfbwrapper==0.2.9
853
+ xxhash==3.4.1
854
+ xyzservices==2023.10.1
855
+ y-py==0.6.2
856
+ yapf==0.40.2
857
+ yarl==1.9.3
858
+ yarl==1.9.4
859
+ ydata-profiling==4.6.4
860
+ yellowbrick==1.5
861
+ ypy-websocket==0.8.4
862
+ zict==3.0.0
863
+ zipp==3.17.0
864
+ zstandard==0.22.0
wandb/run-20240410_010250-ft4a6i5j/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-10T01:02:51.154989",
5
+ "startedAt": "2024-04-10T01:02:50.454746",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "d91c9dc8354a",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.156,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.156,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.156,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.156,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.156,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5569.839107513428
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
wandb/run-20240410_010250-ft4a6i5j/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
1
+ {"train/loss": 2.7774, "train/grad_norm": 0.0, "train/learning_rate": 0.0, "train/epoch": 0.09, "train/global_step": 15000, "_timestamp": 1712747942.6924593, "_runtime": 36972.23140645027, "_step": 150, "train_runtime": 36973.9047, "train_samples_per_second": 3.246, "train_steps_per_second": 0.406, "total_flos": 1.0729434332685926e+17, "train_loss": 2.775368127441406}
wandb/run-20240410_010250-ft4a6i5j/logs/debug-internal.log ADDED
The diff for this file is too large to render. See raw diff
 
wandb/run-20240410_010250-ft4a6i5j/logs/debug.log ADDED
@@ -0,0 +1,31 @@
+ 2024-04-10 01:02:50,456 INFO MainThread:483 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Configure stats pid to 483
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_setup.py:_flush():76] Applying login settings: {}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240410_010250-ft4a6i5j/logs/debug.log
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240410_010250-ft4a6i5j/logs/debug-internal.log
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x78e1476822c0>
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():567] calling init triggers
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
+ config: {}
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():617] starting backend
+ 2024-04-10 01:02:50,457 INFO MainThread:483 [wandb_init.py:init():621] setting up manager
+ 2024-04-10 01:02:50,459 INFO MainThread:483 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2024-04-10 01:02:50,460 INFO MainThread:483 [wandb_init.py:init():629] backend started and connected
+ 2024-04-10 01:02:50,473 INFO MainThread:483 [wandb_run.py:_label_probe_notebook():1299] probe notebook
+ 2024-04-10 01:02:50,810 INFO MainThread:483 [wandb_init.py:init():721] updated telemetry
+ 2024-04-10 01:02:50,813 INFO MainThread:483 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
+ 2024-04-10 01:02:51,049 INFO MainThread:483 [wandb_run.py:_on_init():2344] communicating current version
+ 2024-04-10 01:02:51,115 INFO MainThread:483 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
+
+ 2024-04-10 01:02:51,115 INFO MainThread:483 [wandb_init.py:init():805] starting run threads in backend
+ 2024-04-10 01:03:07,089 INFO MainThread:483 [wandb_run.py:_console_start():2323] atexit reg
+ 2024-04-10 01:03:07,089 INFO MainThread:483 [wandb_run.py:_redirect():2178] redirect: wrap_raw
+ 2024-04-10 01:03:07,091 INFO MainThread:483 [wandb_run.py:_redirect():2243] Wrapping output streams.
+ 2024-04-10 01:03:07,091 INFO MainThread:483 [wandb_run.py:_redirect():2268] Redirects installed.
+ 2024-04-10 01:03:07,092 INFO MainThread:483 [wandb_init.py:init():848] run started, returning control to user process
+ 2024-04-10 01:03:07,097 INFO MainThread:483 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 7e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.07, 'num_train_epochs': 5, 'max_steps': 15000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr10_01-00-47_d91c9dc8354a', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 
'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
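The config recorded at the end of debug.log above captures both the quantized TinyLlama model settings and the TrainingArguments used for the run. Below is a minimal sketch, for illustration only, of how that setup could be reconstructed from the logged values; the dataset, tokenizer handling, and trainer class are not part of this diff, so anything beyond the logged hyperparameters is an assumption.

# A minimal sketch of the training setup implied by the logged config.
# Only values that appear in the log are set explicitly; everything else
# is an assumption for illustration.
import torch
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    BitsAndBytesConfig,
    TrainingArguments,
)

MODEL_ID = "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T"  # '_name_or_path'

# 4-bit NF4 quantization with float16 compute, mirroring 'quantization_config'.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=False,
    bnb_4bit_compute_dtype=torch.float16,
)

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    quantization_config=bnb_config,
    use_cache=False,    # 'use_cache': False in the logged config
    device_map="auto",  # not in the log; an assumption for convenience
)

# Hyperparameters mirrored from the logged TrainingArguments.
training_args = TrainingArguments(
    output_dir="/kaggle/working/",
    per_device_train_batch_size=8,
    gradient_accumulation_steps=1,
    learning_rate=7e-6,
    weight_decay=0.001,
    max_grad_norm=0.07,
    num_train_epochs=5,
    max_steps=15000,
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    optim="paged_adamw_32bit",
    logging_steps=100,
    save_strategy="steps",
    save_steps=100,
    save_total_limit=1,
    gradient_checkpointing=True,
    dataloader_num_workers=8,
    auto_find_batch_size=True,
    report_to=["tensorboard", "wandb"],
    run_name="/kaggle/working/",
)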
wandb/run-20240410_010250-ft4a6i5j/run-ft4a6i5j.wandb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:015f1ac5c08588804f530daee10e9f99c469cafb39c8953c23a309447444a1ec
+ size 1183852
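run-ft4a6i5j.wandb is tracked with Git LFS, so what is committed is a small pointer file rather than the binary itself: a spec version line, the sha256 of the stored object, and its size in bytes (1183852, roughly 1.1 MB). Below is a minimal sketch, for illustration only, of splitting such a pointer into its fields.

# A minimal sketch, for illustration only, of parsing a Git LFS pointer
# file like the one committed above into its version/oid/size fields.
def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "<key> <value>"; keep everything after the first space.
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:015f1ac5c08588804f530daee10e9f99c469cafb39c8953c23a309447444a1ec\n"
    "size 1183852\n"
)
print(parse_lfs_pointer(pointer)["size"])  # 1183852 bytes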