sanchit-gandhi committed
Commit
0ee2684
1 Parent(s): 415a5f5

Model save

.gitattributes CHANGED
@@ -66,3 +66,4 @@ wandb/run-20220303_033953-1eigbhyo/run-1eigbhyo.wandb filter=lfs diff=lfs merge=
66
  wandb/run-20220303_055131-e96a3lxb/run-e96a3lxb.wandb filter=lfs diff=lfs merge=lfs -text
67
  wandb/run-20220303_074415-2c9ds5of/run-2c9ds5of.wandb filter=lfs diff=lfs merge=lfs -text
68
  wandb/run-20220303_095952-ifpugwa3/run-ifpugwa3.wandb filter=lfs diff=lfs merge=lfs -text
69
+ wandb/run-20220303_115825-26v3sm6w/run-26v3sm6w.wandb filter=lfs diff=lfs merge=lfs -text
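For reference, .gitattributes entries of this form are what `git lfs track <path>` writes; a minimal sketch follows (illustrative only, not necessarily how this commit was produced):

git lfs track "wandb/run-20220303_115825-26v3sm6w/run-26v3sm6w.wandb"
# appends the matching attribute line to .gitattributes:
# wandb/run-20220303_115825-26v3sm6w/run-26v3sm6w.wandb filter=lfs diff=lfs merge=lfs -text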
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0f03d12c2ba383bf7a6285de068d3ac929f8c534d72415c3ef1a8984c6bb1152
2
+ oid sha256:6dab9de1d4906e3e1475cd4f620b23d27ce0253d84a75d5fd043423ba8a16ad7
3
  size 3210531882
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:dae94a8146b9b563f6fde123bb464956f024899886a10d9ef878bc6e4249ff76
2
+ oid sha256:1113dc5cad66ffaca9b47be3e59d9904c7521237288f2e192fe839cf04d21b0c
3
  size 3119
wandb/debug-internal.log CHANGED
@@ -1 +1 @@
1
- run-20220303_095952-ifpugwa3/logs/debug-internal.log
1
+ run-20220303_115825-26v3sm6w/logs/debug-internal.log
wandb/debug.log CHANGED
@@ -1 +1 @@
1
- run-20220303_095952-ifpugwa3/logs/debug.log
1
+ run-20220303_115825-26v3sm6w/logs/debug.log
wandb/latest-run CHANGED
@@ -1 +1 @@
1
- run-20220303_095952-ifpugwa3
1
+ run-20220303_115825-26v3sm6w
wandb/run-20220303_095952-ifpugwa3/files/config.yaml CHANGED
@@ -10673,7 +10673,14 @@ _wandb:
10673
  - 1
10674
  - 5
10675
  - 11
10676
+ 2:
10677
+ - 1
10678
+ - 5
10679
+ - 11
10680
+ - 12
10681
  3:
10682
+ - 1
10683
+ - 7
10684
  - 13
10685
  4: 3.9.5
10686
  5: 0.12.10
wandb/run-20220303_095952-ifpugwa3/files/output.log CHANGED
@@ -3328,3 +3328,10 @@ Upload file wandb/run-20220303_095952-ifpugwa3/run-ifpugwa3.wandb: 43%|██
3328
  eval_samples_per_second = 2.842
3329
  eval_steps_per_second = 0.238
3330
  [INFO|modeling_utils.py:1081] 2022-03-03 11:56:15,702 >> Model weights saved in ./pytorch_model.bin:06<08:55, 2.45s/it] argument in `SpeechEncoderDecoderModel.forward` and have been ignored: input_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.ut_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.
3331
+ Upload file wandb/run-20220303_095952-ifpugwa3/run-ifpugwa3.wandb: 0%| | 32.0k/35.4M [00:00<?, ?B/s] argument in `SpeechEncoderDecoderModel.forward` and have been ignored: input_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.ut_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.
3332
+ Upload file wandb/run-20220303_095952-ifpugwa3/run-ifpugwa3.wandb: 0%| | 32.0k/35.4M [00:00<?, ?B/s] argument in `SpeechEncoderDecoderModel.forward` and have been ignored: input_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.ut_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.
3333
+ 03/03/2022 11:56:44 - WARNING - huggingface_hub.repository - To https://huggingface.co/sanchit-gandhi/wav2vec2-gpt2-wandb-grid-search
3334
+ Upload file wandb/run-20220303_095952-ifpugwa3/run-ifpugwa3.wandb: 100%|███████████| 35.4M/35.4M [00:02<00:00, 18.5MB/s] argument in `SpeechEncoderDecoderModel.forward` and have been ignored: input_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.ut_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.
3335
+ return ModelInfo(**d)f.finetuned_from)formers/src/transformers/modelcard.py", line 611, in from_trainercard31, in mainule>ent in `SpeechEncoderDecoderModel.forward` and have been ignored: input_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.ut_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.
3336
+ return ModelInfo(**d)f.finetuned_from)formers/src/transformers/modelcard.py", line 611, in from_trainercard31, in mainule>ent in `SpeechEncoderDecoderModel.forward` and have been ignored: input_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.ut_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.
3337
+ return ModelInfo(**d)f.finetuned_from)formers/src/transformers/modelcard.py", line 611, in from_trainercard31, in mainule>ent in `SpeechEncoderDecoderModel.forward` and have been ignored: input_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.ut_length. If input_length are not expected by `SpeechEncoderDecoderModel.forward`, you can safely ignore this message.
wandb/run-20220303_095952-ifpugwa3/files/wandb-summary.json CHANGED
The diff for this file is too large to render. See raw diff
wandb/run-20220303_095952-ifpugwa3/logs/debug-internal.log CHANGED
@@ -9234,3 +9234,114 @@
9234
  2022-03-03 11:56:36,036 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: stop_status
9235
  2022-03-03 11:56:36,037 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: stop_status
9236
  2022-03-03 11:56:36,400 DEBUG SenderThread:275130 [sender.py:send():235] send: stats
9237
+ 2022-03-03 11:56:44,371 INFO Thread-8 :275130 [dir_watcher.py:_on_file_modified():230] file/dir modified: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/output.log
9238
+ 2022-03-03 11:56:45,372 INFO Thread-8 :275130 [dir_watcher.py:_on_file_modified():230] file/dir modified: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/output.log
9239
+ 2022-03-03 11:56:46,372 INFO Thread-8 :275130 [dir_watcher.py:_on_file_modified():230] file/dir modified: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/output.log
9240
+ 2022-03-03 11:56:50,373 INFO Thread-8 :275130 [dir_watcher.py:_on_file_modified():230] file/dir modified: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/output.log
9241
+ 2022-03-03 11:56:51,071 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9242
+ 2022-03-03 11:56:51,071 DEBUG SenderThread:275130 [sender.py:send():235] send: telemetry
9243
+ 2022-03-03 11:56:51,071 DEBUG SenderThread:275130 [sender.py:send():235] send: exit
9244
+ 2022-03-03 11:56:51,071 INFO SenderThread:275130 [sender.py:send_exit():371] handling exit code: 1
9245
+ 2022-03-03 11:56:51,071 INFO SenderThread:275130 [sender.py:send_exit():373] handling runtime: 7017
9246
+ 2022-03-03 11:56:51,126 INFO SenderThread:275130 [sender.py:_save_file():944] saving file wandb-summary.json with policy end
9247
+ 2022-03-03 11:56:51,126 INFO SenderThread:275130 [sender.py:send_exit():379] send defer
9248
+ 2022-03-03 11:56:51,126 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9249
+ 2022-03-03 11:56:51,127 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9250
+ 2022-03-03 11:56:51,127 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 0
9251
+ 2022-03-03 11:56:51,127 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9252
+ 2022-03-03 11:56:51,127 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 0
9253
+ 2022-03-03 11:56:51,127 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 1
9254
+ 2022-03-03 11:56:51,127 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9255
+ 2022-03-03 11:56:51,127 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 1
9256
+ 2022-03-03 11:56:51,184 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9257
+ 2022-03-03 11:56:51,185 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 1
9258
+ 2022-03-03 11:56:51,185 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 2
9259
+ 2022-03-03 11:56:51,185 DEBUG SenderThread:275130 [sender.py:send():235] send: stats
9260
+ 2022-03-03 11:56:51,185 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9261
+ 2022-03-03 11:56:51,185 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 2
9262
+ 2022-03-03 11:56:51,186 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9263
+ 2022-03-03 11:56:51,186 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 2
9264
+ 2022-03-03 11:56:51,186 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 3
9265
+ 2022-03-03 11:56:51,186 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9266
+ 2022-03-03 11:56:51,186 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 3
9267
+ 2022-03-03 11:56:51,251 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9268
+ 2022-03-03 11:56:51,251 DEBUG SenderThread:275130 [sender.py:send():235] send: summary
9269
+ 2022-03-03 11:56:51,340 INFO SenderThread:275130 [sender.py:_save_file():944] saving file wandb-summary.json with policy end
9270
+ 2022-03-03 11:56:51,340 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9271
+ 2022-03-03 11:56:51,340 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 3
9272
+ 2022-03-03 11:56:51,340 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 4
9273
+ 2022-03-03 11:56:51,340 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9274
+ 2022-03-03 11:56:51,341 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9275
+ 2022-03-03 11:56:51,341 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 4
9276
+ 2022-03-03 11:56:51,341 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9277
+ 2022-03-03 11:56:51,341 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 4
9278
+ 2022-03-03 11:56:51,373 INFO Thread-8 :275130 [dir_watcher.py:_on_file_modified():230] file/dir modified: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/output.log
9279
+ 2022-03-03 11:56:51,374 INFO Thread-8 :275130 [dir_watcher.py:_on_file_modified():230] file/dir modified: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/wandb-summary.json
9280
+ 2022-03-03 11:56:51,442 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9281
+ 2022-03-03 11:56:52,213 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 5
9282
+ 2022-03-03 11:56:52,213 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9283
+ 2022-03-03 11:56:52,214 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9284
+ 2022-03-03 11:56:52,214 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 5
9285
+ 2022-03-03 11:56:52,214 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9286
+ 2022-03-03 11:56:52,215 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 5
9287
+ 2022-03-03 11:56:52,215 INFO SenderThread:275130 [dir_watcher.py:finish():283] shutting down directory watcher
9288
+ 2022-03-03 11:56:52,315 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9289
+ 2022-03-03 11:56:52,374 INFO Thread-8 :275130 [dir_watcher.py:_on_file_modified():230] file/dir modified: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/config.yaml
9290
+ 2022-03-03 11:56:52,374 INFO SenderThread:275130 [dir_watcher.py:finish():313] scan: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files
9291
+ 2022-03-03 11:56:52,374 INFO SenderThread:275130 [dir_watcher.py:finish():327] scan save: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/wandb-metadata.json wandb-metadata.json
9292
+ 2022-03-03 11:56:52,375 INFO SenderThread:275130 [dir_watcher.py:finish():327] scan save: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/output.log output.log
9293
+ 2022-03-03 11:56:52,375 INFO SenderThread:275130 [dir_watcher.py:finish():327] scan save: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/wandb-summary.json wandb-summary.json
9294
+ 2022-03-03 11:56:52,378 INFO SenderThread:275130 [dir_watcher.py:finish():327] scan save: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/requirements.txt requirements.txt
9295
+ 2022-03-03 11:56:52,380 INFO SenderThread:275130 [dir_watcher.py:finish():327] scan save: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/config.yaml config.yaml
9296
+ 2022-03-03 11:56:52,383 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 6
9297
+ 2022-03-03 11:56:52,384 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9298
+ 2022-03-03 11:56:52,384 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9299
+ 2022-03-03 11:56:52,385 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 6
9300
+ 2022-03-03 11:56:52,387 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9301
+ 2022-03-03 11:56:52,387 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 6
9302
+ 2022-03-03 11:56:52,387 INFO SenderThread:275130 [file_pusher.py:finish():177] shutting down file pusher
9303
+ 2022-03-03 11:56:52,486 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9304
+ 2022-03-03 11:56:52,486 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9305
+ 2022-03-03 11:56:52,588 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9306
+ 2022-03-03 11:56:52,588 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9307
+ 2022-03-03 11:56:52,676 INFO Thread-14 :275130 [upload_job.py:push():137] Uploaded file /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/requirements.txt
9308
+ 2022-03-03 11:56:52,686 INFO Thread-15 :275130 [upload_job.py:push():137] Uploaded file /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/config.yaml
9309
+ 2022-03-03 11:56:52,689 INFO Thread-12 :275130 [upload_job.py:push():137] Uploaded file /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/output.log
9310
+ 2022-03-03 11:56:52,690 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9311
+ 2022-03-03 11:56:52,690 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9312
+ 2022-03-03 11:56:52,749 INFO Thread-13 :275130 [upload_job.py:push():137] Uploaded file /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/files/wandb-summary.json
9313
+ 2022-03-03 11:56:52,792 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9314
+ 2022-03-03 11:56:52,792 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9315
+ 2022-03-03 11:56:52,893 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9316
+ 2022-03-03 11:56:52,894 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9317
+ 2022-03-03 11:56:52,950 INFO Thread-7 :275130 [sender.py:transition_state():392] send defer: 7
9318
+ 2022-03-03 11:56:52,951 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9319
+ 2022-03-03 11:56:52,951 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 7
9320
+ 2022-03-03 11:56:52,951 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9321
+ 2022-03-03 11:56:52,951 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 7
9322
+ 2022-03-03 11:56:52,995 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9323
+ 2022-03-03 11:56:54,218 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 8
9324
+ 2022-03-03 11:56:54,218 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9325
+ 2022-03-03 11:56:54,219 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9326
+ 2022-03-03 11:56:54,219 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 8
9327
+ 2022-03-03 11:56:54,219 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9328
+ 2022-03-03 11:56:54,219 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 8
9329
+ 2022-03-03 11:56:54,219 INFO SenderThread:275130 [sender.py:transition_state():392] send defer: 9
9330
+ 2022-03-03 11:56:54,221 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: defer
9331
+ 2022-03-03 11:56:54,221 INFO HandlerThread:275130 [handler.py:handle_request_defer():154] handle defer: 9
9332
+ 2022-03-03 11:56:54,221 DEBUG SenderThread:275130 [sender.py:send():235] send: final
9333
+ 2022-03-03 11:56:54,222 DEBUG SenderThread:275130 [sender.py:send():235] send: footer
9334
+ 2022-03-03 11:56:54,222 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: defer
9335
+ 2022-03-03 11:56:54,222 INFO SenderThread:275130 [sender.py:send_request_defer():388] handle sender defer: 9
9336
+ 2022-03-03 11:56:54,320 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: poll_exit
9337
+ 2022-03-03 11:56:54,320 DEBUG SenderThread:275130 [sender.py:send_request():249] send_request: poll_exit
9338
+ 2022-03-03 11:56:54,320 INFO SenderThread:275130 [file_pusher.py:join():182] waiting for file pusher
9339
+ 2022-03-03 11:56:54,377 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: get_summary
9340
+ 2022-03-03 11:56:54,484 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: sampled_history
9341
+ 2022-03-03 11:56:54,487 DEBUG HandlerThread:275130 [handler.py:handle_request():131] handle_request: shutdown
9342
+ 2022-03-03 11:56:54,488 INFO HandlerThread:275130 [handler.py:finish():739] shutting down handler
9343
+ 2022-03-03 11:56:55,221 INFO WriterThread:275130 [datastore.py:close():281] close: /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_095952-ifpugwa3/run-ifpugwa3.wandb
9344
+ 2022-03-03 11:56:55,376 INFO SenderThread:275130 [sender.py:finish():1075] shutting down sender
9345
+ 2022-03-03 11:56:55,376 INFO SenderThread:275130 [file_pusher.py:finish():177] shutting down file pusher
9346
+ 2022-03-03 11:56:55,376 INFO SenderThread:275130 [file_pusher.py:join():182] waiting for file pusher
9347
+ 2022-03-03 11:56:55,383 INFO MainThread:275130 [internal.py:handle_exit():79] Internal process exited
wandb/run-20220303_095952-ifpugwa3/logs/debug.log CHANGED
@@ -25,3 +25,101 @@ config: {}
25
  2022-03-03 09:59:53,517 INFO MainThread:275028 [wandb_init.py:init():651] run started, returning control to user process
26
  2022-03-03 09:59:53,520 INFO MainThread:275028 [wandb_run.py:_config_callback():966] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'torch.float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 50, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['SpeechEncoderDecoderModel'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': None, 'pad_token_id': 50256, 'eos_token_id': 50256, 'sep_token_id': None, 'decoder_start_token_id': 50256, 'task_specific_params': None, 'problem_type': None, '_name_or_path': './', 'transformers_version': None, 'decoder': {'vocab_size': 50257, 'n_positions': 1024, 'n_embd': 1024, 'n_layer': 24, 'n_head': 16, 'n_inner': None, 'activation_function': 'gelu_new', 'resid_pdrop': 0.0, 'embd_pdrop': 0.0, 'attn_pdrop': 0.0, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'summary_type': 'cls_index', 'summary_use_proj': True, 'summary_activation': None, 'summary_first_dropout': 0.0, 'summary_proj_to_labels': True, 'scale_attn_weights': True, 'use_cache': False, 'scale_attn_by_inverse_layer_idx': False, 'reorder_and_upcast_attn': False, 'bos_token_id': 50256, 'eos_token_id': 50256, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': None, 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': True, 'cross_attention_hidden_size': None, 'add_cross_attention': True, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['GPT2LMHeadModel'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': None, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': {'text-generation': {'do_sample': True, 'max_length': 50}}, 'problem_type': None, '_name_or_path': 'gpt2-medium', 'transformers_version': '4.17.0.dev0', 'n_ctx': 1024, 'n_special': 0, 'predict_special_tokens': True, 'model_type': 'gpt2'}, 'encoder': {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': None, 'use_bfloat16': False, 
'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 0, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-large-lv60', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'gradient_checkpointing': False, 'hidden_dropout_prob': 0.0, 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 32, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.0, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'sum', 'ctc_zero_infinity': False, 'add_adapter': True, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'model_type': 'wav2vec2'}, 'model_type': 'speech-encoder-decoder', 'processor_class': 'Wav2Vec2Processor', 'use_cache': False, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 12, 'per_device_eval_batch_size': 12, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 4, 'eval_accumulation_steps': 'None', 'learning_rate': 0.001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 1.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 
'logging_dir': './runs/Mar03_09-59-07_sanchit--v100', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 1, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 1, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': False, 'metric_for_best_model': 'None', 'greater_is_better': 'None', 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 40, 'generation_num_beams': 1, 'train_batch_size': 12, 'eval_batch_size': 12}
27
  2022-03-03 09:59:53,523 INFO MainThread:275028 [wandb_watch.py:watch():43] Watching
28
+ 2022-03-03 11:56:48,528 INFO MainThread:275028 [wandb_run.py:_atexit_cleanup():1797] got exitcode: 1
29
+ 2022-03-03 11:56:48,531 INFO MainThread:275028 [wandb_run.py:_restore():1769] restore
30
+ 2022-03-03 11:56:51,127 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
31
+ wandb_count: 1
32
+ }
33
+ pusher_stats {
34
+ uploaded_bytes: 2095
35
+ total_bytes: 2095
36
+ }
37
+
38
+ 2022-03-03 11:56:51,341 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
39
+ wandb_count: 1
40
+ }
41
+ pusher_stats {
42
+ uploaded_bytes: 2095
43
+ total_bytes: 2095
44
+ }
45
+
46
+ 2022-03-03 11:56:52,214 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
47
+ wandb_count: 1
48
+ }
49
+ pusher_stats {
50
+ uploaded_bytes: 2095
51
+ total_bytes: 2095
52
+ }
53
+
54
+ 2022-03-03 11:56:52,385 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
55
+ wandb_count: 5
56
+ }
57
+ pusher_stats {
58
+ uploaded_bytes: 2095
59
+ total_bytes: 3082555
60
+ }
61
+
62
+ 2022-03-03 11:56:52,487 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
63
+ wandb_count: 5
64
+ }
65
+ pusher_stats {
66
+ uploaded_bytes: 300261
67
+ total_bytes: 3082555
68
+ }
69
+
70
+ 2022-03-03 11:56:52,589 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
71
+ wandb_count: 5
72
+ }
73
+ pusher_stats {
74
+ uploaded_bytes: 3082555
75
+ total_bytes: 3082555
76
+ }
77
+
78
+ 2022-03-03 11:56:52,691 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
79
+ wandb_count: 5
80
+ }
81
+ pusher_stats {
82
+ uploaded_bytes: 3082555
83
+ total_bytes: 3082555
84
+ }
85
+
86
+ 2022-03-03 11:56:52,792 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
87
+ wandb_count: 5
88
+ }
89
+ pusher_stats {
90
+ uploaded_bytes: 3082555
91
+ total_bytes: 3082555
92
+ }
93
+
94
+ 2022-03-03 11:56:52,894 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
95
+ wandb_count: 5
96
+ }
97
+ pusher_stats {
98
+ uploaded_bytes: 3082555
99
+ total_bytes: 3082555
100
+ }
101
+
102
+ 2022-03-03 11:56:54,219 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: file_counts {
103
+ wandb_count: 5
104
+ }
105
+ pusher_stats {
106
+ uploaded_bytes: 3082555
107
+ total_bytes: 3082555
108
+ }
109
+
110
+ 2022-03-03 11:56:54,376 INFO MainThread:275028 [wandb_run.py:_wait_for_finish():1929] got exit ret: done: true
111
+ exit_result {
112
+ }
113
+ file_counts {
114
+ wandb_count: 5
115
+ }
116
+ pusher_stats {
117
+ uploaded_bytes: 3082555
118
+ total_bytes: 3082555
119
+ }
120
+ local_info {
121
+ }
122
+
123
+ 2022-03-03 11:56:55,520 INFO MainThread:275028 [wandb_run.py:_append_history():2144] rendering history
124
+ 2022-03-03 11:56:55,521 INFO MainThread:275028 [wandb_run.py:_append_summary():2102] rendering summary
125
+ 2022-03-03 11:56:55,522 INFO MainThread:275028 [wandb_run.py:_append_files():2194] logging synced files
wandb/run-20220303_095952-ifpugwa3/run-ifpugwa3.wandb CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4a67bd625fba5a9497132fcd7eef59284c453db52b621dfe437e7e239735d5e0
3
- size 37101743
2
+ oid sha256:1bc05517ed078b88ef97444eb89976b9fb37fe1262f4fc47ae86992ec3a640d1
3
+ size 38700779
wandb/run-20220303_115825-26v3sm6w/files/config.yaml ADDED
The diff for this file is too large to render. See raw diff
wandb/run-20220303_115825-26v3sm6w/files/output.log ADDED
The diff for this file is too large to render. See raw diff
wandb/run-20220303_115825-26v3sm6w/files/requirements.txt ADDED
@@ -0,0 +1,184 @@
1
+ absl-py==1.0.0
2
+ aiohttp==3.8.1
3
+ aiosignal==1.2.0
4
+ anyio==3.5.0
5
+ appdirs==1.4.4
6
+ argon2-cffi-bindings==21.2.0
7
+ argon2-cffi==21.3.0
8
+ asttokens==2.0.5
9
+ async-timeout==4.0.2
10
+ attrs==21.4.0
11
+ audioread==2.1.9
12
+ babel==2.9.1
13
+ backcall==0.2.0
14
+ bitsandbytes-cuda113==0.26.0
15
+ black==22.1.0
16
+ bleach==4.1.0
17
+ cachetools==5.0.0
18
+ certifi==2021.10.8
19
+ cffi==1.15.0
20
+ charset-normalizer==2.0.11
21
+ chex==0.1.0
22
+ click==8.0.3
23
+ clldutils==3.10.1
24
+ colorlog==6.6.0
25
+ csvw==1.11.0
26
+ cycler==0.11.0
27
+ datasets==1.18.3
28
+ debugpy==1.5.1
29
+ decorator==5.1.1
30
+ defusedxml==0.7.1
31
+ dill==0.3.4
32
+ dlinfo==1.2.1
33
+ dm-tree==0.1.6
34
+ docker-pycreds==0.4.0
35
+ entrypoints==0.4
36
+ executing==0.8.2
37
+ filelock==3.4.2
38
+ flatbuffers==2.0
39
+ flax==0.4.0
40
+ fonttools==4.29.1
41
+ frozenlist==1.3.0
42
+ fsspec==2022.1.0
43
+ gitdb==4.0.9
44
+ gitpython==3.1.27
45
+ google-auth-oauthlib==0.4.6
46
+ google-auth==2.6.0
47
+ grpcio==1.43.0
48
+ huggingface-hub==0.4.0
49
+ hypothesis==6.36.1
50
+ idna==3.3
51
+ importlib-metadata==4.10.1
52
+ ipykernel==6.8.0
53
+ ipython-genutils==0.2.0
54
+ ipython==8.0.1
55
+ ipywidgets==7.6.5
56
+ isodate==0.6.1
57
+ jax==0.2.28
58
+ jaxlib==0.1.76+cuda11.cudnn82
59
+ jedi==0.18.1
60
+ jinja2==3.0.3
61
+ jiwer==2.3.0
62
+ joblib==1.1.0
63
+ json5==0.9.6
64
+ jsonschema==4.4.0
65
+ jupyter-client==7.1.2
66
+ jupyter-console==6.4.0
67
+ jupyter-core==4.9.1
68
+ jupyter-server==1.13.5
69
+ jupyter==1.0.0
70
+ jupyterlab-pygments==0.1.2
71
+ jupyterlab-server==2.10.3
72
+ jupyterlab-widgets==1.0.2
73
+ jupyterlab==3.2.9
74
+ kiwisolver==1.3.2
75
+ librosa==0.8.1
76
+ llvmlite==0.38.0
77
+ markdown==3.3.6
78
+ markupsafe==2.0.1
79
+ matplotlib-inline==0.1.3
80
+ matplotlib==3.5.1
81
+ mistune==0.8.4
82
+ msgpack==1.0.3
83
+ multidict==6.0.2
84
+ multiprocess==0.70.12.2
85
+ mypy-extensions==0.4.3
86
+ nbclassic==0.3.5
87
+ nbclient==0.5.10
88
+ nbconvert==6.4.1
89
+ nbformat==5.1.3
90
+ nest-asyncio==1.5.4
91
+ notebook==6.4.8
92
+ numba==0.55.1
93
+ numpy==1.21.5
94
+ oauthlib==3.2.0
95
+ opt-einsum==3.3.0
96
+ optax==0.1.0
97
+ packaging==21.3
98
+ pandas==1.4.0
99
+ pandocfilters==1.5.0
100
+ parso==0.8.3
101
+ pathspec==0.9.0
102
+ pathtools==0.1.2
103
+ pexpect==4.8.0
104
+ phonemizer==3.0.1
105
+ pickleshare==0.7.5
106
+ pillow==9.0.0
107
+ pip==22.0.2
108
+ pkg-resources==0.0.0
109
+ platformdirs==2.4.1
110
+ pooch==1.6.0
111
+ prometheus-client==0.13.1
112
+ promise==2.3
113
+ prompt-toolkit==3.0.26
114
+ protobuf==3.19.4
115
+ psutil==5.9.0
116
+ ptyprocess==0.7.0
117
+ pure-eval==0.2.2
118
+ pyarrow==6.0.1
119
+ pyasn1-modules==0.2.8
120
+ pyasn1==0.4.8
121
+ pycparser==2.21
122
+ pyctcdecode==0.3.0
123
+ pygments==2.11.2
124
+ pygtrie==2.4.2
125
+ pyparsing==3.0.7
126
+ pyrsistent==0.18.1
127
+ python-dateutil==2.8.2
128
+ python-levenshtein==0.12.2
129
+ pytz==2021.3
130
+ pyyaml==6.0
131
+ pyzmq==22.3.0
132
+ qtconsole==5.2.2
133
+ qtpy==2.0.1
134
+ regex==2022.1.18
135
+ requests-oauthlib==1.3.1
136
+ requests==2.27.1
137
+ resampy==0.2.2
138
+ rfc3986==2.0.0
139
+ rsa==4.8
140
+ sacremoses==0.0.47
141
+ scikit-learn==1.0.2
142
+ scipy==1.7.3
143
+ segments==2.2.0
144
+ send2trash==1.8.0
145
+ sentry-sdk==1.5.6
146
+ setuptools==44.1.1
147
+ shortuuid==1.0.8
148
+ six==1.16.0
149
+ smmap==5.0.0
150
+ sniffio==1.2.0
151
+ sortedcontainers==2.4.0
152
+ soundfile==0.10.3.post1
153
+ stack-data==0.1.4
154
+ tabulate==0.8.9
155
+ tensorboard-data-server==0.6.1
156
+ tensorboard-plugin-wit==1.8.1
157
+ tensorboard==2.8.0
158
+ termcolor==1.1.0
159
+ terminado==0.13.1
160
+ testpath==0.5.0
161
+ threadpoolctl==3.1.0
162
+ tokenizers==0.11.4
163
+ tomli==2.0.0
164
+ toolz==0.11.2
165
+ torch==1.10.2+cu113
166
+ torchaudio==0.10.2+cu113
167
+ tornado==6.1
168
+ tqdm==4.62.3
169
+ traitlets==5.1.1
170
+ transformers==4.17.0.dev0
171
+ typing-extensions==3.10.0.2
172
+ uritemplate==4.1.1
173
+ urllib3==1.26.8
174
+ wandb==0.12.10
175
+ wcwidth==0.2.5
176
+ webencodings==0.5.1
177
+ websocket-client==1.2.3
178
+ werkzeug==2.0.2
179
+ wheel==0.37.1
180
+ widgetsnbextension==3.5.2
181
+ xxhash==2.0.2
182
+ yarl==1.7.2
183
+ yaspin==2.1.0
184
+ zipp==3.7.0
wandb/run-20220303_115825-26v3sm6w/files/wandb-metadata.json ADDED
@@ -0,0 +1,60 @@
1
+ {
2
+ "os": "Linux-5.11.0-1028-gcp-x86_64-with-glibc2.33",
3
+ "python": "3.9.5",
4
+ "heartbeatAt": "2022-03-03T11:58:27.126113",
5
+ "startedAt": "2022-03-03T11:58:25.970300",
6
+ "docker": null,
7
+ "gpu": "Tesla V100-SXM2-16GB",
8
+ "gpu_count": 2,
9
+ "cpu_count": 16,
10
+ "cuda": null,
11
+ "args": [
12
+ "--dataset_name=librispeech_asr",
13
+ "--model_name_or_path=./",
14
+ "--tokenizer_name=./",
15
+ "--dataset_config_name=clean",
16
+ "--train_split_name=train.100",
17
+ "--eval_split_name=validation",
18
+ "--output_dir=./",
19
+ "--preprocessing_num_workers=1",
20
+ "--length_column_name=input_length",
21
+ "--overwrite_output_dir",
22
+ "--num_train_epochs=1",
23
+ "--per_device_train_batch_size=12",
24
+ "--per_device_eval_batch_size=12",
25
+ "--gradient_accumulation_steps=8",
26
+ "--generation_max_length=40",
27
+ "--generation_num_beams=1",
28
+ "--learning_rate=1e-3",
29
+ "--warmup_steps=500",
30
+ "--evaluation_strategy=steps",
31
+ "--text_column_name=text",
32
+ "--save_steps=500",
33
+ "--eval_steps=500",
34
+ "--logging_steps=1",
35
+ "--save_total_limit=1",
36
+ "--freeze_feature_encoder",
37
+ "--gradient_checkpointing",
38
+ "--fp16",
39
+ "--group_by_length",
40
+ "--predict_with_generate",
41
+ "--do_lower_case",
42
+ "--do_train",
43
+ "--do_eval",
44
+ "--report_to=wandb",
45
+ "--push_to_hub",
46
+ "--use_auth_token"
47
+ ],
48
+ "state": "running",
49
+ "program": "/home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/run_speech_recognition_seq2seq.py",
50
+ "codePath": "run_speech_recognition_seq2seq.py",
51
+ "git": {
52
+ "remote": "https://huggingface.co/sanchit-gandhi/wav2vec2-gpt2-wandb-grid-search",
53
+ "commit": "415a5f59ec22bbb7c6110935186f2ec6a0047888"
54
+ },
55
+ "email": "sanchit@huggingface.co",
56
+ "root": "/home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search",
57
+ "host": "sanchit--v100",
58
+ "username": "sanchit_huggingface_co",
59
+ "executable": "/home/sanchit_huggingface_co/gcp/bin/python"
60
+ }
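The args recorded in this metadata reassemble into a launch command along the following lines (a sketch only, using the executable and program paths captured above; any surrounding environment setup is not shown):

/home/sanchit_huggingface_co/gcp/bin/python run_speech_recognition_seq2seq.py \
    --dataset_name=librispeech_asr \
    --model_name_or_path=./ \
    --tokenizer_name=./ \
    --dataset_config_name=clean \
    --train_split_name=train.100 \
    --eval_split_name=validation \
    --output_dir=./ \
    --preprocessing_num_workers=1 \
    --length_column_name=input_length \
    --overwrite_output_dir \
    --num_train_epochs=1 \
    --per_device_train_batch_size=12 \
    --per_device_eval_batch_size=12 \
    --gradient_accumulation_steps=8 \
    --generation_max_length=40 \
    --generation_num_beams=1 \
    --learning_rate=1e-3 \
    --warmup_steps=500 \
    --evaluation_strategy=steps \
    --text_column_name=text \
    --save_steps=500 \
    --eval_steps=500 \
    --logging_steps=1 \
    --save_total_limit=1 \
    --freeze_feature_encoder \
    --gradient_checkpointing \
    --fp16 \
    --group_by_length \
    --predict_with_generate \
    --do_lower_case \
    --do_train \
    --do_eval \
    --report_to=wandb \
    --push_to_hub \
    --use_auth_token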
wandb/run-20220303_115825-26v3sm6w/files/wandb-summary.json ADDED
The diff for this file is too large to render. See raw diff
wandb/run-20220303_115825-26v3sm6w/logs/debug-internal.log ADDED
The diff for this file is too large to render. See raw diff
wandb/run-20220303_115825-26v3sm6w/logs/debug.log ADDED
@@ -0,0 +1,27 @@
1
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_setup.py:_flush():75] Loading settings from /home/sanchit_huggingface_co/.config/wandb/settings
2
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_setup.py:_flush():75] Loading settings from /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/settings
3
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_setup.py:_flush():75] Loading settings from environment variables: {}
4
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_setup.py:_flush():75] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program': '/home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/run_speech_recognition_seq2seq.py'}
5
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_init.py:_log_setup():386] Logging user logs to /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_115825-26v3sm6w/logs/debug.log
6
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_init.py:_log_setup():387] Logging internal logs to /home/sanchit_huggingface_co/wav2vec2-gpt2-wandb-grid-search/wandb/run-20220303_115825-26v3sm6w/logs/debug-internal.log
7
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_init.py:init():420] calling init triggers
8
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_init.py:init():425] wandb.init called with sweep_config: {}
9
+ config: {}
10
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [wandb_init.py:init():471] starting backend
11
+ 2022-03-03 11:58:25,976 INFO MainThread:276265 [backend.py:_multiprocessing_setup():99] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
12
+ 2022-03-03 11:58:26,044 INFO MainThread:276265 [backend.py:ensure_launched():219] starting backend process...
13
+ 2022-03-03 11:58:26,108 INFO MainThread:276265 [backend.py:ensure_launched():224] started backend process with pid: 276399
14
+ 2022-03-03 11:58:26,111 INFO MainThread:276265 [wandb_init.py:init():480] backend started and connected
15
+ 2022-03-03 11:58:26,121 INFO MainThread:276265 [wandb_init.py:init():550] updated telemetry
16
+ 2022-03-03 11:58:26,272 INFO MainThread:276265 [wandb_init.py:init():581] communicating current version
17
+ 2022-03-03 11:58:27,005 INFO MainThread:276265 [wandb_init.py:init():586] got version response upgrade_message: "wandb version 0.12.11 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
18
+
19
+ 2022-03-03 11:58:27,005 INFO MainThread:276265 [wandb_init.py:init():596] communicating run to backend with 30 second timeout
20
+ 2022-03-03 11:58:27,119 INFO MainThread:276265 [wandb_init.py:init():624] starting run threads in backend
21
+ 2022-03-03 11:58:27,277 INFO MainThread:276265 [wandb_run.py:_console_start():1827] atexit reg
22
+ 2022-03-03 11:58:27,278 INFO MainThread:276265 [wandb_run.py:_redirect():1701] redirect: SettingsConsole.REDIRECT
23
+ 2022-03-03 11:58:27,278 INFO MainThread:276265 [wandb_run.py:_redirect():1706] Redirecting console.
24
+ 2022-03-03 11:58:27,280 INFO MainThread:276265 [wandb_run.py:_redirect():1762] Redirects installed.
25
+ 2022-03-03 11:58:27,280 INFO MainThread:276265 [wandb_init.py:init():651] run started, returning control to user process
26
+ 2022-03-03 11:58:27,283 INFO MainThread:276265 [wandb_run.py:_config_callback():966] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'torch.float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 50, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['SpeechEncoderDecoderModel'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': None, 'pad_token_id': 50256, 'eos_token_id': 50256, 'sep_token_id': None, 'decoder_start_token_id': 50256, 'task_specific_params': None, 'problem_type': None, '_name_or_path': './', 'transformers_version': None, 'decoder': {'vocab_size': 50257, 'n_positions': 1024, 'n_embd': 1024, 'n_layer': 24, 'n_head': 16, 'n_inner': None, 'activation_function': 'gelu_new', 'resid_pdrop': 0.0, 'embd_pdrop': 0.0, 'attn_pdrop': 0.0, 'layer_norm_epsilon': 1e-05, 'initializer_range': 0.02, 'summary_type': 'cls_index', 'summary_use_proj': True, 'summary_activation': None, 'summary_first_dropout': 0.0, 'summary_proj_to_labels': True, 'scale_attn_weights': True, 'use_cache': False, 'scale_attn_by_inverse_layer_idx': False, 'reorder_and_upcast_attn': False, 'bos_token_id': 50256, 'eos_token_id': 50256, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': None, 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': True, 'cross_attention_hidden_size': None, 'add_cross_attention': True, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['GPT2LMHeadModel'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'pad_token_id': None, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': {'text-generation': {'do_sample': True, 'max_length': 50}}, 'problem_type': None, '_name_or_path': 'gpt2-medium', 'transformers_version': '4.17.0.dev0', 'n_ctx': 1024, 'n_special': 0, 'predict_special_tokens': True, 'model_type': 'gpt2'}, 'encoder': {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': None, 'use_bfloat16': False, 
'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 0, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-large-lv60', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'gradient_checkpointing': False, 'hidden_dropout_prob': 0.0, 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 32, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.0, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'sum', 'ctc_zero_infinity': False, 'add_adapter': True, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'model_type': 'wav2vec2'}, 'model_type': 'speech-encoder-decoder', 'processor_class': 'Wav2Vec2Processor', 'use_cache': False, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 12, 'per_device_eval_batch_size': 12, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 8, 'eval_accumulation_steps': 'None', 'learning_rate': 0.001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 1.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 
'logging_dir': './runs/Mar03_11-57-42_sanchit--v100', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 1, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 1, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': False, 'metric_for_best_model': 'None', 'greater_is_better': 'None', 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 40, 'generation_num_beams': 1, 'train_batch_size': 12, 'eval_batch_size': 12}
27
+ 2022-03-03 11:58:27,286 INFO MainThread:276265 [wandb_watch.py:watch():43] Watching
wandb/run-20220303_115825-26v3sm6w/run-26v3sm6w.wandb ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c66f783fea294ee09c89dbda2ac6a46a36b0e30ecc4e74423686f84e4d52f0e
3
+ size 36461036