Plim committed on
Commit
ab9abf3
1 Parent(s): 1fb68dc

Model save

.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,107 @@
+ {
+ "_name_or_path": "facebook/wav2vec2-xls-r-300m",
+ "activation_dropout": 0.1,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.0,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.0,
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.75,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 40,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.17.0.dev0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 41,
+ "xvector_output_dim": 512
+ }
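Note: the added config declares a Wav2Vec2ForCTC head on top of facebook/wav2vec2-xls-r-300m, with vocab_size 41, pad_token_id 40 and ctc_loss_reduction "mean". A minimal sketch of reloading the checkpoint saved by this commit from the training output directory (the local path "." and file names are assumptions, not part of the commit):

# Minimal sketch (not part of this commit): reload the saved checkpoint,
# assuming config.json and pytorch_model.bin from this commit sit in the
# current directory (the script's --output_dir="./").
from transformers import Wav2Vec2Config, Wav2Vec2ForCTC

config = Wav2Vec2Config.from_json_file("config.json")
assert config.vocab_size == 41 and config.pad_token_id == 40

model = Wav2Vec2ForCTC.from_pretrained(".", config=config)
model.eval()
print(model.config.ctc_loss_reduction)  # "mean", as in the config above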
.ipynb_checkpoints/run-checkpoint.sh CHANGED
@@ -1,4 +1,4 @@
- WANDB_PROJECT=xls-r-300-fr
+ export WANDB_PROJECT="xls-r-300-fr"
  python run_speech_recognition_ctc.py \
  --activation_dropout="0.1" \
  --dataset_name="mozilla-foundation/common_voice_7_0" \
@@ -23,7 +23,7 @@ python run_speech_recognition_ctc.py \
  --max_train_samples="1000" \
  --max_eval_samples="200" \
  --model_name_or_path="facebook/wav2vec2-xls-r-300m" \
- --num_train_epochs="0.2" \
+ --num_train_epochs="0.4" \
  --output_dir="./" \
  --overwrite_output_dir \
  --per_device_train_batch_size="8" \
.ipynb_checkpoints/vocab-checkpoint.json ADDED
@@ -0,0 +1 @@
+ {"'": 1, "a": 2, "b": 3, "c": 4, "d": 5, "e": 6, "f": 7, "g": 8, "h": 9, "i": 10, "j": 11, "k": 12, "l": 13, "m": 14, "n": 15, "o": 16, "p": 17, "q": 18, "r": 19, "s": 20, "t": 21, "u": 22, "v": 23, "w": 24, "x": 25, "y": 26, "z": 27, "à": 28, "â": 29, "ç": 30, "è": 31, "é": 32, "ê": 33, "î": 34, "ï": 35, "ô": 36, "ù": 37, "û": 38, "|": 0, "[UNK]": 39, "[PAD]": 40}
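Note: the vocabulary maps the apostrophe, a–z and eleven accented characters (à â ç è é ê î ï ô ù û) to ids, with "|" as the word delimiter (id 0), "[UNK]" as 39 and "[PAD]" as 40 — 41 entries in total, matching vocab_size and pad_token_id in the config above. A minimal sketch of building the CTC tokenizer from this file (the local file name is an assumption):

# Minimal sketch (not part of this commit): build the CTC tokenizer from the
# vocab.json added here; the special-token names mirror the JSON entries.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",
)
ids = tokenizer("c'est ça").input_ids
print(tokenizer.decode(ids))  # "c'est ça"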
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b042f9e3e1a3adceae4bf7dbadeee3279b0dc94f7582075e5635e30c922536ed
+ oid sha256:d1d7bd7ffd2ed6a01faf9f143c79eb1c4a1e163dd1a62f92c26af28850511bcd
  size 1262091761
run.sh CHANGED
@@ -1,4 +1,4 @@
- WANDB_PROJECT=xls-r-300-fr
+ export WANDB_PROJECT="xls-r-300-fr"
  python run_speech_recognition_ctc.py \
  --activation_dropout="0.1" \
  --dataset_name="mozilla-foundation/common_voice_7_0" \
@@ -23,7 +23,7 @@ python run_speech_recognition_ctc.py \
  --max_train_samples="1000" \
  --max_eval_samples="200" \
  --model_name_or_path="facebook/wav2vec2-xls-r-300m" \
- --num_train_epochs="0.2" \
+ --num_train_epochs="0.4" \
  --output_dir="./" \
  --overwrite_output_dir \
  --per_device_train_batch_size="8" \
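Note on the change above: a bare `WANDB_PROJECT=xls-r-300-fr` on its own line only sets a shell variable, so the `python` process launched on the next line does not inherit it and W&B would fall back to its default project name; `export WANDB_PROJECT="xls-r-300-fr"` puts the variable in the child process environment. The second change raises num_train_epochs from 0.2 to 0.4 for the smoke-test run. For reference, a sketch of the same override done from Python before training starts (illustrative, not part of this commit):

# Sketch: equivalent to `export WANDB_PROJECT=...` in run.sh, set from Python
# before the Trainer's W&B integration calls wandb.init().
import os

os.environ.setdefault("WANDB_PROJECT", "xls-r-300-fr")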
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6e91b39976977c0a605521323547644a3699b13dbfd044f77607812c3c5cc2b4
+ oid sha256:a4f6b5e530f353910710ef95150e349ddf6f70545b8095619803d7da46090983
  size 2991
wandb/debug-internal.log CHANGED
@@ -1 +1 @@
- run-20220130_224738-2uzt3kt1/logs/debug-internal.log
+ run-20220130_230018-ktkg6ghu/logs/debug-internal.log
wandb/debug.log CHANGED
@@ -1 +1 @@
- run-20220130_224738-2uzt3kt1/logs/debug.log
+ run-20220130_230018-ktkg6ghu/logs/debug.log
wandb/latest-run CHANGED
@@ -1 +1 @@
- run-20220130_224738-2uzt3kt1
+ run-20220130_230018-ktkg6ghu
wandb/run-20220130_224738-2uzt3kt1/files/config.yaml CHANGED
@@ -69,7 +69,13 @@ _wandb:
  - 1
  - 5
  - 11
+ 2:
+ - 1
+ - 5
+ - 11
  3:
+ - 1
+ - 7
  - 13
  4: 3.8.8
  5: 0.12.9
wandb/run-20220130_224738-2uzt3kt1/files/output.log CHANGED
@@ -57,3 +57,10 @@ Configuration saved in ./preprocessor_config.json
  01/30/2022 22:49:41 - WARNING - huggingface_hub.repository - To https://huggingface.co/Plim/xls-r-300m-fr
  77260d3..45cb5d4 main -> main
  To https://huggingface.co/Plim/xls-r-300m-fr
+ 77260d3..45cb5d4 main -> main
+ Dropping the following result as it does not have all the necessary fields:
+ {}
+ 01/30/2022 22:49:47 - WARNING - huggingface_hub.repository - To https://huggingface.co/Plim/xls-r-300m-fr
+ 45cb5d4..1fb68dc main -> main
+ To https://huggingface.co/Plim/xls-r-300m-fr
+ 45cb5d4..1fb68dc main -> main
wandb/run-20220130_224738-2uzt3kt1/files/wandb-summary.json CHANGED
@@ -1 +1 @@
- {"train/train_runtime": 12.893, "train/train_samples_per_second": 15.512, "train/train_steps_per_second": 0.233, "train/total_flos": 2.67196543170048e+16, "train/train_loss": 12.496875762939453, "train/epoch": 0.19, "train/global_step": 3, "_runtime": 100, "_timestamp": 1643582958, "_step": 1, "eval/loss": 16.913198471069336, "eval/wer": 2.3629935179728934, "eval/runtime": 8.6705, "eval/samples_per_second": 23.067, "eval/steps_per_second": 2.883}
+ {"train/train_runtime": 12.893, "train/train_samples_per_second": 15.512, "train/train_steps_per_second": 0.233, "train/total_flos": 2.67196543170048e+16, "train/train_loss": 12.496875762939453, "train/epoch": 0.19, "train/global_step": 3, "_runtime": 100, "_timestamp": 1643582958, "_step": 1, "eval/loss": 16.913198471069336, "eval/wer": 2.3629935179728934, "eval/runtime": 8.6705, "eval/samples_per_second": 23.067, "eval/steps_per_second": 2.883, "_wandb": {"runtime": 133}}
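Note: this summary still reflects the short 0.2-epoch run (3 optimizer steps, epoch 0.19), so an eval/wer above 1.0 (≈2.36) is unsurprising: a barely trained CTC head emits many insertions, and WER is not bounded by 1. A sketch of the metric as it is commonly computed with jiwer (pinned as jiwer==2.3.0 in this run's requirements.txt; the strings below are illustrative only):

# Sketch (illustrative): word error rate as reported under eval/wer.
# WER = (substitutions + deletions + insertions) / reference word count,
# so it can exceed 1.0 when the hypothesis contains many extra words.
import jiwer

reference = "bonjour tout le monde"
hypothesis = "bon jour tu le mon de et et"
print(jiwer.wer(reference, hypothesis))  # greater than 1.0 here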
wandb/run-20220130_224738-2uzt3kt1/logs/debug-internal.log CHANGED
@@ -92,3 +92,119 @@
92
  2022-01-30 22:49:41,814 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: stop_status
93
  2022-01-30 22:49:42,687 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/config.yaml
94
  2022-01-30 22:49:42,688 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
95
+ 2022-01-30 22:49:46,690 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
96
+ 2022-01-30 22:49:48,691 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
97
+ 2022-01-30 22:49:52,753 DEBUG SenderThread:23196 [sender.py:send():234] send: telemetry
98
+ 2022-01-30 22:49:52,754 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
99
+ 2022-01-30 22:49:52,755 DEBUG SenderThread:23196 [sender.py:send():234] send: exit
100
+ 2022-01-30 22:49:52,755 INFO SenderThread:23196 [sender.py:send_exit():366] handling exit code: 0
101
+ 2022-01-30 22:49:52,756 INFO SenderThread:23196 [sender.py:send_exit():368] handling runtime: 133
102
+ 2022-01-30 22:49:52,757 INFO SenderThread:23196 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
103
+ 2022-01-30 22:49:52,757 INFO SenderThread:23196 [sender.py:send_exit():374] send defer
104
+ 2022-01-30 22:49:52,757 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
105
+ 2022-01-30 22:49:52,759 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
106
+ 2022-01-30 22:49:52,759 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 0
107
+ 2022-01-30 22:49:52,759 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
108
+ 2022-01-30 22:49:52,759 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 0
109
+ 2022-01-30 22:49:52,760 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 1
110
+ 2022-01-30 22:49:52,760 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
111
+ 2022-01-30 22:49:52,760 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 1
112
+ 2022-01-30 22:49:52,860 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
113
+ 2022-01-30 22:49:52,860 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 1
114
+ 2022-01-30 22:49:52,860 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 2
115
+ 2022-01-30 22:49:52,862 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
116
+ 2022-01-30 22:49:52,862 DEBUG SenderThread:23196 [sender.py:send():234] send: stats
117
+ 2022-01-30 22:49:52,863 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
118
+ 2022-01-30 22:49:52,864 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
119
+ 2022-01-30 22:49:52,864 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 2
120
+ 2022-01-30 22:49:52,865 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
121
+ 2022-01-30 22:49:52,865 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 2
122
+ 2022-01-30 22:49:52,865 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 3
123
+ 2022-01-30 22:49:52,866 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
124
+ 2022-01-30 22:49:52,866 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 3
125
+ 2022-01-30 22:49:52,869 DEBUG SenderThread:23196 [sender.py:send():234] send: summary
126
+ 2022-01-30 22:49:52,870 INFO SenderThread:23196 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
127
+ 2022-01-30 22:49:52,870 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
128
+ 2022-01-30 22:49:52,871 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 3
129
+ 2022-01-30 22:49:52,871 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 4
130
+ 2022-01-30 22:49:52,871 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
131
+ 2022-01-30 22:49:52,872 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 4
132
+ 2022-01-30 22:49:52,872 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
133
+ 2022-01-30 22:49:52,872 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 4
134
+ 2022-01-30 22:49:52,966 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
135
+ 2022-01-30 22:49:53,114 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 5
136
+ 2022-01-30 22:49:53,114 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
137
+ 2022-01-30 22:49:53,116 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
138
+ 2022-01-30 22:49:53,116 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 5
139
+ 2022-01-30 22:49:53,116 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
140
+ 2022-01-30 22:49:53,116 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 5
141
+ 2022-01-30 22:49:53,116 INFO SenderThread:23196 [dir_watcher.py:finish():283] shutting down directory watcher
142
+ 2022-01-30 22:49:53,218 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
143
+ 2022-01-30 22:49:53,695 INFO SenderThread:23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/config.yaml
144
+ 2022-01-30 22:49:53,696 INFO SenderThread:23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
145
+ 2022-01-30 22:49:53,696 INFO SenderThread:23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/wandb-summary.json
146
+ 2022-01-30 22:49:53,697 INFO SenderThread:23196 [dir_watcher.py:finish():313] scan: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files
147
+ 2022-01-30 22:49:53,697 INFO SenderThread:23196 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/conda-environment.yaml conda-environment.yaml
148
+ 2022-01-30 22:49:53,698 INFO SenderThread:23196 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/wandb-metadata.json wandb-metadata.json
149
+ 2022-01-30 22:49:53,698 INFO SenderThread:23196 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log output.log
150
+ 2022-01-30 22:49:53,698 INFO SenderThread:23196 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/requirements.txt requirements.txt
151
+ 2022-01-30 22:49:53,699 INFO SenderThread:23196 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/config.yaml config.yaml
152
+ 2022-01-30 22:49:53,700 INFO SenderThread:23196 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/wandb-summary.json wandb-summary.json
153
+ 2022-01-30 22:49:53,700 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 6
154
+ 2022-01-30 22:49:53,701 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
155
+ 2022-01-30 22:49:53,713 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
156
+ 2022-01-30 22:49:53,720 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 6
157
+ 2022-01-30 22:49:53,724 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
158
+ 2022-01-30 22:49:53,729 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 6
159
+ 2022-01-30 22:49:53,729 INFO SenderThread:23196 [file_pusher.py:finish():177] shutting down file pusher
160
+ 2022-01-30 22:49:53,804 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
161
+ 2022-01-30 22:49:53,804 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
162
+ 2022-01-30 22:49:53,909 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
163
+ 2022-01-30 22:49:53,910 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
164
+ 2022-01-30 22:49:54,015 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
165
+ 2022-01-30 22:49:54,016 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
166
+ 2022-01-30 22:49:54,119 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
167
+ 2022-01-30 22:49:54,119 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
168
+ 2022-01-30 22:49:54,223 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
169
+ 2022-01-30 22:49:54,223 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
170
+ 2022-01-30 22:49:54,253 INFO Thread-14 :23196 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/config.yaml
171
+ 2022-01-30 22:49:54,257 INFO Thread-13 :23196 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/requirements.txt
172
+ 2022-01-30 22:49:54,291 INFO Thread-15 :23196 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/wandb-summary.json
173
+ 2022-01-30 22:49:54,327 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
174
+ 2022-01-30 22:49:54,327 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
175
+ 2022-01-30 22:49:54,339 INFO Thread-12 :23196 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
176
+ 2022-01-30 22:49:54,431 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
177
+ 2022-01-30 22:49:54,431 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
178
+ 2022-01-30 22:49:54,534 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
179
+ 2022-01-30 22:49:54,535 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
180
+ 2022-01-30 22:49:54,540 INFO Thread-7 :23196 [sender.py:transition_state():387] send defer: 7
181
+ 2022-01-30 22:49:54,541 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
182
+ 2022-01-30 22:49:54,541 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 7
183
+ 2022-01-30 22:49:54,542 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
184
+ 2022-01-30 22:49:54,542 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 7
185
+ 2022-01-30 22:49:54,638 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
186
+ 2022-01-30 22:49:56,342 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 8
187
+ 2022-01-30 22:49:56,343 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
188
+ 2022-01-30 22:49:56,344 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
189
+ 2022-01-30 22:49:56,345 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 8
190
+ 2022-01-30 22:49:56,345 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
191
+ 2022-01-30 22:49:56,345 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 8
192
+ 2022-01-30 22:49:56,346 INFO SenderThread:23196 [sender.py:transition_state():387] send defer: 9
193
+ 2022-01-30 22:49:56,347 DEBUG SenderThread:23196 [sender.py:send():234] send: final
194
+ 2022-01-30 22:49:56,348 DEBUG SenderThread:23196 [sender.py:send():234] send: footer
195
+ 2022-01-30 22:49:56,348 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: defer
196
+ 2022-01-30 22:49:56,348 INFO HandlerThread:23196 [handler.py:handle_request_defer():147] handle defer: 9
197
+ 2022-01-30 22:49:56,349 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: defer
198
+ 2022-01-30 22:49:56,349 INFO SenderThread:23196 [sender.py:send_request_defer():383] handle sender defer: 9
199
+ 2022-01-30 22:49:56,447 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: poll_exit
200
+ 2022-01-30 22:49:56,447 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: poll_exit
201
+ 2022-01-30 22:49:56,448 INFO SenderThread:23196 [file_pusher.py:join():182] waiting for file pusher
202
+ 2022-01-30 22:49:56,761 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: get_summary
203
+ 2022-01-30 22:49:56,763 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: sampled_history
204
+ 2022-01-30 22:49:56,766 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: shutdown
205
+ 2022-01-30 22:49:56,766 INFO HandlerThread:23196 [handler.py:finish():731] shutting down handler
206
+ 2022-01-30 22:49:57,348 INFO WriterThread:23196 [datastore.py:close():281] close: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/run-2uzt3kt1.wandb
207
+ 2022-01-30 22:49:57,758 INFO SenderThread:23196 [sender.py:finish():1070] shutting down sender
208
+ 2022-01-30 22:49:57,759 INFO SenderThread:23196 [file_pusher.py:finish():177] shutting down file pusher
209
+ 2022-01-30 22:49:57,759 INFO SenderThread:23196 [file_pusher.py:join():182] waiting for file pusher
210
+ 2022-01-30 22:49:57,763 INFO MainThread:23196 [internal.py:handle_exit():77] Internal process exited
wandb/run-20220130_224738-2uzt3kt1/logs/debug.log CHANGED
@@ -22,3 +22,125 @@ config: {}
22
  2022-01-30 22:47:40,203 INFO MainThread:22602 [wandb_init.py:init():633] run started, returning control to user process
23
  2022-01-30 22:47:40,206 INFO MainThread:22602 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 40, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.1, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 41, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.75, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 64, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 8, 'eval_accumulation_steps': 'None', 'learning_rate': 7.5e-05, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 0.2, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 
0.0, 'warmup_steps': 2000, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Jan30_22-46-41_job-3261699b-76eb-4c28-8419-66a66c5c9199', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 3, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 8, 'eval_batch_size': 8}
24
  2022-01-30 22:47:40,212 INFO MainThread:22602 [wandb_watch.py:watch():43] Watching
25
+ 2022-01-30 22:49:50,144 INFO MainThread:22602 [wandb_run.py:_atexit_cleanup():1780] got exitcode: 0
26
+ 2022-01-30 22:49:50,148 INFO MainThread:22602 [wandb_run.py:_restore():1752] restore
27
+ 2022-01-30 22:49:52,759 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
28
+ wandb_count: 1
29
+ }
30
+ pusher_stats {
31
+ uploaded_bytes: 2099
32
+ total_bytes: 2099
33
+ }
34
+
35
+ 2022-01-30 22:49:52,864 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
36
+ wandb_count: 1
37
+ }
38
+ pusher_stats {
39
+ uploaded_bytes: 2099
40
+ total_bytes: 2099
41
+ }
42
+
43
+ 2022-01-30 22:49:53,116 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
44
+ wandb_count: 1
45
+ }
46
+ pusher_stats {
47
+ uploaded_bytes: 2099
48
+ total_bytes: 2099
49
+ }
50
+
51
+ 2022-01-30 22:49:53,702 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
52
+ wandb_count: 4
53
+ }
54
+ pusher_stats {
55
+ uploaded_bytes: 2099
56
+ total_bytes: 21650
57
+ }
58
+
59
+ 2022-01-30 22:49:53,806 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
60
+ wandb_count: 5
61
+ }
62
+ pusher_stats {
63
+ uploaded_bytes: 2099
64
+ total_bytes: 22128
65
+ }
66
+
67
+ 2022-01-30 22:49:53,914 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
68
+ wandb_count: 5
69
+ }
70
+ pusher_stats {
71
+ uploaded_bytes: 2099
72
+ total_bytes: 22128
73
+ }
74
+
75
+ 2022-01-30 22:49:54,017 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
76
+ wandb_count: 5
77
+ }
78
+ pusher_stats {
79
+ uploaded_bytes: 22128
80
+ total_bytes: 22128
81
+ }
82
+
83
+ 2022-01-30 22:49:54,121 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
84
+ wandb_count: 5
85
+ }
86
+ pusher_stats {
87
+ uploaded_bytes: 22128
88
+ total_bytes: 22128
89
+ }
90
+
91
+ 2022-01-30 22:49:54,225 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
92
+ wandb_count: 5
93
+ }
94
+ pusher_stats {
95
+ uploaded_bytes: 22128
96
+ total_bytes: 22128
97
+ }
98
+
99
+ 2022-01-30 22:49:54,329 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
100
+ wandb_count: 5
101
+ }
102
+ pusher_stats {
103
+ uploaded_bytes: 22128
104
+ total_bytes: 22128
105
+ }
106
+
107
+ 2022-01-30 22:49:54,433 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
108
+ wandb_count: 5
109
+ }
110
+ pusher_stats {
111
+ uploaded_bytes: 22128
112
+ total_bytes: 22128
113
+ }
114
+
115
+ 2022-01-30 22:49:54,537 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
116
+ wandb_count: 5
117
+ }
118
+ pusher_stats {
119
+ uploaded_bytes: 22128
120
+ total_bytes: 22128
121
+ }
122
+
123
+ 2022-01-30 22:49:56,345 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
124
+ wandb_count: 5
125
+ }
126
+ pusher_stats {
127
+ uploaded_bytes: 22128
128
+ total_bytes: 22128
129
+ }
130
+
131
+ 2022-01-30 22:49:56,759 INFO MainThread:22602 [wandb_run.py:_wait_for_finish():1912] got exit ret: done: true
132
+ exit_result {
133
+ }
134
+ file_counts {
135
+ wandb_count: 5
136
+ }
137
+ pusher_stats {
138
+ uploaded_bytes: 22128
139
+ total_bytes: 22128
140
+ }
141
+ local_info {
142
+ }
143
+
144
+ 2022-01-30 22:49:57,908 INFO MainThread:22602 [wandb_run.py:_append_history():2130] rendering history
145
+ 2022-01-30 22:49:57,909 INFO MainThread:22602 [wandb_run.py:_append_summary():2085] rendering summary
146
+ 2022-01-30 22:49:57,909 INFO MainThread:22602 [wandb_run.py:_append_files():2180] logging synced files
wandb/run-20220130_224738-2uzt3kt1/run-2uzt3kt1.wandb CHANGED
Binary files a/wandb/run-20220130_224738-2uzt3kt1/run-2uzt3kt1.wandb and b/wandb/run-20220130_224738-2uzt3kt1/run-2uzt3kt1.wandb differ
wandb/run-20220130_230018-ktkg6ghu/files/conda-environment.yaml ADDED
File without changes
wandb/run-20220130_230018-ktkg6ghu/files/config.yaml ADDED
@@ -0,0 +1,672 @@
1
+ wandb_version: 1
2
+
3
+ _n_gpu:
4
+ desc: null
5
+ value: 1
6
+ _name_or_path:
7
+ desc: null
8
+ value: facebook/wav2vec2-xls-r-300m
9
+ _wandb:
10
+ desc: null
11
+ value:
12
+ cli_version: 0.12.9
13
+ framework: huggingface
14
+ huggingface_version: 4.17.0.dev0
15
+ is_jupyter_run: false
16
+ is_kaggle_kernel: false
17
+ m:
18
+ - 1: train/global_step
19
+ 6:
20
+ - 3
21
+ - 1: train/train_runtime
22
+ 5: 1
23
+ 6:
24
+ - 1
25
+ - 1: train/train_samples_per_second
26
+ 5: 1
27
+ 6:
28
+ - 1
29
+ - 1: train/train_steps_per_second
30
+ 5: 1
31
+ 6:
32
+ - 1
33
+ - 1: train/total_flos
34
+ 5: 1
35
+ 6:
36
+ - 1
37
+ - 1: train/train_loss
38
+ 5: 1
39
+ 6:
40
+ - 1
41
+ - 1: train/epoch
42
+ 5: 1
43
+ 6:
44
+ - 1
45
+ python_version: 3.8.8
46
+ start_time: 1643583619
47
+ t:
48
+ 1:
49
+ - 1
50
+ - 5
51
+ - 11
52
+ 3:
53
+ - 13
54
+ 4: 3.8.8
55
+ 5: 0.12.9
56
+ 6: 4.17.0.dev0
57
+ 8:
58
+ - 5
59
+ activation_dropout:
60
+ desc: null
61
+ value: 0.1
62
+ adafactor:
63
+ desc: null
64
+ value: false
65
+ adam_beta1:
66
+ desc: null
67
+ value: 0.9
68
+ adam_beta2:
69
+ desc: null
70
+ value: 0.999
71
+ adam_epsilon:
72
+ desc: null
73
+ value: 1.0e-08
74
+ adapter_kernel_size:
75
+ desc: null
76
+ value: 3
77
+ adapter_stride:
78
+ desc: null
79
+ value: 2
80
+ add_adapter:
81
+ desc: null
82
+ value: false
83
+ add_cross_attention:
84
+ desc: null
85
+ value: false
86
+ apply_spec_augment:
87
+ desc: null
88
+ value: true
89
+ architectures:
90
+ desc: null
91
+ value:
92
+ - Wav2Vec2ForPreTraining
93
+ attention_dropout:
94
+ desc: null
95
+ value: 0.0
96
+ bad_words_ids:
97
+ desc: null
98
+ value: null
99
+ bf16:
100
+ desc: null
101
+ value: false
102
+ bf16_full_eval:
103
+ desc: null
104
+ value: false
105
+ bos_token_id:
106
+ desc: null
107
+ value: 1
108
+ chunk_size_feed_forward:
109
+ desc: null
110
+ value: 0
111
+ classifier_proj_size:
112
+ desc: null
113
+ value: 256
114
+ codevector_dim:
115
+ desc: null
116
+ value: 768
117
+ contrastive_logits_temperature:
118
+ desc: null
119
+ value: 0.1
120
+ conv_bias:
121
+ desc: null
122
+ value: true
123
+ conv_dim:
124
+ desc: null
125
+ value:
126
+ - 512
127
+ - 512
128
+ - 512
129
+ - 512
130
+ - 512
131
+ - 512
132
+ - 512
133
+ conv_kernel:
134
+ desc: null
135
+ value:
136
+ - 10
137
+ - 3
138
+ - 3
139
+ - 3
140
+ - 3
141
+ - 2
142
+ - 2
143
+ conv_stride:
144
+ desc: null
145
+ value:
146
+ - 5
147
+ - 2
148
+ - 2
149
+ - 2
150
+ - 2
151
+ - 2
152
+ - 2
153
+ cross_attention_hidden_size:
154
+ desc: null
155
+ value: null
156
+ ctc_loss_reduction:
157
+ desc: null
158
+ value: mean
159
+ ctc_zero_infinity:
160
+ desc: null
161
+ value: false
162
+ dataloader_drop_last:
163
+ desc: null
164
+ value: false
165
+ dataloader_num_workers:
166
+ desc: null
167
+ value: 0
168
+ dataloader_pin_memory:
169
+ desc: null
170
+ value: true
171
+ ddp_bucket_cap_mb:
172
+ desc: null
173
+ value: None
174
+ ddp_find_unused_parameters:
175
+ desc: null
176
+ value: None
177
+ debug:
178
+ desc: null
179
+ value: '[]'
180
+ decoder_start_token_id:
181
+ desc: null
182
+ value: null
183
+ deepspeed:
184
+ desc: null
185
+ value: None
186
+ disable_tqdm:
187
+ desc: null
188
+ value: false
189
+ diversity_loss_weight:
190
+ desc: null
191
+ value: 0.1
192
+ diversity_penalty:
193
+ desc: null
194
+ value: 0.0
195
+ do_eval:
196
+ desc: null
197
+ value: true
198
+ do_predict:
199
+ desc: null
200
+ value: false
201
+ do_sample:
202
+ desc: null
203
+ value: false
204
+ do_stable_layer_norm:
205
+ desc: null
206
+ value: true
207
+ do_train:
208
+ desc: null
209
+ value: true
210
+ early_stopping:
211
+ desc: null
212
+ value: false
213
+ encoder_no_repeat_ngram_size:
214
+ desc: null
215
+ value: 0
216
+ eos_token_id:
217
+ desc: null
218
+ value: 2
219
+ eval_accumulation_steps:
220
+ desc: null
221
+ value: None
222
+ eval_batch_size:
223
+ desc: null
224
+ value: 8
225
+ eval_steps:
226
+ desc: null
227
+ value: 500
228
+ evaluation_strategy:
229
+ desc: null
230
+ value: steps
231
+ feat_extract_activation:
232
+ desc: null
233
+ value: gelu
234
+ feat_extract_dropout:
235
+ desc: null
236
+ value: 0.0
237
+ feat_extract_norm:
238
+ desc: null
239
+ value: layer
240
+ feat_proj_dropout:
241
+ desc: null
242
+ value: 0.0
243
+ feat_quantizer_dropout:
244
+ desc: null
245
+ value: 0.0
246
+ final_dropout:
247
+ desc: null
248
+ value: 0.0
249
+ finetuning_task:
250
+ desc: null
251
+ value: null
252
+ forced_bos_token_id:
253
+ desc: null
254
+ value: null
255
+ forced_eos_token_id:
256
+ desc: null
257
+ value: null
258
+ fp16:
259
+ desc: null
260
+ value: true
261
+ fp16_backend:
262
+ desc: null
263
+ value: auto
264
+ fp16_full_eval:
265
+ desc: null
266
+ value: false
267
+ fp16_opt_level:
268
+ desc: null
269
+ value: O1
270
+ gradient_accumulation_steps:
271
+ desc: null
272
+ value: 8
273
+ gradient_checkpointing:
274
+ desc: null
275
+ value: true
276
+ greater_is_better:
277
+ desc: null
278
+ value: false
279
+ group_by_length:
280
+ desc: null
281
+ value: true
282
+ half_precision_backend:
283
+ desc: null
284
+ value: amp
285
+ hidden_act:
286
+ desc: null
287
+ value: gelu
288
+ hidden_dropout:
289
+ desc: null
290
+ value: 0.0
291
+ hidden_size:
292
+ desc: null
293
+ value: 1024
294
+ hub_model_id:
295
+ desc: null
296
+ value: None
297
+ hub_strategy:
298
+ desc: null
299
+ value: every_save
300
+ hub_token:
301
+ desc: null
302
+ value: <HUB_TOKEN>
303
+ id2label:
304
+ desc: null
305
+ value:
306
+ '0': LABEL_0
307
+ '1': LABEL_1
308
+ ignore_data_skip:
309
+ desc: null
310
+ value: false
311
+ initializer_range:
312
+ desc: null
313
+ value: 0.02
314
+ intermediate_size:
315
+ desc: null
316
+ value: 4096
317
+ is_decoder:
318
+ desc: null
319
+ value: false
320
+ is_encoder_decoder:
321
+ desc: null
322
+ value: false
323
+ label2id:
324
+ desc: null
325
+ value:
326
+ LABEL_0: 0
327
+ LABEL_1: 1
328
+ label_names:
329
+ desc: null
330
+ value: None
331
+ label_smoothing_factor:
332
+ desc: null
333
+ value: 0.0
334
+ layer_norm_eps:
335
+ desc: null
336
+ value: 1.0e-05
337
+ layerdrop:
338
+ desc: null
339
+ value: 0.0
340
+ learning_rate:
341
+ desc: null
342
+ value: 7.5e-05
343
+ length_column_name:
344
+ desc: null
345
+ value: input_length
346
+ length_penalty:
347
+ desc: null
348
+ value: 1.0
349
+ load_best_model_at_end:
350
+ desc: null
351
+ value: true
352
+ local_rank:
353
+ desc: null
354
+ value: -1
355
+ log_level:
356
+ desc: null
357
+ value: -1
358
+ log_level_replica:
359
+ desc: null
360
+ value: -1
361
+ log_on_each_node:
362
+ desc: null
363
+ value: true
364
+ logging_dir:
365
+ desc: null
366
+ value: ./runs/Jan30_22-59-56_job-3261699b-76eb-4c28-8419-66a66c5c9199
367
+ logging_first_step:
368
+ desc: null
369
+ value: false
370
+ logging_nan_inf_filter:
371
+ desc: null
372
+ value: true
373
+ logging_steps:
374
+ desc: null
375
+ value: 100
376
+ logging_strategy:
377
+ desc: null
378
+ value: steps
379
+ lr_scheduler_type:
380
+ desc: null
381
+ value: linear
382
+ mask_feature_length:
383
+ desc: null
384
+ value: 64
385
+ mask_feature_min_masks:
386
+ desc: null
387
+ value: 0
388
+ mask_feature_prob:
389
+ desc: null
390
+ value: 0.25
391
+ mask_time_length:
392
+ desc: null
393
+ value: 10
394
+ mask_time_min_masks:
395
+ desc: null
396
+ value: 2
397
+ mask_time_prob:
398
+ desc: null
399
+ value: 0.75
400
+ max_grad_norm:
401
+ desc: null
402
+ value: 1.0
403
+ max_length:
404
+ desc: null
405
+ value: 20
406
+ max_steps:
407
+ desc: null
408
+ value: -1
409
+ metric_for_best_model:
410
+ desc: null
411
+ value: loss
412
+ min_length:
413
+ desc: null
414
+ value: 0
415
+ model_type:
416
+ desc: null
417
+ value: wav2vec2
418
+ mp_parameters:
419
+ desc: null
420
+ value: ''
421
+ no_cuda:
422
+ desc: null
423
+ value: false
424
+ no_repeat_ngram_size:
425
+ desc: null
426
+ value: 0
427
+ num_adapter_layers:
428
+ desc: null
429
+ value: 3
430
+ num_attention_heads:
431
+ desc: null
432
+ value: 16
433
+ num_beam_groups:
434
+ desc: null
435
+ value: 1
436
+ num_beams:
437
+ desc: null
438
+ value: 1
439
+ num_codevector_groups:
440
+ desc: null
441
+ value: 2
442
+ num_codevectors_per_group:
443
+ desc: null
444
+ value: 320
445
+ num_conv_pos_embedding_groups:
446
+ desc: null
447
+ value: 16
448
+ num_conv_pos_embeddings:
449
+ desc: null
450
+ value: 128
451
+ num_feat_extract_layers:
452
+ desc: null
453
+ value: 7
454
+ num_hidden_layers:
455
+ desc: null
456
+ value: 24
457
+ num_negatives:
458
+ desc: null
459
+ value: 100
460
+ num_return_sequences:
461
+ desc: null
462
+ value: 1
463
+ num_train_epochs:
464
+ desc: null
465
+ value: 0.4
466
+ optim:
467
+ desc: null
468
+ value: adamw_hf
469
+ output_attentions:
470
+ desc: null
471
+ value: false
472
+ output_dir:
473
+ desc: null
474
+ value: ./
475
+ output_hidden_size:
476
+ desc: null
477
+ value: 1024
478
+ output_hidden_states:
479
+ desc: null
480
+ value: false
481
+ output_scores:
482
+ desc: null
483
+ value: false
484
+ overwrite_output_dir:
485
+ desc: null
486
+ value: true
487
+ pad_token_id:
488
+ desc: null
489
+ value: 40
490
+ past_index:
491
+ desc: null
492
+ value: -1
493
+ per_device_eval_batch_size:
494
+ desc: null
495
+ value: 8
496
+ per_device_train_batch_size:
497
+ desc: null
498
+ value: 8
499
+ per_gpu_eval_batch_size:
500
+ desc: null
501
+ value: None
502
+ per_gpu_train_batch_size:
503
+ desc: null
504
+ value: None
505
+ prediction_loss_only:
506
+ desc: null
507
+ value: false
508
+ prefix:
509
+ desc: null
510
+ value: null
511
+ problem_type:
512
+ desc: null
513
+ value: null
514
+ proj_codevector_dim:
515
+ desc: null
516
+ value: 768
517
+ pruned_heads:
518
+ desc: null
519
+ value: {}
520
+ push_to_hub:
521
+ desc: null
522
+ value: true
523
+ push_to_hub_model_id:
524
+ desc: null
525
+ value: None
526
+ push_to_hub_organization:
527
+ desc: null
528
+ value: None
529
+ push_to_hub_token:
530
+ desc: null
531
+ value: <PUSH_TO_HUB_TOKEN>
532
+ remove_invalid_values:
533
+ desc: null
534
+ value: false
535
+ remove_unused_columns:
536
+ desc: null
537
+ value: true
538
+ repetition_penalty:
539
+ desc: null
540
+ value: 1.0
541
+ report_to:
542
+ desc: null
543
+ value: '[''wandb'']'
544
+ resume_from_checkpoint:
545
+ desc: null
546
+ value: None
547
+ return_dict:
548
+ desc: null
549
+ value: true
550
+ return_dict_in_generate:
551
+ desc: null
552
+ value: false
553
+ run_name:
554
+ desc: null
555
+ value: ./
556
+ save_on_each_node:
557
+ desc: null
558
+ value: false
559
+ save_steps:
560
+ desc: null
561
+ value: 500
562
+ save_strategy:
563
+ desc: null
564
+ value: steps
565
+ save_total_limit:
566
+ desc: null
567
+ value: 3
568
+ seed:
569
+ desc: null
570
+ value: 42
571
+ sep_token_id:
572
+ desc: null
573
+ value: null
574
+ sharded_ddp:
575
+ desc: null
576
+ value: '[]'
577
+ skip_memory_metrics:
578
+ desc: null
579
+ value: true
580
+ task_specific_params:
581
+ desc: null
582
+ value: null
583
+ tdnn_dilation:
584
+ desc: null
585
+ value:
586
+ - 1
587
+ - 2
588
+ - 3
589
+ - 1
590
+ - 1
591
+ tdnn_dim:
592
+ desc: null
593
+ value:
594
+ - 512
595
+ - 512
596
+ - 512
597
+ - 512
598
+ - 1500
599
+ tdnn_kernel:
600
+ desc: null
601
+ value:
602
+ - 5
603
+ - 3
604
+ - 3
605
+ - 1
606
+ - 1
607
+ temperature:
608
+ desc: null
609
+ value: 1.0
610
+ tf32:
611
+ desc: null
612
+ value: None
613
+ tie_encoder_decoder:
614
+ desc: null
615
+ value: false
616
+ tie_word_embeddings:
617
+ desc: null
618
+ value: true
619
+ tokenizer_class:
620
+ desc: null
621
+ value: null
622
+ top_k:
623
+ desc: null
624
+ value: 50
625
+ top_p:
626
+ desc: null
627
+ value: 1.0
628
+ torch_dtype:
629
+ desc: null
630
+ value: float32
631
+ torchscript:
632
+ desc: null
633
+ value: false
634
+ tpu_metrics_debug:
635
+ desc: null
636
+ value: false
637
+ tpu_num_cores:
638
+ desc: null
639
+ value: None
640
+ train_batch_size:
641
+ desc: null
642
+ value: 8
643
+ transformers_version:
644
+ desc: null
645
+ value: 4.17.0.dev0
646
+ use_bfloat16:
647
+ desc: null
648
+ value: false
649
+ use_legacy_prediction_loop:
650
+ desc: null
651
+ value: false
652
+ use_weighted_layer_sum:
653
+ desc: null
654
+ value: false
655
+ vocab_size:
656
+ desc: null
657
+ value: 41
658
+ warmup_ratio:
659
+ desc: null
660
+ value: 0.0
661
+ warmup_steps:
662
+ desc: null
663
+ value: 2000
664
+ weight_decay:
665
+ desc: null
666
+ value: 0.0
667
+ xpu_backend:
668
+ desc: null
669
+ value: None
670
+ xvector_output_dim:
671
+ desc: null
672
+ value: 512
wandb/run-20220130_230018-ktkg6ghu/files/output.log ADDED
@@ -0,0 +1,17 @@
+
+
+
+
+
+
+ 83%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████▌ | 5/6 [00:18<00:03, 3.64s/it]
+ 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 6/6 [00:20<00:00, 3.23s/it]
+ Training completed. Do not forget to share your model on huggingface.co/models =)
+ 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 6/6 [00:20<00:00, 3.47s/it]
+ Saving model checkpoint to ./
+ Configuration saved in ./config.json
+ Model weights saved in ./pytorch_model.bin
+ Configuration saved in ./preprocessor_config.json
+ Saving model checkpoint to ./
+ Configuration saved in ./config.json
+ Model weights saved in ./pytorch_model.bin
wandb/run-20220130_230018-ktkg6ghu/files/requirements.txt ADDED
@@ -0,0 +1,180 @@
1
+ aiohttp==3.8.1
2
+ aiosignal==1.2.0
3
+ analytics-python==1.4.0
4
+ anyio==3.5.0
5
+ appdirs==1.4.4
6
+ argon2-cffi-bindings==21.2.0
7
+ argon2-cffi==21.3.0
8
+ asgiref==3.5.0
9
+ asttokens==2.0.5
10
+ async-timeout==4.0.2
11
+ attrs==21.4.0
12
+ audioread==2.1.9
13
+ backcall==0.2.0
14
+ backoff==1.10.0
15
+ bcrypt==3.2.0
16
+ beautifulsoup4==4.9.3
17
+ black==21.12b0
18
+ bleach==4.1.0
19
+ brotlipy==0.7.0
20
+ certifi==2020.12.5
21
+ cffi==1.14.3
22
+ chardet==3.0.4
23
+ charset-normalizer==2.0.10
24
+ click==8.0.3
25
+ conda-build==3.21.4
26
+ conda-package-handling==1.7.2
27
+ conda==4.9.2
28
+ configparser==5.2.0
29
+ cryptography==3.2.1
30
+ cycler==0.11.0
31
+ datasets==1.18.2.dev0
32
+ debugpy==1.5.1
33
+ decorator==4.4.2
34
+ defusedxml==0.7.1
35
+ dill==0.3.4
36
+ dnspython==2.1.0
37
+ docker-pycreds==0.4.0
38
+ entrypoints==0.3
39
+ executing==0.8.2
40
+ fastapi==0.73.0
41
+ ffmpy==0.3.0
42
+ filelock==3.0.12
43
+ fonttools==4.29.0
44
+ frozenlist==1.3.0
45
+ fsspec==2022.1.0
46
+ gitdb==4.0.9
47
+ gitpython==3.1.26
48
+ glob2==0.7
49
+ gradio==2.7.5.2
50
+ h11==0.13.0
51
+ huggingface-hub==0.4.0
52
+ idna==2.10
53
+ importlib-resources==5.4.0
54
+ ipykernel==6.7.0
55
+ ipython-genutils==0.2.0
56
+ ipython==8.0.1
57
+ ipywidgets==7.6.3
58
+ jedi==0.17.0
59
+ jinja2==2.11.3
60
+ jiwer==2.3.0
61
+ joblib==1.1.0
62
+ json5==0.9.6
63
+ jsonschema==4.4.0
64
+ jupyter-client==7.1.2
65
+ jupyter-core==4.9.1
66
+ jupyterlab-pygments==0.1.2
67
+ jupyterlab-server==1.2.0
68
+ jupyterlab-widgets==1.0.2
69
+ jupyterlab==2.2.9
70
+ kiwisolver==1.3.2
71
+ libarchive-c==2.9
72
+ librosa==0.8.1
73
+ llvmlite==0.38.0
74
+ markdown2==2.4.2
75
+ markupsafe==1.1.1
76
+ matplotlib-inline==0.1.3
77
+ matplotlib==3.5.1
78
+ mistune==0.8.4
79
+ mkl-fft==1.3.0
80
+ mkl-random==1.1.1
81
+ mkl-service==2.3.0
82
+ monotonic==1.6
83
+ multidict==6.0.2
84
+ multiprocess==0.70.12.2
85
+ mypy-extensions==0.4.3
86
+ nano==0.10.0
87
+ nbclient==0.5.10
88
+ nbconvert==6.4.1
89
+ nbformat==5.1.3
90
+ nest-asyncio==1.5.4
91
+ notebook==6.4.8
92
+ numba==0.55.1
93
+ numpy==1.19.2
94
+ olefile==0.46
95
+ packaging==21.3
96
+ pandas==1.4.0
97
+ pandocfilters==1.5.0
98
+ paramiko==2.9.2
99
+ parso==0.8.1
100
+ pathspec==0.9.0
101
+ pathtools==0.1.2
102
+ pexpect==4.8.0
103
+ pickleshare==0.7.5
104
+ pillow==8.1.2
105
+ pip==21.3.1
106
+ pkginfo==1.7.0
107
+ platformdirs==2.4.1
108
+ pooch==1.6.0
109
+ prometheus-client==0.13.0
110
+ promise==2.3
111
+ prompt-toolkit==3.0.8
112
+ protobuf==3.19.4
113
+ psutil==5.8.0
114
+ ptyprocess==0.7.0
115
+ pure-eval==0.2.2
116
+ pyarrow==6.0.1
117
+ pycosat==0.6.3
118
+ pycparser==2.20
119
+ pycryptodome==3.13.0
120
+ pydantic==1.9.0
121
+ pydub==0.25.1
122
+ pygments==2.8.0
123
+ pynacl==1.5.0
124
+ pyopenssl==19.1.0
125
+ pyparsing==3.0.7
126
+ pyrsistent==0.18.1
127
+ pysocks==1.7.1
128
+ python-dateutil==2.8.2
129
+ python-etcd==0.4.5
130
+ python-levenshtein==0.12.2
131
+ python-multipart==0.0.5
132
+ pytz==2021.1
133
+ pyyaml==5.4.1
134
+ pyzmq==22.3.0
135
+ regex==2022.1.18
136
+ requests==2.24.0
137
+ resampy==0.2.2
138
+ ruamel-yaml==0.15.87
139
+ sacremoses==0.0.47
140
+ scikit-learn==1.0.2
141
+ scipy==1.7.3
142
+ send2trash==1.8.0
143
+ sentry-sdk==1.5.4
144
+ setuptools==50.3.1.post20201107
145
+ shortuuid==1.0.8
146
+ six==1.15.0
147
+ smmap==5.0.0
148
+ sniffio==1.2.0
149
+ soundfile==0.10.3.post1
150
+ soupsieve==2.2
151
+ stack-data==0.1.4
152
+ starlette==0.17.1
153
+ subprocess32==3.5.4
154
+ termcolor==1.1.0
155
+ terminado==0.13.1
156
+ testpath==0.5.0
157
+ threadpoolctl==3.0.0
158
+ tokenizers==0.11.4
159
+ tomli==1.2.3
160
+ torch==1.10.2
161
+ torchaudio==0.10.2
162
+ torchelastic==0.2.2
163
+ torchtext==0.9.1
164
+ torchvision==0.9.1
165
+ tornado==6.1
166
+ tqdm==4.62.3
167
+ traitlets==5.1.1
168
+ transformers==4.17.0.dev0
169
+ typing-extensions==4.0.1
170
+ urllib3==1.25.11
171
+ uvicorn==0.17.1
172
+ wandb==0.12.9
173
+ wcwidth==0.2.5
174
+ webencodings==0.5.1
175
+ wheel==0.35.1
176
+ widgetsnbextension==3.5.2
177
+ xxhash==2.0.2
178
+ yarl==1.7.2
179
+ yaspin==2.1.0
180
+ zipp==3.7.0
wandb/run-20220130_230018-ktkg6ghu/files/wandb-metadata.json ADDED
@@ -0,0 +1,63 @@
+ {
+ "os": "Linux-4.15.0-151-generic-x86_64-with-glibc2.10",
+ "python": "3.8.8",
+ "heartbeatAt": "2022-01-30T23:00:20.181812",
+ "startedAt": "2022-01-30T23:00:18.929948",
+ "docker": null,
+ "gpu": "Tesla V100S-PCIE-32GB",
+ "gpu_count": 1,
+ "cpu_count": 60,
+ "cuda": null,
+ "args": [
+ "--activation_dropout=0.1",
+ "--dataset_name=mozilla-foundation/common_voice_7_0",
+ "--dataset_config_name=fr",
+ "--eval_steps=500",
+ "--evaluation_strategy=steps",
+ "--feat_proj_dropout=0.0",
+ "--freeze_feature_encoder",
+ "--fp16",
+ "--gradient_accumulation_steps=8",
+ "--gradient_checkpointing",
+ "--group_by_length",
+ "--layerdrop=0.0",
+ "--learning_rate=7.5e-5",
+ "--length_column_name=input_length",
+ "--load_best_model_at_end",
+ "--logging_steps=100",
+ "--mask_feature_length=64",
+ "--mask_feature_prob=0.25",
+ "--mask_time_length=10",
+ "--mask_time_prob=0.75",
+ "--max_train_samples=1000",
+ "--max_eval_samples=200",
+ "--model_name_or_path=facebook/wav2vec2-xls-r-300m",
+ "--num_train_epochs=0.4",
+ "--output_dir=./",
+ "--overwrite_output_dir",
+ "--per_device_train_batch_size=8",
+ "--per_device_eval_batch_size=8",
+ "--preprocessing_num_workers=4",
+ "--push_to_hub",
+ "--report_to=wandb",
+ "--save_steps=500",
+ "--save_total_limit=3",
+ "--text_column_name=sentence",
+ "--use_auth_token",
+ "--warmup_steps=2000",
+ "--do_train",
+ "--do_eval"
+ ],
+ "state": "running",
+ "program": "run_speech_recognition_ctc.py",
+ "codePath": "run_speech_recognition_ctc.py",
+ "git": {
+ "remote": "https://huggingface.co/Plim/xls-r-300m-fr",
+ "commit": "1fb68dc4e7aab3ec7e3f3b252fb785ff9e047418"
+ },
+ "email": "lim.pascal93@gmail.com",
+ "root": "/workspace/xls-r-300m-fr",
+ "host": "job-3261699b-76eb-4c28-8419-66a66c5c9199",
+ "username": "ovh",
+ "executable": "/opt/conda/bin/python"
+ }
wandb/run-20220130_230018-ktkg6ghu/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
+ {"train/train_runtime": 23.2007, "train/train_samples_per_second": 17.241, "train/train_steps_per_second": 0.259, "train/total_flos": 5.41371015650304e+16, "train/train_loss": 13.584136962890625, "train/epoch": 0.38, "train/global_step": 6, "_runtime": 22, "_timestamp": 1643583641, "_step": 0}
wandb/run-20220130_230018-ktkg6ghu/logs/debug-internal.log ADDED
@@ -0,0 +1,70 @@
+ 2022-01-30 23:00:19,867 INFO MainThread:28776 [internal.py:wandb_internal():87] W&B internal server running at pid: 28776, started at: 2022-01-30 23:00:19.867526
+ 2022-01-30 23:00:19,871 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: check_version
+ 2022-01-30 23:00:19,872 INFO WriterThread:28776 [datastore.py:open_for_write():77] open: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/run-ktkg6ghu.wandb
+ 2022-01-30 23:00:19,875 DEBUG SenderThread:28776 [sender.py:send():234] send: header
+ 2022-01-30 23:00:19,876 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: check_version
+ 2022-01-30 23:00:19,950 DEBUG SenderThread:28776 [sender.py:send():234] send: run
+ 2022-01-30 23:00:20,171 INFO SenderThread:28776 [dir_watcher.py:__init__():169] watching files in: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files
+ 2022-01-30 23:00:20,171 INFO SenderThread:28776 [sender.py:_start_run_threads():804] run started: ktkg6ghu with start time 1643583619
+ 2022-01-30 23:00:20,171 DEBUG SenderThread:28776 [sender.py:send():234] send: summary
+ 2022-01-30 23:00:20,172 INFO SenderThread:28776 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
+ 2022-01-30 23:00:20,173 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: run_start
+ 2022-01-30 23:00:20,181 DEBUG HandlerThread:28776 [meta.py:__init__():40] meta init
+ 2022-01-30 23:00:20,181 DEBUG HandlerThread:28776 [meta.py:__init__():54] meta init done
+ 2022-01-30 23:00:20,181 DEBUG HandlerThread:28776 [meta.py:probe():214] probe
+ 2022-01-30 23:00:20,189 DEBUG HandlerThread:28776 [meta.py:_setup_git():204] setup git
+ 2022-01-30 23:00:20,223 DEBUG HandlerThread:28776 [meta.py:_setup_git():211] setup git done
+ 2022-01-30 23:00:20,223 DEBUG HandlerThread:28776 [meta.py:_save_pip():58] save pip
+ 2022-01-30 23:00:20,224 DEBUG HandlerThread:28776 [meta.py:_save_pip():72] save pip done
+ 2022-01-30 23:00:20,224 DEBUG HandlerThread:28776 [meta.py:_save_conda():79] save conda
+ 2022-01-30 23:00:20,778 DEBUG HandlerThread:28776 [meta.py:_save_conda():89] save conda done
+ 2022-01-30 23:00:20,778 DEBUG HandlerThread:28776 [meta.py:probe():252] probe done
+ 2022-01-30 23:00:20,787 DEBUG SenderThread:28776 [sender.py:send():234] send: files
+ 2022-01-30 23:00:20,787 INFO SenderThread:28776 [sender.py:_save_file():939] saving file wandb-metadata.json with policy now
+ 2022-01-30 23:00:20,797 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 23:00:20,798 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: stop_status
+ 2022-01-30 23:00:20,961 DEBUG SenderThread:28776 [sender.py:send():234] send: config
+ 2022-01-30 23:00:20,963 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:20,963 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:20,963 WARNING SenderThread:28776 [sender.py:send_metric():897] Seen metric with glob (shouldnt happen)
+ 2022-01-30 23:00:21,176 INFO Thread-8 :28776 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/wandb-summary.json
+ 2022-01-30 23:00:21,176 INFO Thread-8 :28776 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/conda-environment.yaml
+ 2022-01-30 23:00:21,177 INFO Thread-8 :28776 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:21,177 INFO Thread-8 :28776 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/requirements.txt
+ 2022-01-30 23:00:21,177 INFO Thread-8 :28776 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/wandb-metadata.json
+ 2022-01-30 23:00:21,331 INFO Thread-11 :28776 [upload_job.py:push():137] Uploaded file /tmp/tmpoltwert_wandb/8zi886rt-wandb-metadata.json
+ 2022-01-30 23:00:23,173 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:27,175 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:31,177 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:33,178 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:36,074 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 23:00:36,075 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: stop_status
+ 2022-01-30 23:00:37,180 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:41,182 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:41,623 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:41,623 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:41,623 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:41,623 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:41,623 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:41,624 DEBUG SenderThread:28776 [sender.py:send():234] send: metric
+ 2022-01-30 23:00:41,624 DEBUG SenderThread:28776 [sender.py:send():234] send: history
+ 2022-01-30 23:00:41,624 DEBUG SenderThread:28776 [sender.py:send():234] send: summary
+ 2022-01-30 23:00:41,624 INFO SenderThread:28776 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
+ 2022-01-30 23:00:42,183 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/wandb-summary.json
+ 2022-01-30 23:00:43,184 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:45,185 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:47,187 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/output.log
+ 2022-01-30 23:00:48,450 DEBUG SenderThread:28776 [sender.py:send():234] send: stats
+ 2022-01-30 23:00:51,190 INFO Thread-8 :28776 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/files/config.yaml
+ 2022-01-30 23:00:51,326 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 23:00:51,327 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: stop_status
+ 2022-01-30 23:01:06,483 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 23:01:06,484 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: stop_status
+ 2022-01-30 23:01:18,718 DEBUG SenderThread:28776 [sender.py:send():234] send: stats
+ 2022-01-30 23:01:21,647 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 23:01:21,648 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: stop_status
+ 2022-01-30 23:01:36,814 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 23:01:36,814 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: stop_status
+ 2022-01-30 23:01:48,941 DEBUG SenderThread:28776 [sender.py:send():234] send: stats
+ 2022-01-30 23:01:51,982 DEBUG HandlerThread:28776 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 23:01:51,982 DEBUG SenderThread:28776 [sender.py:send_request():248] send_request: stop_status
wandb/run-20220130_230018-ktkg6ghu/logs/debug.log ADDED
@@ -0,0 +1,24 @@
+ 2022-01-30 23:00:18,934 INFO MainThread:28546 [wandb_setup.py:_flush():71] setting env: {'project': 'xls-r-300-fr'}
+ 2022-01-30 23:00:18,934 INFO MainThread:28546 [wandb_setup.py:_flush():71] setting login settings: {}
+ 2022-01-30 23:00:18,935 INFO MainThread:28546 [wandb_init.py:_log_setup():371] Logging user logs to /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/logs/debug.log
+ 2022-01-30 23:00:18,935 INFO MainThread:28546 [wandb_init.py:_log_setup():372] Logging internal logs to /workspace/xls-r-300m-fr/wandb/run-20220130_230018-ktkg6ghu/logs/debug-internal.log
+ 2022-01-30 23:00:18,935 INFO MainThread:28546 [wandb_init.py:init():404] calling init triggers
+ 2022-01-30 23:00:18,935 INFO MainThread:28546 [wandb_init.py:init():409] wandb.init called with sweep_config: {}
+ config: {}
+ 2022-01-30 23:00:18,935 INFO MainThread:28546 [wandb_init.py:init():460] starting backend
+ 2022-01-30 23:00:18,935 INFO MainThread:28546 [backend.py:_multiprocessing_setup():99] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2022-01-30 23:00:19,011 INFO MainThread:28546 [backend.py:ensure_launched():216] starting backend process...
+ 2022-01-30 23:00:19,071 INFO MainThread:28546 [backend.py:ensure_launched():221] started backend process with pid: 28776
+ 2022-01-30 23:00:19,073 INFO MainThread:28546 [wandb_init.py:init():469] backend started and connected
+ 2022-01-30 23:00:19,081 INFO MainThread:28546 [wandb_init.py:init():533] updated telemetry
+ 2022-01-30 23:00:19,256 INFO MainThread:28546 [wandb_init.py:init():563] communicating current version
+ 2022-01-30 23:00:19,948 INFO MainThread:28546 [wandb_init.py:init():568] got version response
+ 2022-01-30 23:00:19,948 INFO MainThread:28546 [wandb_init.py:init():578] communicating run to backend with 30 second timeout
+ 2022-01-30 23:00:20,172 INFO MainThread:28546 [wandb_init.py:init():606] starting run threads in backend
+ 2022-01-30 23:00:20,795 INFO MainThread:28546 [wandb_run.py:_console_start():1810] atexit reg
+ 2022-01-30 23:00:20,796 INFO MainThread:28546 [wandb_run.py:_redirect():1684] redirect: SettingsConsole.REDIRECT
+ 2022-01-30 23:00:20,797 INFO MainThread:28546 [wandb_run.py:_redirect():1689] Redirecting console.
+ 2022-01-30 23:00:20,803 INFO MainThread:28546 [wandb_run.py:_redirect():1745] Redirects installed.
+ 2022-01-30 23:00:20,803 INFO MainThread:28546 [wandb_init.py:init():633] run started, returning control to user process
+ 2022-01-30 23:00:20,805 INFO MainThread:28546 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 40, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.1, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 41, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.75, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 64, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 8, 'eval_accumulation_steps': 'None', 'learning_rate': 7.5e-05, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 0.4, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 
0.0, 'warmup_steps': 2000, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Jan30_22-59-56_job-3261699b-76eb-4c28-8419-66a66c5c9199', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 3, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 8, 'eval_batch_size': 8}
+ 2022-01-30 23:00:20,811 INFO MainThread:28546 [wandb_watch.py:watch():43] Watching
wandb/run-20220130_230018-ktkg6ghu/run-ktkg6ghu.wandb ADDED
Binary file (9.53 kB).