2022-02-01 22:36:24,639 INFO    MainThread:37310 [wandb_setup.py:_flush():71] setting env: {}
2022-02-01 22:36:24,639 INFO    MainThread:37310 [wandb_setup.py:_flush():71] setting login settings: {}
2022-02-01 22:36:24,640 INFO    MainThread:37310 [wandb_init.py:_log_setup():371] Logging user logs to /workspace/xls-r-300m-pt/wandb/run-20220201_223624-2b1hcyq3/logs/debug.log
2022-02-01 22:36:24,640 INFO    MainThread:37310 [wandb_init.py:_log_setup():372] Logging internal logs to /workspace/xls-r-300m-pt/wandb/run-20220201_223624-2b1hcyq3/logs/debug-internal.log
2022-02-01 22:36:24,640 INFO    MainThread:37310 [wandb_init.py:init():404] calling init triggers
2022-02-01 22:36:24,640 INFO    MainThread:37310 [wandb_init.py:init():409] wandb.init called with sweep_config: {}
config: {}
2022-02-01 22:36:24,640 INFO    MainThread:37310 [wandb_init.py:init():460] starting backend
2022-02-01 22:36:24,640 INFO    MainThread:37310 [backend.py:_multiprocessing_setup():99] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
2022-02-01 22:36:24,721 INFO    MainThread:37310 [backend.py:ensure_launched():216] starting backend process...
2022-02-01 22:36:24,797 INFO    MainThread:37310 [backend.py:ensure_launched():221] started backend process with pid: 38198
2022-02-01 22:36:24,800 INFO    MainThread:37310 [wandb_init.py:init():469] backend started and connected
2022-02-01 22:36:24,809 INFO    MainThread:37310 [wandb_init.py:init():533] updated telemetry
2022-02-01 22:36:24,979 INFO    MainThread:37310 [wandb_init.py:init():563] communicating current version
2022-02-01 22:36:25,688 INFO    MainThread:37310 [wandb_init.py:init():568] got version response upgrade_message: "wandb version 0.12.10 is available!  To upgrade, please run:\n $ pip install wandb --upgrade"

2022-02-01 22:36:25,688 INFO    MainThread:37310 [wandb_init.py:init():578] communicating run to backend with 30 second timeout
2022-02-01 22:36:25,885 INFO    MainThread:37310 [wandb_init.py:init():606] starting run threads in backend
2022-02-01 22:36:26,492 INFO    MainThread:37310 [wandb_run.py:_console_start():1810] atexit reg
2022-02-01 22:36:26,493 INFO    MainThread:37310 [wandb_run.py:_redirect():1684] redirect: SettingsConsole.REDIRECT
2022-02-01 22:36:26,494 INFO    MainThread:37310 [wandb_run.py:_redirect():1689] Redirecting console.
2022-02-01 22:36:26,500 INFO    MainThread:37310 [wandb_run.py:_redirect():1745] Redirects installed.
2022-02-01 22:36:26,500 INFO    MainThread:37310 [wandb_init.py:init():633] run started, returning control to user process
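
The init sequence above (backend spawned, telemetry updated, version checked, run threads started, console redirected) is what a single `wandb.init()` call with no sweep attached produces. A minimal sketch of the equivalent call, under stated assumptions: the project name is a placeholder not recoverable from this log, while the run name matches the `run_name` recorded in the config dump below.

```python
import wandb

# Minimal reconstruction of the call behind the init sequence above.
# "my-project" is an assumption; "xls-r-300m-fr" matches run_name in
# the config dump that follows. The log shows an empty initial config.
run = wandb.init(
    project="my-project",   # assumption: not recoverable from this log
    name="xls-r-300m-fr",
    config={},
)
```
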
2022-02-01 22:36:26,504 INFO    MainThread:37310 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 50, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.05, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 53, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.65, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.3, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 32, 'per_device_eval_batch_size': 32, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': 'None', 'learning_rate': 0.0002, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 15.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 1500, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Feb01_22-34-39_job-1abccd0a-3293-4ffe-8274-9e8f841f653f', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 3, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': 'xls-r-300m-fr', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 32, 'eval_batch_size': 32}
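
The `config_cb` entry above is the run config the Hugging Face `Trainer` hands to wandb: the `facebook/wav2vec2-xls-r-300m` model config merged with the training arguments. As a hedged reconstruction, the training hyperparameters it records correspond to `TrainingArguments` like the following; the values are taken directly from the dump, while the surrounding script is an assumption.

```python
# Sketch of the TrainingArguments matching the config dump above
# (transformers 4.17-era API). Values copied from the log.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="./",
    overwrite_output_dir=True,
    do_train=True,
    do_eval=True,
    evaluation_strategy="steps",
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    learning_rate=2e-4,
    warmup_steps=1500,
    num_train_epochs=15,
    fp16=True,
    gradient_checkpointing=True,
    group_by_length=True,
    length_column_name="input_length",
    logging_steps=100,
    eval_steps=500,
    save_steps=500,
    save_total_limit=3,
    load_best_model_at_end=True,   # best checkpoint by lowest eval loss
    metric_for_best_model="loss",
    greater_is_better=False,
    push_to_hub=True,
    hub_strategy="every_save",
    report_to=["wandb"],
    run_name="xls-r-300m-fr",
    seed=42,
)
```
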
2022-02-01 22:36:26,509 INFO    MainThread:37310 [wandb_watch.py:watch():43] Watching
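
The single "Watching" line corresponds to a `wandb.watch()` call on the model, which the `Trainer`'s wandb integration issues during setup so gradients are logged alongside metrics. A minimal sketch; `model` here stands for whatever model instance was passed to the `Trainer` and the arguments are assumptions consistent with `logging_steps=100` above.

```python
# Assumed equivalent of the "Watching" log line; `model` is hypothetical.
wandb.watch(model, log="gradients", log_freq=100)
```
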
2022-02-02 05:17:25,827 INFO    MainThread:37310 [wandb_run.py:_atexit_cleanup():1780] got exitcode: 0
2022-02-02 05:17:25,831 INFO    MainThread:37310 [wandb_run.py:_restore():1752] restore
2022-02-02 05:17:28,379 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 1
}
pusher_stats {
  uploaded_bytes: 2149
  total_bytes: 2149
}

2022-02-02 05:17:28,489 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 1
}
pusher_stats {
  uploaded_bytes: 2149
  total_bytes: 2149
}

2022-02-02 05:17:29,258 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 1
}
pusher_stats {
  uploaded_bytes: 2149
  total_bytes: 2149
}

2022-02-02 05:17:30,283 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 4
}
pusher_stats {
  uploaded_bytes: 2149
  total_bytes: 2176207
}

2022-02-02 05:17:30,386 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2149
  total_bytes: 2179340
}

2022-02-02 05:17:30,491 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2149
  total_bytes: 2179340
}

2022-02-02 05:17:30,594 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2176207
  total_bytes: 2179340
}

2022-02-02 05:17:30,697 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2176207
  total_bytes: 2179340
}

2022-02-02 05:17:30,800 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2179340
  total_bytes: 2179340
}

2022-02-02 05:17:30,903 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2179340
  total_bytes: 2179340
}

2022-02-02 05:17:31,006 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2179340
  total_bytes: 2179340
}

2022-02-02 05:17:31,111 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2179340
  total_bytes: 2179340
}

2022-02-02 05:17:31,215 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2179340
  total_bytes: 2179340
}

2022-02-02 05:17:32,130 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2179340
  total_bytes: 2179340
}

2022-02-02 05:17:32,547 INFO    MainThread:37310 [wandb_run.py:_wait_for_finish():1912] got exit ret: done: true
exit_result {
}
file_counts {
  wandb_count: 5
}
pusher_stats {
  uploaded_bytes: 2179340
  total_bytes: 2179340
}
local_info {
}

2022-02-02 05:17:33,722 INFO    MainThread:37310 [wandb_run.py:_append_history():2130] rendering history
2022-02-02 05:17:33,724 INFO    MainThread:37310 [wandb_run.py:_append_summary():2085] rendering summary
2022-02-02 05:17:33,726 INFO    MainThread:37310 [wandb_run.py:_append_files():2180] logging synced files
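
Everything from the `got exitcode: 0` line onward is the normal shutdown path: the file pusher is polled until `uploaded_bytes` equals `total_bytes` (2,179,340 bytes across 5 files here), then history, summary, and the synced-file list are rendered. The same sequence can be triggered explicitly instead of relying on the atexit hook registered at startup; a one-line sketch, assuming `run` is the object returned by `wandb.init()`:

```python
# Explicit shutdown; equivalent to what the atexit handler did above.
run.finish()  # flushes history/summary and blocks until uploads complete
```
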