charsiu committed
Commit 147d275
1 Parent(s): 18db01b

Upload 3 files

Files changed (3)
  1. config.json +218 -0
  2. pytorch_model.bin +3 -0
  3. results.txt +272 -0
config.json ADDED
@@ -0,0 +1,218 @@
+ {
+   "architectures": [
+     "SpeechEncoderDecoderModel"
+   ],
+   "decoder": {
+     "_name_or_path": "/gpfs/accounts/lingjzhu_root/lingjzhu1/lingjzhu/embeddings/models/bert-hubert-200/checkpoint-8000",
+     "add_cross_attention": true,
+     "architectures": [
+       "BertForMaskedLM"
+     ],
+     "attention_probs_dropout_prob": 0.1,
+     "bad_words_ids": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "classifier_dropout": null,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "gradient_checkpointing": false,
+     "hidden_act": "gelu",
+     "hidden_dropout_prob": 0.1,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "is_decoder": true,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-12,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "max_position_embeddings": 512,
+     "min_length": 0,
+     "model_type": "bert",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": 0,
+     "position_embedding_type": "absolute",
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": "float32",
+     "torchscript": false,
+     "transformers_version": "4.18.0",
+     "type_vocab_size": 2,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 205
+   },
+   "decoder_start_token_id": 204,
+   "encoder": {
+     "_name_or_path": "facebook/hubert-base-ls960",
+     "activation_dropout": 0.1,
+     "add_cross_attention": false,
+     "apply_spec_augment": true,
+     "architectures": [
+       "HubertModel"
+     ],
+     "attention_dropout": 0.1,
+     "bad_words_ids": null,
+     "bos_token_id": 1,
+     "chunk_size_feed_forward": 0,
+     "classifier_proj_size": 256,
+     "conv_bias": false,
+     "conv_dim": [
+       512,
+       512,
+       512,
+       512,
+       512,
+       512,
+       512
+     ],
+     "conv_kernel": [
+       10,
+       3,
+       3,
+       3,
+       3,
+       2,
+       2
+     ],
+     "conv_stride": [
+       5,
+       2,
+       2,
+       2,
+       2,
+       2,
+       2
+     ],
+     "cross_attention_hidden_size": null,
+     "ctc_loss_reduction": "sum",
+     "ctc_zero_infinity": false,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "do_stable_layer_norm": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 2,
+     "exponential_decay_length_penalty": null,
+     "feat_extract_activation": "gelu",
+     "feat_extract_dropout": 0.0,
+     "feat_extract_norm": "group",
+     "feat_proj_dropout": 0.1,
+     "feat_proj_layer_norm": true,
+     "final_dropout": 0.1,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "gradient_checkpointing": false,
+     "hidden_act": "gelu",
+     "hidden_dropout": 0.1,
+     "hidden_dropout_prob": 0.1,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-05,
+     "layerdrop": 0.1,
+     "length_penalty": 1.0,
+     "mask_feature_length": 10,
+     "mask_feature_min_masks": 0,
+     "mask_feature_prob": 0.0,
+     "mask_time_length": 10,
+     "mask_time_min_masks": 2,
+     "mask_time_prob": 0.05,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "hubert",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_conv_pos_embedding_groups": 16,
+     "num_conv_pos_embeddings": 128,
+     "num_feat_extract_layers": 7,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": 0,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": "Wav2Vec2CTCTokenizer",
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "transformers_version": "4.18.0",
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_weighted_layer_sum": false,
+     "vocab_size": 32
+   },
+   "is_encoder_decoder": true,
+   "model_type": "speech-encoder-decoder",
+   "pad_token_id": 202,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": null,
+   "vocab_size": 205
+ }
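Note: the config above describes a SpeechEncoderDecoderModel that pairs a facebook/hubert-base-ls960 speech encoder with a 12-layer BERT decoder over a 205-token vocabulary (decoder_start_token_id 204, pad_token_id 202). A minimal loading sketch, assuming transformers 4.18 and that this commit's config.json and pytorch_model.bin sit in a local folder; the "./checkpoint" path and the dummy inputs are hypothetical:

```python
# Minimal sketch, not the authors' training/inference code.
import torch
from transformers import SpeechEncoderDecoderModel

# "./checkpoint" is a hypothetical local folder holding config.json + pytorch_model.bin.
model = SpeechEncoderDecoderModel.from_pretrained("./checkpoint")
model.eval()

# The HuBERT encoder consumes raw 16 kHz waveforms; one second of silence is enough
# to check that the forward pass runs.
waveform = torch.zeros(1, 16000)
decoder_input_ids = torch.tensor([[204]])  # decoder_start_token_id from the config

with torch.no_grad():
    outputs = model(inputs=waveform, decoder_input_ids=decoder_input_ids)

print(outputs.logits.shape)  # expected: torch.Size([1, 1, 205]), i.e. decoder vocab_size 205
```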
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aaadaae4f3fdeb21c3592ad7d806368e25d6f4d521c17f9d642f70f8bf0cc96e
+ size 835947409
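Note: this file is a Git LFS pointer, not the weights themselves; the ~836 MB binary is stored via LFS under the listed oid. A minimal sketch for checking a downloaded copy against the pointer, assuming the weights were fetched to a local file (the filename is an assumption):

```python
# Minimal sketch: compare a local pytorch_model.bin against the LFS pointer above.
import hashlib

EXPECTED_SHA256 = "aaadaae4f3fdeb21c3592ad7d806368e25d6f4d521c17f9d642f70f8bf0cc96e"
EXPECTED_SIZE = 835947409  # bytes, from the pointer file

def verify(path: str = "pytorch_model.bin") -> bool:
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == EXPECTED_SHA256 and size == EXPECTED_SIZE

print(verify())
```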
results.txt ADDED
@@ -0,0 +1,272 @@
+ Iteration: 0 - Loss: 8.08641242980957
+ Pearsonr: 0.010621631076654455; P: 0.8204651416584556
+ Spearmanr: 0.015827880196904395; P: 0.7352143065765318
+
+ Iteration: 200 - Loss: 2.071859121322632
+ Pearsonr: 0.2703880744597078; P: 3.9306588610586655e-09
+ Spearmanr: 0.24888998853600333; P: 6.553401979822967e-08
+
+ Iteration: 400 - Loss: 2.0651769638061523
+ Pearsonr: 0.42718881100365946; P: 8.742135113954878e-22
+ Spearmanr: 0.40966308232783616; P: 5.2958389742092694e-20
+
+ Iteration: 600 - Loss: 2.117659330368042
+ Pearsonr: 0.5136731670675092; P: 2.8767046353634493e-32
+ Spearmanr: 0.5028092748318626; P: 8.823308311390258e-31
+
+ Iteration: 800 - Loss: 1.945109486579895
+ Pearsonr: 0.5681436159158254; P: 1.3678123197278457e-40
+ Spearmanr: 0.5504079249050792; P: 1.0348476674787196e-37
+
+ Iteration: 1000 - Loss: 2.0464084148406982
+ Pearsonr: 0.5939622212503224; P: 4.167997420071383e-45
+ Spearmanr: 0.5752656275449446; P: 8.514397579285141e-42
+
+ Iteration: 1200 - Loss: 1.9914944171905518
+ Pearsonr: 0.6073071935285385; P: 1.3225910599584228e-47
+ Spearmanr: 0.5881375367999437; P: 4.723981004193902e-44
+
+ Iteration: 1400 - Loss: 1.8374221324920654
+ Pearsonr: 0.6075717386880887; P: 1.1767806305680267e-47
+ Spearmanr: 0.581478727629097; P: 7.141860148677098e-43
+
+ Iteration: 1600 - Loss: 1.849561095237732
+ Pearsonr: 0.6278860892896775; P: 1.0664801455384033e-51
+ Spearmanr: 0.6015524902799576; P: 1.6340943351419174e-46
+
+ Iteration: 1800 - Loss: 1.8758680820465088
+ Pearsonr: 0.6263555276974726; P: 2.2028800259636204e-51
+ Spearmanr: 0.5998857996235254; P: 3.352566746703723e-46
+
+ Iteration: 2000 - Loss: 1.9839155673980713
+ Pearsonr: 0.6351966735114957; P: 3.152286156971959e-53
+ Spearmanr: 0.6017145261425186; P: 1.5234799888366574e-46
+
+ Iteration: 2200 - Loss: 1.756374478340149
+ Pearsonr: 0.6440005255607844; P: 3.993071466828629e-55
+ Spearmanr: 0.6106112229001889; P: 3.0506744187327255e-48
+
+ Iteration: 2400 - Loss: 1.8533791303634644
+ Pearsonr: 0.6408746646700408; P: 1.9145926870945078e-54
+ Spearmanr: 0.6093127898293356; P: 5.440424225527253e-48
+
+ Iteration: 2600 - Loss: 1.8667470216751099
+ Pearsonr: 0.6464866322282338; P: 1.1328237298127798e-55
+ Spearmanr: 0.6103482539078129; P: 3.4306173880739657e-48
+
+ Iteration: 2800 - Loss: 1.7068326473236084
+ Pearsonr: 0.6427583141482234; P: 7.460948718495204e-55
+ Spearmanr: 0.6051805281594114; P: 3.369117421617711e-47
+
+ Iteration: 3000 - Loss: 1.8382970094680786
+ Pearsonr: 0.6552845954028798; P: 1.1919002735048877e-57
+ Spearmanr: 0.6156078862459645; P: 3.2109776184825534e-49
+
+ Iteration: 3200 - Loss: 1.6554993391036987
+ Pearsonr: 0.6600840629923029; P: 9.31205434232349e-59
+ Spearmanr: 0.6201462159734871; P: 4.0109450559252026e-50
+
+ Iteration: 3400 - Loss: 1.7735623121261597
+ Pearsonr: 0.6509680156510307; P: 1.1347375020798484e-56
+ Spearmanr: 0.6138350864017781; P: 7.170435438627515e-49
+
+ Iteration: 3600 - Loss: 1.7398338317871094
+ Pearsonr: 0.649293980761855; P: 2.6924645192144566e-56
+ Spearmanr: 0.6110556695337269; P: 2.5011015044019234e-48
+
+ Iteration: 3800 - Loss: 1.9043127298355103
+ Pearsonr: 0.650289529572843; P: 1.611654278409537e-56
+ Spearmanr: 0.6141181915219909; P: 6.3092199769872e-49
+
+ Iteration: 4000 - Loss: 1.7640738487243652
+ Pearsonr: 0.6582143682985201; P: 2.528049495306161e-58
+ Spearmanr: 0.6177296281424445; P: 1.2193376871567522e-49
+
+ Iteration: 4200 - Loss: 1.6442313194274902
+ Pearsonr: 0.6599942491461693; P: 9.771294549984983e-59
+ Spearmanr: 0.6220306797993137; P: 1.6740322003525815e-50
+
+ Iteration: 4400 - Loss: 1.9100476503372192
+ Pearsonr: 0.6690743597017518; P: 6.905655376873195e-61
+ Spearmanr: 0.629251659360487; P: 5.563991810672497e-52
+
+ Iteration: 4600 - Loss: 1.6869518756866455
+ Pearsonr: 0.6564827532494028; P: 6.334820310839045e-58
+ Spearmanr: 0.6176601300962116; P: 1.2587771742941603e-49
+
+ Iteration: 4800 - Loss: 1.7319031953811646
+ Pearsonr: 0.6681917870593195; P: 1.126143415925422e-60
+ Spearmanr: 0.6292608121458675; P: 5.539719960157941e-52
+
+ Iteration: 5000 - Loss: 1.8187370300292969
+ Pearsonr: 0.6594868772838571; P: 1.282098479701026e-58
+ Spearmanr: 0.6209705347616384; P: 2.738847992410262e-50
+
+ Iteration: 5200 - Loss: 1.7238787412643433
+ Pearsonr: 0.6610702661461342; P: 5.482831953281816e-59
+ Spearmanr: 0.6184022631839132; P: 8.956566723438575e-50
+
+ Iteration: 5400 - Loss: 1.641048789024353
+ Pearsonr: 0.6663983491770775; P: 3.026478036010308e-60
+ Spearmanr: 0.6259620220494749; P: 2.6527929100890802e-51
+
+ Iteration: 5600 - Loss: 1.6363853216171265
+ Pearsonr: 0.6675162716393186; P: 1.6355427976123489e-60
+ Spearmanr: 0.6260125201757113; P: 2.5903119517339975e-51
+
+ Iteration: 5800 - Loss: 1.8371890783309937
+ Pearsonr: 0.670109121477053; P: 3.883815499767754e-61
+ Spearmanr: 0.6294011338141468; P: 5.180495094469664e-52
+
+ Iteration: 6000 - Loss: 1.562391996383667
+ Pearsonr: 0.6747816745692912; P: 2.8047547374897617e-62
+ Spearmanr: 0.6322999156280892; P: 1.2868260493171434e-52
+
+ Iteration: 6200 - Loss: 1.7009657621383667
+ Pearsonr: 0.6722479587455972; P: 1.173286368040994e-61
+ Spearmanr: 0.6319249670407839; P: 1.542119089345593e-52
+
+ Iteration: 6400 - Loss: 1.6922433376312256
+ Pearsonr: 0.671785299890823; P: 1.5213361622013014e-61
+ Spearmanr: 0.628526316899759; P: 7.864138172221659e-52
+
+ Iteration: 6600 - Loss: 1.7369767427444458
+ Pearsonr: 0.6713740274119598; P: 1.915771920005438e-61
+ Spearmanr: 0.6305757833530631; P: 2.951522310848249e-52
+
+ Iteration: 6800 - Loss: 1.629564642906189
+ Pearsonr: 0.6751278806108644; P: 2.304035964492078e-62
+ Spearmanr: 0.6315196564550791; P: 1.8748176415696037e-52
+
+ Iteration: 7000 - Loss: 1.5638651847839355
+ Pearsonr: 0.6782024789726145; P: 3.9706587516969526e-63
+ Spearmanr: 0.6361126504042527; P: 2.0141684229571868e-53
+
+ Iteration: 7200 - Loss: 1.6536705493927002
+ Pearsonr: 0.675255590626147; P: 2.1426683850070987e-62
+ Spearmanr: 0.6364294630237282; P: 1.7244862252630863e-53
+
+ Iteration: 7400 - Loss: 1.6959208250045776
+ Pearsonr: 0.6751852309375689; P: 2.2301301939549945e-62
+ Spearmanr: 0.6330855402270119; P: 8.80000736230832e-53
+
+ Iteration: 7600 - Loss: 1.6595476865768433
+ Pearsonr: 0.6699256892172856; P: 4.301701913212347e-61
+ Spearmanr: 0.626572544395673; P: 1.988056637591422e-51
+
+ Iteration: 7800 - Loss: 1.6718226671218872
+ Pearsonr: 0.6713268786646085; P: 1.9670309997700185e-61
+ Spearmanr: 0.630905472994729; P: 2.5193129523466977e-52
+
+ Iteration: 8000 - Loss: 1.5495575666427612
+ Pearsonr: 0.6712791624950885; P: 2.020293445357269e-61
+ Spearmanr: 0.6315060219609953; P: 1.8871692963918654e-52
+
+ Iteration: 8200 - Loss: 1.6815775632858276
+ Pearsonr: 0.6720321140972193; P: 1.3245369461630289e-61
+ Spearmanr: 0.6311942591541434; P: 2.1927099874473988e-52
+
+ Iteration: 8400 - Loss: 1.6123058795928955
+ Pearsonr: 0.6698522367072551; P: 4.48129299869789e-61
+ Spearmanr: 0.6286222002169504; P: 7.512948637795918e-52
+
+ Iteration: 8600 - Loss: 1.8655686378479004
+ Pearsonr: 0.670825239609283; P: 2.604279028771614e-61
+ Spearmanr: 0.6262207618237786; P: 2.347717740511714e-51
+
+ Iteration: 8800 - Loss: 1.741979956626892
+ Pearsonr: 0.6676089980078708; P: 1.553962137700895e-60
+ Spearmanr: 0.6240043359406054; P: 6.660935693914321e-51
+
+ Iteration: 9000 - Loss: 1.7861990928649902
+ Pearsonr: 0.6753597289101979; P: 2.0194320814037133e-62
+ Spearmanr: 0.6300015565350976; P: 3.8870088500443724e-52
+
+ Iteration: 9200 - Loss: 1.5549899339675903
+ Pearsonr: 0.6710648938401299; P: 2.277675850036259e-61
+ Spearmanr: 0.6266997365511309; P: 1.87195244096068e-51
+
+ Iteration: 9400 - Loss: 1.7516299486160278
+ Pearsonr: 0.6760956778192262; P: 1.3277694056867854e-62
+ Spearmanr: 0.6331347759000925; P: 8.59259186816818e-53
+
+ Iteration: 9600 - Loss: 1.5417461395263672
+ Pearsonr: 0.6724719018549866; P: 1.034477338038094e-61
+ Spearmanr: 0.6300778087057145; P: 3.747591501375187e-52
+
+ Iteration: 9800 - Loss: 1.495241403579712
+ Pearsonr: 0.6721989467238506; P: 1.2060505255107816e-61
+ Spearmanr: 0.6290805338352035; P: 6.0377258365565734e-52
+
+ Iteration: 10000 - Loss: 1.618151068687439
+ Pearsonr: 0.6803738029250779; P: 1.1322321140368235e-63
+ Spearmanr: 0.64053344574296; P: 2.269397030848794e-54
+
+ Iteration: 10200 - Loss: 1.5292233228683472
+ Pearsonr: 0.6791221456257276; P: 2.3368612689757928e-63
+ Spearmanr: 0.638255854004383; P: 7.020276228531535e-54
+
+ Iteration: 10400 - Loss: 1.4918687343597412
+ Pearsonr: 0.6789584266908542; P: 2.5685025993016526e-63
+ Spearmanr: 0.6378274405259249; P: 8.672502626901967e-54
+
+ Iteration: 10600 - Loss: 1.5673266649246216
+ Pearsonr: 0.6726076017661807; P: 9.584335148305353e-62
+ Spearmanr: 0.6302953293844776; P: 3.376568218141803e-52
+
+ Iteration: 10800 - Loss: 1.5710985660552979
+ Pearsonr: 0.6732054851372804; P: 6.843137036839127e-62
+ Spearmanr: 0.6321384478694484; P: 1.3911711318475115e-52
+
+ Iteration: 11000 - Loss: 1.565316081047058
+ Pearsonr: 0.6794492351862846; P: 1.9343985492085727e-63
+ Spearmanr: 0.6376315709187856; P: 9.551294642100249e-54
+
+ Iteration: 11200 - Loss: 1.6766408681869507
+ Pearsonr: 0.6742901790513357; P: 3.7063107434104586e-62
+ Spearmanr: 0.6357503263485065; P: 2.405042033044568e-53
+
+ Iteration: 11400 - Loss: 1.498397707939148
+ Pearsonr: 0.6808975685038877; P: 8.351818097958045e-64
+ Spearmanr: 0.640862819771337; P: 1.925932244187251e-54
+
+ Iteration: 11600 - Loss: 1.442487359046936
+ Pearsonr: 0.6795766280238125; P: 1.7969925970135028e-63
+ Spearmanr: 0.6410959948692334; P: 1.714486655770838e-54
+
+ Iteration: 11800 - Loss: 1.6892503499984741
+ Pearsonr: 0.6753984556160304; P: 1.9754213061437024e-62
+ Spearmanr: 0.6401089458692852; P: 2.803073942635199e-54
+
+ Iteration: 12000 - Loss: 1.5381754636764526
+ Pearsonr: 0.6784983796326016; P: 3.3487176362749904e-63
+ Spearmanr: 0.6388817782790831; P: 5.1520951013619294e-54
+
+ Iteration: 12200 - Loss: 1.6090995073318481
+ Pearsonr: 0.6792403600433304; P: 2.182619170349716e-63
+ Spearmanr: 0.6419734629352486; P: 1.1058006230634839e-54
+
+ Iteration: 12400 - Loss: 1.5205144882202148
+ Pearsonr: 0.6861358428000858; P: 3.843357429201634e-65
+ Spearmanr: 0.646575735915118; P: 1.082573845258996e-55
+
+ Iteration: 12600 - Loss: 1.6619857549667358
+ Pearsonr: 0.676975412804351; P: 8.030454018463582e-63
+ Spearmanr: 0.6407291891047838; P: 2.058573312431572e-54
+
+ Iteration: 12800 - Loss: 1.5637744665145874
+ Pearsonr: 0.6807443189447862; P: 9.13012966631097e-64
+ Spearmanr: 0.6429810267989803; P: 6.671330893304869e-55
+
+ Iteration: 13000 - Loss: 1.529252052307129
+ Pearsonr: 0.6831154067704784; P: 2.2861648655983025e-64
+ Spearmanr: 0.6476491367109304; P: 6.259819872232118e-56
+
+ Iteration: 13200 - Loss: 1.6095237731933594
+ Pearsonr: 0.6807567719868253; P: 9.064281891679384e-64
+ Spearmanr: 0.6409391981872695; P: 1.8539664928785416e-54
+
+ Iteration: 13400 - Loss: 1.3736294507980347
+ Pearsonr: 0.6829489934674531; P: 2.520582231208335e-64
+ Spearmanr: 0.6449012180491192; P: 2.533202460308662e-55
+
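Note: the log above records the training loss plus Pearson and Spearman correlations (with p-values) every 200 iterations. A minimal sketch for parsing these blocks back into numbers, assuming the plain-text three-line format shown above; the local file path is an assumption:

```python
# Minimal sketch: turn results.txt into (iteration, loss, pearson, spearman) tuples.
import re

BLOCK = re.compile(
    r"Iteration: (\d+) - Loss: ([\d.]+)\s*\n"
    r"Pearsonr: ([-+\d.eE]+); P: [-+\d.eE]+\s*\n"
    r"Spearmanr: ([-+\d.eE]+); P: [-+\d.eE]+"
)

def parse(path: str = "results.txt"):
    with open(path) as f:
        text = f.read()
    return [
        (int(it), float(loss), float(pear), float(spear))
        for it, loss, pear, spear in BLOCK.findall(text)
    ]

records = parse()
best = max(records, key=lambda r: r[3])  # entry with the highest Spearman correlation
print(best)  # per the log above: iteration 13000, Spearman ~0.648
```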