charsiu committed
Commit: 06e764f
1 Parent(s): 5c124b2

Upload 3 files

Files changed (3):
  1. config.json +218 -0
  2. pytorch_model.bin +3 -0
  3. results.txt +272 -0
config.json ADDED
@@ -0,0 +1,218 @@
+ {
+   "architectures": [
+     "SpeechEncoderDecoderModel"
+   ],
+   "decoder": {
+     "_name_or_path": "/gpfs/accounts/lingjzhu_root/lingjzhu1/lingjzhu/embeddings/models/bert-hubert-50/checkpoint-32000",
+     "add_cross_attention": true,
+     "architectures": [
+       "BertForMaskedLM"
+     ],
+     "attention_probs_dropout_prob": 0.1,
+     "bad_words_ids": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "classifier_dropout": null,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "gradient_checkpointing": false,
+     "hidden_act": "gelu",
+     "hidden_dropout_prob": 0.1,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "is_decoder": true,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-12,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "max_position_embeddings": 512,
+     "min_length": 0,
+     "model_type": "bert",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": 0,
+     "position_embedding_type": "absolute",
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": "float32",
+     "torchscript": false,
+     "transformers_version": "4.18.0",
+     "type_vocab_size": 2,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 55
+   },
+   "decoder_start_token_id": 54,
+   "encoder": {
+     "_name_or_path": "facebook/hubert-base-ls960",
+     "activation_dropout": 0.1,
+     "add_cross_attention": false,
+     "apply_spec_augment": true,
+     "architectures": [
+       "HubertModel"
+     ],
+     "attention_dropout": 0.1,
+     "bad_words_ids": null,
+     "bos_token_id": 1,
+     "chunk_size_feed_forward": 0,
+     "classifier_proj_size": 256,
+     "conv_bias": false,
+     "conv_dim": [
+       512,
+       512,
+       512,
+       512,
+       512,
+       512,
+       512
+     ],
+     "conv_kernel": [
+       10,
+       3,
+       3,
+       3,
+       3,
+       2,
+       2
+     ],
+     "conv_stride": [
+       5,
+       2,
+       2,
+       2,
+       2,
+       2,
+       2
+     ],
+     "cross_attention_hidden_size": null,
+     "ctc_loss_reduction": "sum",
+     "ctc_zero_infinity": false,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "do_stable_layer_norm": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 2,
+     "exponential_decay_length_penalty": null,
+     "feat_extract_activation": "gelu",
+     "feat_extract_dropout": 0.0,
+     "feat_extract_norm": "group",
+     "feat_proj_dropout": 0.1,
+     "feat_proj_layer_norm": true,
+     "final_dropout": 0.1,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "gradient_checkpointing": false,
+     "hidden_act": "gelu",
+     "hidden_dropout": 0.1,
+     "hidden_dropout_prob": 0.1,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_range": 0.02,
+     "intermediate_size": 3072,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_eps": 1e-05,
+     "layerdrop": 0.1,
+     "length_penalty": 1.0,
+     "mask_feature_length": 10,
+     "mask_feature_min_masks": 0,
+     "mask_feature_prob": 0.0,
+     "mask_time_length": 10,
+     "mask_time_min_masks": 2,
+     "mask_time_prob": 0.05,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "hubert",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_conv_pos_embedding_groups": 16,
+     "num_conv_pos_embeddings": 128,
+     "num_feat_extract_layers": 7,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": 0,
+     "prefix": null,
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": "Wav2Vec2CTCTokenizer",
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "transformers_version": "4.18.0",
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_weighted_layer_sum": false,
+     "vocab_size": 32
+   },
+   "is_encoder_decoder": true,
+   "model_type": "speech-encoder-decoder",
+   "pad_token_id": 52,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": null,
+   "vocab_size": 55
+ }
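
The config above describes a SpeechEncoderDecoderModel pairing a facebook/hubert-base-ls960 speech encoder with a 55-token BERT decoder (decoder_start_token_id 54, pad_token_id 52). A minimal loading sketch follows; it is not the repository's own inference script, and the directory name "./bert-hubert-checkpoint" is a placeholder for wherever config.json and pytorch_model.bin are stored.

# Minimal sketch (assumed local path, not the authors' script).
import torch
from transformers import SpeechEncoderDecoderModel

model = SpeechEncoderDecoderModel.from_pretrained("./bert-hubert-checkpoint")
model.eval()

# HuBERT consumes raw 16 kHz waveforms; one second of silence stands in for real audio.
input_values = torch.zeros(1, 16000)

with torch.no_grad():
    # Encode the audio and decode with the 55-token BERT decoder;
    # decoder_start_token_id=54 and max_length=20 are taken from config.json above.
    token_ids = model.generate(input_values, max_length=20)

print(token_ids)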
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1318763d5c9fa693cc015da4b668f4c35287c637a8eb9b29a69fc947c042827c
+ size 835486033
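
This blob is a Git LFS pointer, not the weights themselves: it records only the spec version, the SHA-256 of the real file, and its size (about 835 MB). A small sketch for checking a downloaded copy against the pointer follows; the local filename is an assumption, so adjust it to wherever the weights were actually fetched (for example via `git lfs pull`).

# Sketch: verify a downloaded pytorch_model.bin against the LFS pointer above.
import hashlib
import os

EXPECTED_OID = "1318763d5c9fa693cc015da4b668f4c35287c637a8eb9b29a69fc947c042827c"
EXPECTED_SIZE = 835486033
PATH = "pytorch_model.bin"  # assumed local path to the real weight file

def sha256_of(path, chunk_size=1 << 20):
    # Hash the file in 1 MiB chunks to avoid loading ~835 MB into memory at once.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size differs from the LFS pointer"
assert sha256_of(PATH) == EXPECTED_OID, "sha256 differs from the LFS pointer"
print("weights match the LFS pointer")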
results.txt ADDED
@@ -0,0 +1,272 @@
+ Iteration: 0 - Loss: 6.8719563484191895
+ Pearsonr: 0.02822506823041044; P: 0.5463934512704677
+ Spearmanr: 0.026839185990723664; P: 0.5662751931974506
+
+ Iteration: 200 - Loss: 1.831741213798523
+ Pearsonr: -0.09370338596178601; P: 0.04480606366430184
+ Spearmanr: -0.10795557662763665; P: 0.02070456274632216
+
+ Iteration: 400 - Loss: 1.7502282857894897
+ Pearsonr: -0.12576684371984334; P: 0.006979152523430303
+ Spearmanr: -0.12308542388293962; P: 0.008293741780421948
+
+ Iteration: 600 - Loss: 1.7469736337661743
+ Pearsonr: -0.15532910254058274; P: 0.0008407192821918212
+ Spearmanr: -0.16105387587432093; P: 0.0005328146798399329
+
+ Iteration: 800 - Loss: 1.6827481985092163
+ Pearsonr: -0.17282118408172523; P: 0.000198824226570517
+ Spearmanr: -0.14909205768399567; P: 0.0013581386866252311
+
+ Iteration: 1000 - Loss: 1.7101093530654907
+ Pearsonr: -0.15197685404988917; P: 0.0010903640114218705
+ Spearmanr: -0.10739373384036281; P: 0.021379247605653462
+
+ Iteration: 1200 - Loss: 1.7587130069732666
+ Pearsonr: 0.28936411623208685; P: 2.646598271042884e-10
+ Spearmanr: 0.27571907490147973; P: 1.8803594259465297e-09
+
+ Iteration: 1400 - Loss: 1.7680922746658325
+ Pearsonr: 0.35739628841621623; P: 2.8239402618744422e-15
+ Spearmanr: 0.3613504518722948; P: 1.3254574534859212e-15
+
+ Iteration: 1600 - Loss: 1.7529939413070679
+ Pearsonr: 0.45522121831213513; P: 7.337820650468306e-25
+ Spearmanr: 0.45364953830870564; P: 1.1109282914536672e-24
+
+ Iteration: 1800 - Loss: 1.7283083200454712
+ Pearsonr: 0.44402094991803337; P: 1.345747194942606e-23
+ Spearmanr: 0.45461626181284365; P: 8.610085340093419e-25
+
+ Iteration: 2000 - Loss: 1.7766599655151367
+ Pearsonr: 0.4673103469903363; P: 2.808386302997859e-26
+ Spearmanr: 0.47400817351419716; P: 4.3518934504043215e-27
+
+ Iteration: 2200 - Loss: 1.6536964178085327
+ Pearsonr: 0.4620748800536652; P: 1.1724868112658767e-25
+ Spearmanr: 0.4673898259669975; P: 2.7475908846015254e-26
+
+ Iteration: 2400 - Loss: 1.6404857635498047
+ Pearsonr: 0.4633044472374313; P: 8.40052965277035e-26
+ Spearmanr: 0.46868832216050865; P: 1.9200553428604305e-26
+
+ Iteration: 2600 - Loss: 1.6549359560012817
+ Pearsonr: 0.5011212318698759; P: 1.4855573182320391e-30
+ Spearmanr: 0.5031797417104643; P: 7.866956307638243e-31
+
+ Iteration: 2800 - Loss: 1.6123921871185303
+ Pearsonr: 0.5008548338656781; P: 1.6124312887558582e-30
+ Spearmanr: 0.5050961456011356; P: 4.3360163687960505e-31
+
+ Iteration: 3000 - Loss: 1.7403918504714966
+ Pearsonr: 0.5285856531842982; P: 2.1329234513567175e-34
+ Spearmanr: 0.5307277320255737; P: 1.0333825438755218e-34
+
+ Iteration: 3200 - Loss: 1.6316196918487549
+ Pearsonr: 0.5367563537401875; P: 1.3072854953179037e-35
+ Spearmanr: 0.54106394099427; P: 2.908259698683309e-36
+
+ Iteration: 3400 - Loss: 1.6573631763458252
+ Pearsonr: 0.5446861095889993; P: 8.080139921529646e-37
+ Spearmanr: 0.5491812622961394; P: 1.613142927251775e-37
+
+ Iteration: 3600 - Loss: 1.5965261459350586
+ Pearsonr: 0.5668971666587147; P: 2.2083444300492596e-40
+ Spearmanr: 0.5645931641781715; P: 5.324747818941762e-40
+
+ Iteration: 3800 - Loss: 1.6543303728103638
+ Pearsonr: 0.5553883652436633; P: 1.6743487701529246e-38
+ Spearmanr: 0.5573307761534844; P: 8.160386756301154e-39
+
+ Iteration: 4000 - Loss: 1.6208667755126953
+ Pearsonr: 0.5648862621584205; P: 4.762563074791771e-40
+ Spearmanr: 0.5630012107485691; P: 9.742181119097455e-40
+
+ Iteration: 4200 - Loss: 1.589337706565857
+ Pearsonr: 0.5735581885353966; P: 1.6670124993440217e-41
+ Spearmanr: 0.573256117734025; P: 1.876644869536107e-41
+
+ Iteration: 4400 - Loss: 1.5426558256149292
+ Pearsonr: 0.5656500278289712; P: 3.559088224298744e-40
+ Spearmanr: 0.5651935868991221; P: 4.236218247606656e-40
+
+ Iteration: 4600 - Loss: 1.5967150926589966
+ Pearsonr: 0.5805063592538471; P: 1.0563006313323093e-42
+ Spearmanr: 0.5796340310776816; P: 1.4989643627610055e-42
+
+ Iteration: 4800 - Loss: 1.6179089546203613
+ Pearsonr: 0.5738909908009685; P: 1.4628514856533864e-41
+ Spearmanr: 0.572972128896603; P: 2.0974777532746343e-41
+
+ Iteration: 5000 - Loss: 1.6256439685821533
+ Pearsonr: 0.5781692276548196; P: 2.6915840539003812e-42
+ Spearmanr: 0.5772358119400575; P: 3.902422855798749e-42
+
+ Iteration: 5200 - Loss: 1.5391310453414917
+ Pearsonr: 0.5816199450623889; P: 6.746493735758462e-43
+ Spearmanr: 0.5804478083819812; P: 1.0814449078904429e-42
+
+ Iteration: 5400 - Loss: 1.6562271118164062
+ Pearsonr: 0.5827991794835897; P: 4.1886265624163026e-43
+ Spearmanr: 0.5789342532933607; P: 1.9833432069495964e-42
+
+ Iteration: 5600 - Loss: 1.6008801460266113
+ Pearsonr: 0.5832895002519203; P: 3.4336054487220046e-43
+ Spearmanr: 0.5790409937076929; P: 1.9004988305762952e-42
+
+ Iteration: 5800 - Loss: 1.5070966482162476
+ Pearsonr: 0.5928269792902043; P: 6.716042014277357e-45
+ Spearmanr: 0.5874265863051931; P: 6.332060036437849e-44
+
+ Iteration: 6000 - Loss: 1.472196102142334
+ Pearsonr: 0.5951134354121741; P: 2.5643921587041375e-45
+ Spearmanr: 0.5872130423538708; P: 6.913548987836518e-44
+
+ Iteration: 6200 - Loss: 1.5007520914077759
+ Pearsonr: 0.5987526718275521; P: 5.451553645529932e-46
+ Spearmanr: 0.5871522552344138; P: 7.088548382398432e-44
+
+ Iteration: 6400 - Loss: 1.5846940279006958
+ Pearsonr: 0.5999422639000738; P: 3.2721563231899757e-46
+ Spearmanr: 0.5907427351324793; P: 1.6045876173576617e-44
+
+ Iteration: 6600 - Loss: 1.527746558189392
+ Pearsonr: 0.6033485008559338; P: 7.497213669470443e-47
+ Spearmanr: 0.5898760610409471; P: 2.3005956640995875e-44
+
+ Iteration: 6800 - Loss: 1.5243699550628662
+ Pearsonr: 0.6071802224877151; P: 1.398804600876004e-47
+ Spearmanr: 0.5930837651421408; P: 6.030050544468702e-45
+
+ Iteration: 7000 - Loss: 1.4590983390808105
+ Pearsonr: 0.6018006303294685; P: 1.4677563637101072e-46
+ Spearmanr: 0.5830826112410227; P: 3.7341450217285706e-43
+
+ Iteration: 7200 - Loss: 1.5712698698043823
+ Pearsonr: 0.6062190174244831; P: 2.1359814851275776e-47
+ Spearmanr: 0.5858399983015034; P: 1.2143998195906845e-43
+
+ Iteration: 7400 - Loss: 1.5214884281158447
+ Pearsonr: 0.6099004023741128; P: 4.188689746813027e-48
+ Spearmanr: 0.5920205902168914; P: 9.414211254324742e-45
+
+ Iteration: 7600 - Loss: 1.5258961915969849
+ Pearsonr: 0.6055007133395706; P: 2.9280054732260365e-47
+ Spearmanr: 0.5842234270353607; P: 2.349233529314388e-43
+
+ Iteration: 7800 - Loss: 1.5699869394302368
+ Pearsonr: 0.6093231622997569; P: 5.4153979089710764e-48
+ Spearmanr: 0.58477865393333; P: 1.8736301995904306e-43
+
+ Iteration: 8000 - Loss: 1.5184214115142822
+ Pearsonr: 0.605060782470973; P: 3.5505110155659953e-47
+ Spearmanr: 0.5789763561061104; P: 1.950246861072138e-42
+
+ Iteration: 8200 - Loss: 1.5991960763931274
+ Pearsonr: 0.6112574276498028; P: 2.2852097041251617e-48
+ Spearmanr: 0.5890162041964571; P: 3.2856410965658694e-44
+
+ Iteration: 8400 - Loss: 1.5256214141845703
+ Pearsonr: 0.6166074012573319; P: 2.0367621603375855e-49
+ Spearmanr: 0.5915340407706037; P: 1.1536676024411538e-44
+
+ Iteration: 8600 - Loss: 1.5412988662719727
+ Pearsonr: 0.617253839029917; P: 1.5160061382263392e-49
+ Spearmanr: 0.591598110268266; P: 1.1232132807652539e-44
+
+ Iteration: 8800 - Loss: 1.490647315979004
+ Pearsonr: 0.626078265183986; P: 2.5111495751614948e-51
+ Spearmanr: 0.6011685782752453; P: 1.9289908188639965e-46
+
+ Iteration: 9000 - Loss: 1.5037692785263062
+ Pearsonr: 0.6156840948372987; P: 3.1016210256479906e-49
+ Spearmanr: 0.5897977889452809; P: 2.376561004841454e-44
+
+ Iteration: 9200 - Loss: 1.4089856147766113
+ Pearsonr: 0.6146929210125671; P: 4.863962848935974e-49
+ Spearmanr: 0.5894476475624891; P: 2.74799184682832e-44
+
+ Iteration: 9400 - Loss: 1.5190479755401611
+ Pearsonr: 0.6279227335074822; P: 1.0480660773238495e-51
+ Spearmanr: 0.6001644861576926; P: 2.9738484904043734e-46
+
+ Iteration: 9600 - Loss: 1.5042378902435303
+ Pearsonr: 0.6321982775029772; P: 1.3515629592872948e-52
+ Spearmanr: 0.6067599201801124; P: 1.6835320827161711e-47
+
+ Iteration: 9800 - Loss: 1.4317461252212524
+ Pearsonr: 0.6295689616107754; P: 4.7810976831407934e-52
+ Spearmanr: 0.6009738448759464; P: 2.098169975631767e-46
+
+ Iteration: 10000 - Loss: 1.453094720840454
+ Pearsonr: 0.624768922206764; P: 4.652826148836808e-51
+ Spearmanr: 0.5968096431411777; P: 1.2491797183492255e-45
+
+ Iteration: 10200 - Loss: 1.4644638299942017
+ Pearsonr: 0.623839142537878; P: 7.196875072059396e-51
+ Spearmanr: 0.5975294939306774; P: 9.193912329551836e-46
+
+ Iteration: 10400 - Loss: 1.455410361289978
+ Pearsonr: 0.6235544830058308; P: 8.222633364556974e-51
+ Spearmanr: 0.6023223342144315; P: 1.1708398854462395e-46
+
+ Iteration: 10600 - Loss: 1.4695369005203247
+ Pearsonr: 0.6348209416475994; P: 3.7864436860874804e-53
+ Spearmanr: 0.6116882848101534; P: 1.884118101905883e-48
+
+ Iteration: 10800 - Loss: 1.5022165775299072
+ Pearsonr: 0.6281541414853896; P: 9.388538547537936e-52
+ Spearmanr: 0.6028106510951374; P: 9.472370858441417e-47
+
+ Iteration: 11000 - Loss: 1.4612548351287842
+ Pearsonr: 0.6229978607204307; P: 1.0665748630456236e-50
+ Spearmanr: 0.5995646946633196; P: 3.848544141558783e-46
+
+ Iteration: 11200 - Loss: 1.5569599866867065
+ Pearsonr: 0.6314183844965858; P: 1.968511548311075e-52
+ Spearmanr: 0.6067585946042987; P: 1.6845153805583686e-47
+
+ Iteration: 11400 - Loss: 1.3613736629486084
+ Pearsonr: 0.6408258017284847; P: 1.961802436251644e-54
+ Spearmanr: 0.6155540426188649; P: 3.2905389911833565e-49
+
+ Iteration: 11600 - Loss: 1.5064013004302979
+ Pearsonr: 0.6410379265662344; P: 1.764883697691992e-54
+ Spearmanr: 0.6150733635797523; P: 4.093279039521056e-49
+
+ Iteration: 11800 - Loss: 1.4196375608444214
+ Pearsonr: 0.6364924352181984; P: 1.672038217225987e-53
+ Spearmanr: 0.6118865530782891; P: 1.7238288213773374e-48
+
+ Iteration: 12000 - Loss: 1.53407883644104
+ Pearsonr: 0.636618748374165; P: 1.5715607736816904e-53
+ Spearmanr: 0.6104506388587572; P: 3.2774093494448963e-48
+
+ Iteration: 12200 - Loss: 1.4950836896896362
+ Pearsonr: 0.644366421928408; P: 3.3197046474191645e-55
+ Spearmanr: 0.6152795853027703; P: 3.7275052987909404e-49
+
+ Iteration: 12400 - Loss: 1.3674345016479492
+ Pearsonr: 0.6473769861777058; P: 7.193973392935627e-56
+ Spearmanr: 0.6210564446988981; P: 2.6319194480249493e-50
+
+ Iteration: 12600 - Loss: 1.4719256162643433
+ Pearsonr: 0.6463918659102053; P: 1.1888092558118035e-55
+ Spearmanr: 0.621427542804078; P: 2.2156523070358257e-50
+
+ Iteration: 12800 - Loss: 1.4978338479995728
+ Pearsonr: 0.6400583143580534; P: 2.874518683085562e-54
+ Spearmanr: 0.6165698755507679; P: 2.0719318443916184e-49
+
+ Iteration: 13000 - Loss: 1.4537323713302612
+ Pearsonr: 0.6433952124432938; P: 5.416969365141264e-55
+ Spearmanr: 0.6199290109080128; P: 4.434252845348923e-50
+
+ Iteration: 13200 - Loss: 1.4218829870224
+ Pearsonr: 0.6467281503382728; P: 1.0016975471460382e-55
+ Spearmanr: 0.6260784833531076; P: 2.510890919552011e-51
+
+ Iteration: 13400 - Loss: 1.4264096021652222
+ Pearsonr: 0.6471279084637483; P: 8.169592967434213e-56
+ Spearmanr: 0.6246663032529068; P: 4.882644880048028e-51
+
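
results.txt logs, every 200 training iterations, the loss plus Pearson and Spearman correlations with their p-values; the log itself does not state what the correlations are computed against or rank the checkpoints. A small parsing sketch follows; reading from a local file named results.txt and picking the highest Spearman correlation as "best" are my assumptions.

# Sketch: parse results.txt into records and report the iteration with the
# highest Spearman correlation.
import re

records = []
with open("results.txt") as f:
    block = {}
    for line in f:
        line = line.strip()
        if line.startswith("Iteration:"):
            m = re.match(r"Iteration: (\d+) - Loss: ([0-9.eE+-]+)", line)
            block = {"iteration": int(m.group(1)), "loss": float(m.group(2))}
        elif line.startswith("Pearsonr:"):
            block["pearson_r"] = float(line.split(";")[0].split(":")[1])
        elif line.startswith("Spearmanr:"):
            block["spearman_r"] = float(line.split(";")[0].split(":")[1])
            records.append(block)

best = max(records, key=lambda r: r["spearman_r"])
print(f"best Spearman r = {best['spearman_r']:.4f} at iteration {best['iteration']}")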