nishantyadav committed on
Commit
029f4e0
·
verified ·
1 Parent(s): d4a09a2

Upload 8 files

Browse files

Adding model files

CERerankingEvaluator_train-eval_results.csv ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ epoch,steps,MRR@10
2
+ 0,5000,0.5183234126984128
3
+ 0,10000,0.5799543650793652
4
+ 0,15000,0.6082440476190476
5
+ 0,20000,0.6290059523809524
6
+ 0,25000,0.6228075396825397
7
+ 0,30000,0.6305396825396825
8
+ 0,35000,0.6330515873015873
9
+ 0,40000,0.6122103174603174
10
+ 0,45000,0.6444940476190476
11
+ 0,50000,0.6226547619047619
12
+ 0,55000,0.6403928571428571
13
+ 0,60000,0.6510257936507935
14
+ 0,65000,0.6380873015873015
15
+ 0,70000,0.6560019841269842
16
+ 0,75000,0.6571031746031746
17
+ 0,80000,0.6577023809523809
18
+ 0,85000,0.6527936507936508
19
+ 0,90000,0.6529563492063493
20
+ 0,95000,0.6611071428571428
21
+ 0,100000,0.6481765873015874
22
+ 0,105000,0.656109126984127
23
+ 0,110000,0.6446587301587301
24
+ 0,115000,0.6371904761904762
25
+ 0,120000,0.6460734126984127
26
+ 0,125000,0.650767857142857
27
+ 0,130000,0.6511230158730159
28
+ 0,135000,0.6501964285714286
29
+ 0,140000,0.6726210317460317
30
+ 0,145000,0.6524603174603175
31
+ 0,150000,0.6537400793650795
32
+ 0,155000,0.6457003968253968
33
+ 0,160000,0.6591964285714286
34
+ 0,165000,0.6496210317460317
35
+ 0,170000,0.6524126984126984
36
+ 0,175000,0.6579742063492062
37
+ 0,180000,0.6596984126984127
38
+ 0,185000,0.6553234126984127
39
+ 0,190000,0.6594742063492063
40
+ 0,195000,0.6550039682539682
41
+ 0,200000,0.6506230158730159
42
+ 0,205000,0.6645734126984126
43
+ 0,210000,0.6649345238095238
44
+ 0,215000,0.6621289682539683
45
+ 0,220000,0.6696845238095238
46
+ 0,225000,0.6620039682539682
47
+ 0,230000,0.6730813492063491
48
+ 0,235000,0.668345238095238
49
+ 0,240000,0.6539821428571428
50
+ 0,245000,0.657220238095238
51
+ 0,250000,0.6646626984126983
52
+ 0,255000,0.6767519841269841
53
+ 0,260000,0.6714920634920635
54
+ 0,265000,0.6630376984126986
55
+ 0,270000,0.6647400793650795
56
+ 0,275000,0.678781746031746
57
+ 0,280000,0.684281746031746
58
+ 0,285000,0.6695198412698411
59
+ 0,290000,0.6745059523809525
60
+ 0,295000,0.681672619047619
61
+ 0,300000,0.6795575396825396
62
+ 0,305000,0.6796071428571429
63
+ 0,310000,0.6707123015873014
64
+ 0,315000,0.6772400793650795
65
+ 0,320000,0.6806587301587301
66
+ 0,325000,0.6839007936507936
67
+ 0,330000,0.6798988095238095
68
+ 0,335000,0.6642619047619047
69
+ 0,340000,0.6730873015873016
70
+ 0,345000,0.6710039682539682
71
+ 0,350000,0.676795634920635
72
+ 0,355000,0.6763809523809524
73
+ 0,360000,0.6798095238095238
74
+ 0,365000,0.6697738095238094
75
+ 0,370000,0.6712043650793652
76
+ 0,375000,0.680109126984127
77
+ 0,380000,0.6659484126984127
78
+ 0,385000,0.6895178571428571
79
+ 0,390000,0.6921150793650793
80
+ 0,395000,0.6748551587301587
81
+ 0,400000,0.670718253968254
82
+ 0,405000,0.681704365079365
83
+ 0,410000,0.6762242063492063
84
+ 0,415000,0.6791904761904761
85
+ 0,420000,0.6817460317460317
86
+ 0,425000,0.6717281746031746
87
+ 0,430000,0.6741924603174603
88
+ 0,435000,0.6763591269841269
89
+ 0,440000,0.6856309523809523
90
+ 0,445000,0.6904821428571429
91
+ 0,450000,0.6825476190476191
92
+ 0,455000,0.6845198412698413
93
+ 0,460000,0.6934642857142856
94
+ 0,465000,0.6892142857142858
95
+ 0,470000,0.6820178571428572
96
+ 0,475000,0.6887202380952381
97
+ 0,480000,0.6841011904761904
98
+ 0,485000,0.6916507936507936
99
+ 0,490000,0.6884345238095239
100
+ 0,495000,0.6874761904761906
101
+ 0,500000,0.6879285714285714
102
+ 0,505000,0.6888988095238097
103
+ 0,510000,0.6796567460317461
104
+ 0,515000,0.6878869047619048
105
+ 0,520000,0.6902142857142857
106
+ 0,525000,0.6817500000000001
107
+ 0,530000,0.68975
108
+ 0,535000,0.6792281746031746
109
+ 0,540000,0.6862757936507936
110
+ 0,545000,0.6746448412698414
111
+ 0,550000,0.6721865079365079
112
+ 0,555000,0.6777400793650793
113
+ 0,560000,0.6777400793650794
114
+ 0,565000,0.6737638888888888
115
+ 0,570000,0.6779781746031746
116
+ 0,575000,0.6773055555555555
117
+ 0,580000,0.6781369047619047
118
+ 0,585000,0.6773869047619047
119
+ 0,590000,0.6820615079365081
120
+ 0,595000,0.6744305555555556
121
+ 0,600000,0.6759166666666667
122
+ 0,605000,0.6719166666666666
123
+ 0,610000,0.6744166666666667
124
+ 0,615000,0.6707499999999998
125
+ 0,620000,0.6800198412698413
126
+ 0,625000,0.6825972222222222
127
+ 0,630000,0.6748253968253968
128
+ 0,635000,0.6678452380952382
129
+ 0,640000,0.6701309523809523
130
+ 0,645000,0.667686507936508
131
+ 0,650000,0.6716309523809524
132
+ 0,655000,0.6596666666666667
133
+ 0,660000,0.6740476190476191
134
+ 0,665000,0.6738611111111112
135
+ 0,670000,0.6657698412698413
136
+ 0,675000,0.6664166666666667
137
+ 0,680000,0.671236111111111
138
+ 0,685000,0.663547619047619
139
+ 0,690000,0.6676587301587301
140
+ 0,695000,0.6707698412698412
141
+ 0,700000,0.6676309523809525
142
+ 0,705000,0.6710753968253969
143
+ 0,710000,0.6688373015873016
144
+ 0,715000,0.6640138888888888
145
+ 0,720000,0.6690138888888889
146
+ 0,725000,0.6631805555555556
147
+ 0,730000,0.6620416666666666
148
+ 0,735000,0.6669583333333332
149
+ 0,740000,0.6670416666666666
150
+ 0,745000,0.6692142857142858
151
+ 0,750000,0.6706031746031746
152
+ 0,755000,0.6723055555555555
153
+ 0,760000,0.6715059523809523
154
+ 0,765000,0.664875
155
+ 0,770000,0.6722083333333333
156
+ 0,775000,0.6728809523809524
157
+ 0,780000,0.6726428571428571
158
+ 0,785000,0.6716369047619049
159
+ 0,790000,0.6715952380952381
160
+ 0,795000,0.6730119047619049
161
+ 0,800000,0.673345238095238
162
+ 0,805000,0.6726785714285713
163
+ 0,810000,0.6720119047619048
164
+ 0,815000,0.6722499999999999
165
+ 0,820000,0.6718452380952381
166
+ 0,825000,0.6719761904761904
167
+ 0,830000,0.672095238095238
168
+ 0,-1,0.6722142857142858
config.json ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "bert-large-uncased-whole-word-masking",
3
+ "architectures": [
4
+ "BertForSequenceClassification"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "classifier_dropout": null,
8
+ "hidden_act": "gelu",
9
+ "hidden_dropout_prob": 0.1,
10
+ "hidden_size": 1024,
11
+ "id2label": {
12
+ "0": "LABEL_0"
13
+ },
14
+ "initializer_range": 0.02,
15
+ "intermediate_size": 4096,
16
+ "label2id": {
17
+ "LABEL_0": 0
18
+ },
19
+ "layer_norm_eps": 1e-12,
20
+ "max_position_embeddings": 512,
21
+ "model_type": "bert",
22
+ "num_attention_heads": 16,
23
+ "num_hidden_layers": 24,
24
+ "output_hidden_states": true,
25
+ "pad_token_id": 0,
26
+ "position_embedding_type": "absolute",
27
+ "torch_dtype": "float32",
28
+ "transformers_version": "4.26.1",
29
+ "type_vocab_size": 2,
30
+ "use_cache": true,
31
+ "vocab_size": 30522
32
+ }
orig_param_for_run.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "base_model_name": "bert-large-uncased-whole-word-masking",
3
+ "use_embed_ce_model": 1,
4
+ "res_dir": "../../results/9_BEIR_CrossEnc/basemodel=bert_large_wwm_ce_pool=embed_loss=bce_from_scratch_seed=0",
5
+ "evaluation_steps": 5000,
6
+ "train_batch_size": 24,
7
+ "lr": 1e-06,
8
+ "disable_wandb": 0,
9
+ "seed": 0
10
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:56100b89c6f749f8a83f94732a584476dc35a2e1674f6cdf16928ba495a0cc1d
3
+ size 1340707629
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "do_lower_case": true,
4
+ "mask_token": "[MASK]",
5
+ "model_max_length": 512,
6
+ "name_or_path": "bert-large-uncased-whole-word-masking",
7
+ "pad_token": "[PAD]",
8
+ "sep_token": "[SEP]",
9
+ "special_tokens_map_file": null,
10
+ "strip_accents": null,
11
+ "tokenize_chinese_chars": true,
12
+ "tokenizer_class": "BertTokenizer",
13
+ "unk_token": "[UNK]"
14
+ }
vocab.txt ADDED
The diff for this file is too large to render. See raw diff