mmazuecos committed
Commit e769683
1 Parent(s): e71918d

Updated model with more data.

2_Dense/pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cb8b726320c19db73fe1b10f1e8fd9476783234dc72483d7aa971bc328069ff4
+oid sha256:318abfeb7ac3562dae47bd5126150009554f49c4704ea18ecc8903dfd970d857
 size 1575975
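
Both weight files in this commit are stored as Git LFS pointers, so the diff swaps only the `oid` line (the sha256 of the new binary); the size is unchanged. A minimal Python sketch for verifying a downloaded copy against the new oid, assuming the file has been pulled locally (the path is illustrative):

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so large checkpoints don't need to fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected digest is the new oid from the pointer diff above.
expected = "318abfeb7ac3562dae47bd5126150009554f49c4704ea18ecc8903dfd970d857"
print(sha256_of("2_Dense/pytorch_model.bin") == expected)
```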
README.md CHANGED
@@ -48,14 +48,14 @@ Our model was evaluated on the task of Semantic Textual Similarity using the [Se
 
 | | [BETO STS](https://huggingface.co/espejelomar/sentece-embeddings-BETO) | BERTIN STS (this model) | Relative improvement |
 |-------------------:|---------:|-----------:|---------------------:|
-| cosine_pearson | 0.609803 | 0.670862 | +10.01 |
-| cosine_spearman | 0.528776 | 0.598593 | +13.20 |
-| euclidean_pearson | 0.590613 | 0.675257 | +14.33 |
-| euclidean_spearman | 0.526529 | 0.604656 | +14.84 |
-| manhattan_pearson | 0.589108 | 0.676706 | +14.87 |
-| manhattan_spearman | 0.525910 | 0.606461 | +15.32 |
-| dot_pearson | 0.544078 | 0.586429 | +7.78 |
-| dot_spearman | 0.460427 | 0.495614 | +7.64 |
+| cosine_pearson | 0.609803 | 0.669326 | +9.76 |
+| cosine_spearman | 0.528776 | 0.596159 | +12.74 |
+| euclidean_pearson | 0.590613 | 0.665561 | +12.69 |
+| euclidean_spearman | 0.526529 | 0.600940 | +14.13 |
+| manhattan_pearson | 0.589108 | 0.665463 | +12.96 |
+| manhattan_spearman | 0.525910 | 0.600947 | +14.27 |
+| dot_pearson | 0.544078 | 0.600923 | +10.45 |
+| dot_spearman | 0.460427 | 0.517005 | +12.29 |
 
 
 ## Training
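
The "Relative improvement" column reads as the percentage gain of the BERTIN score over the BETO baseline, and the updated figures are consistent with that; a quick check for the cosine_pearson row (values taken from the table above):

```python
# Assumption: relative improvement = (bertin / beto - 1) * 100.
beto, bertin = 0.609803, 0.669326
print(f"{(bertin / beto - 1) * 100:+.2f}")  # +9.76, matching the new table row
```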
eval/similarity_evaluation_sts-test_results.csv CHANGED
@@ -1,21 +1,21 @@
 epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
-0,-1,0.6581493026240269,0.5802356493272345,0.6634103225436967,0.5887142267642859,0.6633780369919056,0.5889272703378547,0.571143299245293,0.4732063731208094
-1,-1,0.6708623071971952,0.5985927642452136,0.675257124555732,0.6046557636211276,0.6767056382416057,0.6064614282847686,0.5864286716770518,0.495613872629927
-2,-1,0.6702651671065515,0.5922353474286076,0.6672984469521261,0.594962259717964,0.6673488770673561,0.5945960133985778,0.6020281121442566,0.5095236479097289
-3,-1,0.6605032101558846,0.5890776181094547,0.6641603284425521,0.5993232115293229,0.6645610706259933,0.5984927833321959,0.5913711078698719,0.5082729703485656
-4,-1,0.6530779759890578,0.5781118041787935,0.6547349577996249,0.5874599982401654,0.6543895215674207,0.5869029941907284,0.581310838565905,0.497299912453472
-5,-1,0.6543208566097818,0.58194344945178,0.6491366001755403,0.5864144953186581,0.6498888685831916,0.5868199442690898,0.5910026676646215,0.5151166986440603
-6,-1,0.6472013765440673,0.5732578792820385,0.6433653543280587,0.5798672298185186,0.6435570196620376,0.5787832344585572,0.5864683897395871,0.5061222515124659
-7,-1,0.6400980616286338,0.5667287521006833,0.63656772778559,0.5743034106112495,0.6370456545413393,0.5730667948633957,0.5747946450752401,0.4979748652884891
-8,-1,0.6452381886531463,0.5729618568060901,0.6424560387096414,0.5811990823928792,0.6429919873656215,0.5808723511892786,0.58104040571895,0.5054740303265891
-9,-1,0.6384071016780053,0.5625492118848722,0.635541246663347,0.572271947315098,0.6356269918508849,0.5711684216588732,0.5787246865749316,0.5019918424201281
-10,-1,0.6419031233362158,0.569618113843397,0.6353794935936075,0.5752729939447357,0.6357950326588047,0.5748007016676012,0.5828757686864627,0.5121824811372735
-11,-1,0.6423540880042571,0.568767118468823,0.6349101892245405,0.5744865976882757,0.6356595326561394,0.5750529478722156,0.5862014516783208,0.5123514217509163
-12,-1,0.6402987765788005,0.563953589326667,0.6355130494886717,0.5722567776013456,0.6357087618283007,0.572723715026341,0.581950349472401,0.5068270750478774
-13,-1,0.6349740259317077,0.5605840947806097,0.6310765115255937,0.5677235330673125,0.6313588325370229,0.5675908264796831,0.5761493116509655,0.5012367514530972
-14,-1,0.6358190709395628,0.5624146872042087,0.6327586555267605,0.5708724133867766,0.6331569512601531,0.5702529265931481,0.5781642308453095,0.504501876095922
-15,-1,0.6311127603260229,0.5584118997205461,0.6288133015450812,0.5664748582492568,0.628861427115307,0.5660526913652236,0.5729793164702304,0.5013857356549215
-16,-1,0.6333152268675507,0.5610230364136519,0.6305527453515162,0.5691509491567724,0.6304938356795253,0.567996417468633,0.5755055543932878,0.5033426713405311
-17,-1,0.6353869036678222,0.5630840974232757,0.6323343041403464,0.5707859119291437,0.6322656960665566,0.5696195382630574,0.5764210881100128,0.5045764889295747
-18,-1,0.6352029170986021,0.5627512048617189,0.6315279989234651,0.5694638031971371,0.6315471951413272,0.5690261399107593,0.5761747454613159,0.5035466102450782
-19,-1,0.6355421845320552,0.5627031532309846,0.631491538163409,0.569449769791531,0.631682972028249,0.5686912699000612,0.5771564421958566,0.5049254207535826
+0,-1,0.6619363474232212,0.5885900850993088,0.6601369870108922,0.5898887432719473,0.6588104710011553,0.5895314311753609,0.5830982449199276,0.4937547304641535
+1,-1,0.6693262164948957,0.5961588063998108,0.6655605051331788,0.6009401780214281,0.6654625082134044,0.60094695325875,0.6009226777841519,0.5170048024637112
+2,-1,0.6547420445754193,0.5823884531525437,0.6511385841619973,0.5855875470652291,0.6522056613011148,0.5865922991358415,0.5843887822582672,0.5008089314360363
+3,-1,0.6552498219421751,0.584669481102323,0.64666647770094,0.5882148051301598,0.6481504965342848,0.5881976468771515,0.5997006203644883,0.519217293851411
+4,-1,0.6528668557506568,0.582745952876213,0.6410731501019628,0.5856126736791128,0.6424055098070853,0.5859782098059078,0.5916670098025287,0.5158791756140418
+5,-1,0.6483945219301912,0.5788003785077136,0.6343322715136153,0.5786700439633394,0.6364103314423305,0.579525783424424,0.5912828063040729,0.5199335798952477
+6,-1,0.6423989989672444,0.5699500862683221,0.6313857693866886,0.5746688473110814,0.6328955724455424,0.5744953472610018,0.5803600675604295,0.5036125587291159
+7,-1,0.6462629722681043,0.5770818260673343,0.6318230435253588,0.5775325284896901,0.6325422525209058,0.5764058505549855,0.586762886345868,0.5173493168898005
+8,-1,0.639790660325868,0.5676685783645897,0.6294617784838941,0.5698867228853173,0.6299734551587954,0.5695742381451001,0.5880059591595673,0.5146391367378975
+9,-1,0.6450089783532716,0.5758663314471489,0.6333562814425514,0.5766438502962163,0.6340741475326621,0.575110984810785,0.5879731498917842,0.5192021383415104
+10,-1,0.6434909937737626,0.5713701447625351,0.6301188859529719,0.5709692410446885,0.6309719329436375,0.5701395230401529,0.593913567963774,0.522557073939444
+11,-1,0.641203878405462,0.5722014251907718,0.6284168875038928,0.5737909498411451,0.6295168797303964,0.5728132601653629,0.5893572348665002,0.5218607585112776
+12,-1,0.6405665479784053,0.5712144563426479,0.6258392075727873,0.5693129298830195,0.6262440363392721,0.5679223727890534,0.593952756495054,0.5268886237775188
+13,-1,0.6390052346365416,0.5686395678794071,0.6258537618625887,0.5685859625426081,0.6265438374367317,0.5677389726542497,0.591956305872708,0.5218520657539587
+14,-1,0.6401240726804178,0.5711650411421381,0.6278602450688386,0.5727693520022645,0.628050553113738,0.5709335183573409,0.5937276661244524,0.5234451981826964
+15,-1,0.6398403358896347,0.5692425497972115,0.6246306232307527,0.5691193313826032,0.6255512511477327,0.5683736149577787,0.5940274286246308,0.5237160798409092
+16,-1,0.640328214937794,0.5708227567207858,0.6255762617684392,0.5716483840159948,0.6265171469104598,0.569976860529018,0.5945084491171609,0.5247411860311914
+17,-1,0.6404406282410006,0.5712850352823815,0.6251102831494417,0.5715652062596898,0.6257154822798084,0.5695532590559501,0.5939059512747178,0.525733381896788
+18,-1,0.64141106615211,0.572578991980065,0.6261621835757434,0.5725016418579003,0.6268679024101312,0.5700271563683411,0.5965506884620715,0.5278869557071051
+19,-1,0.6406208759751268,0.5720221725807018,0.625890984121176,0.5726780638465656,0.6263716719579958,0.5694595420415887,0.5959571243848296,0.5275360779553968
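
This CSV has the layout written by sentence-transformers' `EmbeddingSimilarityEvaluator`: one row per epoch (`steps` = -1 marks end of epoch), with Pearson and Spearman correlations against gold scores for cosine, Euclidean, Manhattan, and dot-product similarity. A sketch of how such a file is produced, assuming that evaluator was used; the model id and sentence pairs below are placeholders, not taken from this repo:

```python
import os
from sentence_transformers import SentenceTransformer, evaluation

# Placeholder base model and STS pairs; the real run used the Spanish
# STS test split referenced in the README.
model = SentenceTransformer("bertin-project/bertin-roberta-base-spanish")
evaluator = evaluation.EmbeddingSimilarityEvaluator(
    sentences1=["El gato duerme.", "Llueve mucho.", "Me gusta el café."],
    sentences2=["Un gato está durmiendo.", "Hace sol.", "Odio el té."],
    scores=[0.9, 0.2, 0.1],  # gold similarities scaled to [0, 1]
    name="sts-test",
)
os.makedirs("eval", exist_ok=True)
# Writes eval/similarity_evaluation_sts-test_results.csv with the
# epoch,steps,... columns shown in the diff above.
evaluator(model, output_path="eval", epoch=0, steps=-1)
```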
loss_digest.json CHANGED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:94bf68a63b5b838390ca25847d46db5693bd6ba6aa72f218f6fda267787eed75
+oid sha256:aa94d27f78e0ea7d33cdcb67c9b9cf8959fa314dace803031713b5c976f761e2
 size 498664817
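
Both `pytorch_model.bin` (the transformer weights) and `2_Dense/pytorch_model.bin` (the Dense module in the sentence-transformers stack) changed oid while keeping their byte sizes, consistent with retraining the same architecture on more data. A usage sketch for the updated checkpoint; "user/this-model" is a placeholder for this repository's Hub id:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

model = SentenceTransformer("user/this-model")  # placeholder repo id
emb = model.encode(["El clima es agradable hoy.", "Hoy hace buen tiempo."])
print(cos_sim(emb[0], emb[1]))  # cosine similarity of the two embeddings
```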