tceron committed on
Commit 366130a
1 Parent(s): 59f523c
.gitattributes CHANGED
@@ -30,3 +30,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *.json filter=lfs diff=lfs merge=lfs -text
1_Pooling/config.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a37f83ada23e7887be6b88f4998927dbeac0038af301553c7cd5461413bf1a56
+ size 190
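
Each file added in this commit is stored as a Git LFS pointer stub (version, oid, size lines) rather than the content itself; the `*.json` rule added to `.gitattributes` above is what routes the JSON files through LFS as well. A minimal, illustrative Python sketch for reading such a pointer stub follows; the helper name and field handling are assumptions for demonstration, not part of this repository.

```python
# Illustrative only: parse a Git LFS pointer stub (version / oid / size lines)
# of the kind shown in this diff.
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Return the key/value fields of a Git LFS pointer file."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        if not line.strip():
            continue
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

if __name__ == "__main__":
    # 1_Pooling/config.json here is a pointer stub, not actual JSON content.
    pointer = parse_lfs_pointer("1_Pooling/config.json")
    print(pointer.get("oid"), pointer.get("size"))
```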
binary_classification_evaluation_evaluation_results.csv ADDED
@@ -0,0 +1,2 @@
+ epoch,steps,cossim_accuracy,cossim_accuracy_threshold,cossim_f1,cossim_precision,cossim_recall,cossim_f1_threshold,cossim_ap,manhatten_accuracy,manhatten_accuracy_threshold,manhatten_f1,manhatten_precision,manhatten_recall,manhatten_f1_threshold,manhatten_ap,euclidean_accuracy,euclidean_accuracy_threshold,euclidean_f1,euclidean_precision,euclidean_recall,euclidean_f1_threshold,euclidean_ap,dot_accuracy,dot_accuracy_threshold,dot_f1,dot_precision,dot_recall,dot_f1_threshold,dot_ap
+ -1,-1,0.7018628301790474,0.41827163100242615,0.7139408249851281,0.6067908157242357,0.8670483474678782,-0.013363838195800781,0.7605544906720272,0.7011758935170491,170.86181640625,0.7125411122326571,0.6381925301714788,0.8064969337544473,200.87957763671875,0.7585932555955537,0.7022484690967282,9.126983642578125,0.7151453298154022,0.6180708660462432,0.8483945213749386,11.43150520324707,0.7608869177446481,0.7008302056814878,29.782302856445312,0.7139724337965765,0.6067050000271703,0.8673169075199034,-0.9657132625579834,0.752612375068187
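
The column layout (cossim/manhatten/euclidean/dot accuracy, F1, precision, recall, thresholds, and AP) and the file name match what sentence-transformers' `BinaryClassificationEvaluator` writes. A minimal sketch of how such a results file is typically produced is below; the model id, sentence pairs, and labels are placeholders, not taken from this commit.

```python
# Sketch only: produce a binary_classification_evaluation_*_results.csv like the one
# above with sentence-transformers. Model id, pairs, and labels are placeholders.
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import BinaryClassificationEvaluator

model = SentenceTransformer("paraphrase-multilingual-mpnet-base-v2")  # placeholder model

sentences1 = ["A man is eating food.", "The weather is nice."]
sentences2 = ["A person eats a meal.", "It is raining heavily."]
labels = [1, 0]  # 1 = similar pair, 0 = dissimilar pair

evaluator = BinaryClassificationEvaluator(
    sentences1, sentences2, labels, name="evaluation"
)
# Writes binary_classification_evaluation_evaluation_results.csv to output_path,
# with epoch/steps reported as -1,-1 when run outside a training loop.
evaluator(model, output_path=".")
```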
config.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc2aea9df9fd4d3ad89645fe6f417002b04ab987995c3ebaa811469e87870626
+ size 836
config_sentence_transformers.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8c64b5cece00d8424b4896ea75b512b6008576088497609dfeb6bd63e6d36b8
+ size 122
modules.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f4b264b80206c830bebbdcae377e137925650a433b689343a63bdc9b3145460
+ size 229
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5cd404261a5fc2d5ee555a2e2269456967469b5252faacba7981bfa8502b5ee7
+ size 1112244081
sentence_bert_config.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70f4448f31320443fe3557cacea5abf2dcc4915dda8c80646bec9f3bb0aa5a1f
+ size 53
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
+ size 5069051
special_tokens_map.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:378eb3bf733eb16e65792d7e3fda5b8a4631387ca04d2015199c4d4f22ae554d
+ size 239
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b60b6b43406a48bf3638526314f3d232d97058bc93472ff2de930d43686fa441
+ size 17082913
tokenizer_config.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9a522939ec5f013b0e85ec53e9dab2bb50d4d1c80904be53a3286e6281e03f4
+ size 498
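
Taken together, modules.json, sentence_bert_config.json, 1_Pooling/config.json, the tokenizer files, and pytorch_model.bin form the standard sentence-transformers model layout, so the repository can presumably be loaded directly with `SentenceTransformer`. A minimal usage sketch, with the repository id left as a placeholder:

```python
# Sketch only: load a repository with this file layout as a SentenceTransformer model.
# "tceron/<repo-name>" is a placeholder; substitute the actual model id.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("tceron/<repo-name>")
embeddings = model.encode(["An example sentence to embed."])
print(embeddings.shape)  # (1, embedding_dim)
```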