yobi committed
Commit c9c39a7
1 Parent(s): ee0b2ae

upload sts model
1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "word_embedding_dimension": 768,
+  "pooling_mode_cls_token": false,
+  "pooling_mode_mean_tokens": true,
+  "pooling_mode_max_tokens": false,
+  "pooling_mode_mean_sqrt_len_tokens": false
+}
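The pooling config above selects mean pooling over token embeddings, with a 768-dimensional word embedding matching the klue/roberta-base hidden size. As an illustrative sketch (not code from this repo) of what that pooling step does when the backbone is loaded with plain transformers:

```python
import torch
from transformers import AutoModel, AutoTokenizer

# Sketch of mean pooling as configured in 1_Pooling/config.json.
tokenizer = AutoTokenizer.from_pretrained("klue/roberta-base")
backbone = AutoModel.from_pretrained("klue/roberta-base")

def mean_pool(last_hidden_state: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    # Average token embeddings, ignoring padding positions via the attention mask.
    mask = attention_mask.unsqueeze(-1).type_as(last_hidden_state)
    summed = (last_hidden_state * mask).sum(dim=1)
    counts = mask.sum(dim=1).clamp(min=1e-9)
    return summed / counts  # shape: (batch, 768)

batch = tokenizer(["안녕하세요", "문장 임베딩 예시입니다"], padding=True, return_tensors="pt")
with torch.no_grad():
    out = backbone(**batch)
embeddings = mean_pool(out.last_hidden_state, batch["attention_mask"])
```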
config.json ADDED
@@ -0,0 +1,27 @@
+{
+  "_name_or_path": "klue/roberta-base",
+  "architectures": [
+    "RobertaModel"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 512,
+  "model_type": "roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "tokenizer_class": "BertTokenizer",
+  "transformers_version": "4.7.0",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 32000
+}
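config.json pins the encoder to klue/roberta-base: 12 layers, 12 attention heads, hidden size 768, vocabulary of 32000, exposed as a plain RobertaModel. A minimal, hedged sketch for verifying those dimensions with plain transformers:

```python
from transformers import AutoConfig, AutoModel

# Sketch only: load the published backbone config and confirm the sizes listed above.
config = AutoConfig.from_pretrained("klue/roberta-base")
assert config.hidden_size == 768
assert config.num_hidden_layers == 12
assert config.vocab_size == 32000

backbone = AutoModel.from_pretrained("klue/roberta-base")  # instantiates RobertaModel
```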
modules.json ADDED
@@ -0,0 +1,14 @@
+[
+  {
+    "idx": 0,
+    "name": "0",
+    "path": "0_Transformer",
+    "type": "sentence_transformers.models.Transformer"
+  },
+  {
+    "idx": 1,
+    "name": "1",
+    "path": "1_Pooling",
+    "type": "sentence_transformers.models.Pooling"
+  }
+]
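modules.json declares a two-stage sentence-transformers pipeline: the Transformer module in 0_Transformer followed by the Pooling module in 1_Pooling. Roughly the same stack can be assembled from scratch as sketched below (the training code itself is not part of this commit):

```python
from sentence_transformers import SentenceTransformer, models

# Sketch of the module stack described in modules.json (Transformer -> Pooling).
word_embedding_model = models.Transformer("klue/roberta-base", max_seq_length=128)
pooling_model = models.Pooling(
    word_embedding_model.get_word_embedding_dimension(),  # 768
    pooling_mode_mean_tokens=True,
    pooling_mode_cls_token=False,
    pooling_mode_max_tokens=False,
)
model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
```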
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0fdbe7237da862fc0be161930fefcbaae12cb0309bb42461ed163c1033e19d29
+size 442552823
sentence_bert_config.json ADDED
@@ -0,0 +1,3 @@
+{
+  "max_seq_length": 128
+}
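sentence_bert_config.json caps inputs at 128 tokens. With all files from this commit in place, the model directory (or its Hub repo id, shown as a placeholder below) loads directly with sentence-transformers:

```python
from sentence_transformers import SentenceTransformer

# "path/to/this-model" is a placeholder for the local directory or Hub repo id of this upload.
model = SentenceTransformer("path/to/this-model")
print(model.max_seq_length)  # 128, from sentence_bert_config.json

sentences = ["오늘 날씨가 좋다", "날씨가 참 맑다"]
embeddings = model.encode(sentences)  # shape: (2, 768)
```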
similarity_evaluation_sts-dev_results.csv ADDED
@@ -0,0 +1,21 @@
+epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
+0,1000,0.8523253647830666,0.8479881475980451,0.843085946920058,0.8464467628647114,0.8427749656944536,0.8459919969948294,0.8357277367317985,0.8317161880293799
+0,-1,0.8548245753864991,0.8520141977008205,0.8511031512583442,0.8517475745688917,0.8512703353982825,0.8520629637958119,0.8414702560181455,0.8363945156744724
+1,1000,0.8602223412514562,0.8590312740640974,0.8571099290611404,0.859209754528844,0.856488075550383,0.858846279361326,0.8457653516413578,0.8427980968362775
+1,-1,0.8615992598034478,0.8599627124197579,0.855771587311051,0.858511829307903,0.8554912113748516,0.8582677245637961,0.8446829799784645,0.8415209038099224
+2,1000,0.865724495345754,0.8645599592706353,0.8606407739624855,0.8628290149715305,0.8607891498115707,0.8630880933775689,0.8508211306199371,0.8470972201447121
+2,-1,0.8654392357265168,0.8635890689902616,0.8612835887998227,0.8637523747003235,0.8613394780282255,0.8641173000528153,0.850057981693161,0.8457635647219377
+3,1000,0.8663794720704876,0.8654549011301631,0.8630229225403987,0.8659329683628167,0.8626657592500865,0.86582372033598,0.8519271384670168,0.8491097401181578
+3,-1,0.8658843282635078,0.8645744903817244,0.8610827175364305,0.8653948282357269,0.8608735737453008,0.8653358331586726,0.8489277012935185,0.8461737576411918
+4,1000,0.8675291001543237,0.8653526014073236,0.859119383886179,0.8630208505219921,0.8589199150150725,0.8631022748645039,0.8474387204975561,0.8440527021619145
+4,-1,0.8663779520419456,0.8653367936729611,0.8593382107865066,0.8627151594229913,0.8590229226967419,0.8625799851274466,0.8514058551907155,0.8483936341206835
+5,1000,0.8702157988060812,0.8686505588280959,0.8628279595754006,0.8663369159854302,0.8624257305631733,0.8662203578430634,0.8534990322698968,0.8505220850267589
+5,-1,0.8684295899164396,0.8670654406071499,0.8599262633789017,0.8641306520393058,0.859776865980857,0.8640585281563211,0.8527513018248534,0.8502705955748278
+6,1000,0.870220027699182,0.8687968578747783,0.8607151156384214,0.8646066893190816,0.8605441868782677,0.8645493531911799,0.8514507644836449,0.8492843218354232
+6,-1,0.8711290613519982,0.8701397954811555,0.8614500136907117,0.8656272033781016,0.860934535357904,0.8653999240134905,0.8520983164865445,0.8501676636344107
+7,1000,0.8696803487776803,0.86818576893451,0.8611441083567066,0.865069975600292,0.8609653144146695,0.8650973095061174,0.8494895316412442,0.8466885415017453
+7,-1,0.8706073473679731,0.868975703197847,0.8605276573735954,0.8650761507702662,0.8603256143035385,0.8649731875994131,0.8516296517414808,0.8490376014397476
+8,1000,0.8715392234101196,0.8699789424721319,0.8619114489269982,0.8662239287831283,0.8615110686582258,0.8659198578378879,0.8520906293600943,0.8497977681360369
+8,-1,0.8716414431233991,0.8700063860639667,0.861988941645388,0.866451048899272,0.8616469172402355,0.8662732707296678,0.8512941357020245,0.8488588635182435
+9,1000,0.8716043086619858,0.8701516403779795,0.8618455732399168,0.8662074118982481,0.8614952389193472,0.8661708666016593,0.8504282874483489,0.8479867325434
+9,-1,0.8716101120388275,0.8701471870994398,0.8618457983974901,0.8661906367942215,0.8614941705156921,0.8661505846432916,0.8504797769357033,0.8480492086275908
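The dev CSV tracks Pearson and Spearman correlations for cosine, Euclidean, Manhattan, and dot-product similarity at each checkpoint; cosine-Spearman on dev peaks at roughly 0.870 around epoch 9. A hedged sketch of how such scores are typically computed from sentence embeddings and gold STS labels (variable names here are illustrative):

```python
import numpy as np
from scipy.stats import pearsonr, spearmanr

# Sketch: score predicted cosine similarities against gold STS ratings.
# emb1, emb2: (n, 768) arrays from model.encode(...); gold_scores: (n,) human similarity labels.
def sts_eval(emb1: np.ndarray, emb2: np.ndarray, gold_scores: np.ndarray):
    cos = np.sum(emb1 * emb2, axis=1) / (
        np.linalg.norm(emb1, axis=1) * np.linalg.norm(emb2, axis=1)
    )
    return pearsonr(cos, gold_scores)[0], spearmanr(cos, gold_scores)[0]
```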
similarity_evaluation_sts-test_results.csv ADDED
@@ -0,0 +1,2 @@
+epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
+-1,-1,0.8251418687805505,0.8242608605316377,0.8191574173045042,0.8220100081912145,0.8182904731132529,0.8215151217667238,0.809715688973155,0.8032031828425135
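On the held-out test split the final checkpoint reaches a cosine-Spearman of about 0.824. For downstream STS scoring, cosine similarity over the encoded sentence pairs is the relevant metric; a minimal usage sketch (repo path is a placeholder):

```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("path/to/this-model")  # placeholder for this upload's directory or repo id
score = util.cos_sim(
    model.encode("한 남자가 기타를 치고 있다", convert_to_tensor=True),
    model.encode("남자가 악기를 연주한다", convert_to_tensor=True),
)
print(float(score))  # higher means more semantically similar
```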
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+{"bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_basic_tokenize": true, "never_split": null, "bos_token": "[CLS]", "eos_token": "[SEP]", "model_max_length": 512, "special_tokens_map_file": "/home/yobi/.cache/huggingface/transformers/9d0c87e44b00acfbfbae931b2e4068eb6311a0c3e71e23e5400bdf57cab4bfbf.70c17d6e4d492c8f24f5bb97ab56c7f272e947112c6faf9dd846da42ba13eb23", "name_or_path": "klue/roberta-base"}
vocab.txt ADDED
The diff for this file is too large to render. See raw diff