Ubuntu committed
Commit d53b311
1 Parent(s): 5f70480

added model

0_Transformer/added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"IDskintoneID": 250054, "IDcellularphoneID": 250011, "IDbadappleID": 250003, "IDlabourunionID": 250035, "IDsilverliningID": 250052, "IDsugardaddyID": 250055, "IDfishstoryID": 250027, "IDdrylandID": 250021, "IDsexbombID": 250051, "IDbusybeeID": 250009, "IDdisabilityinsuranceID": 250019, "IDfoodmarketID": 250029, "IDphonebookID": 250045, "IDtravelguideID": 250059, "IDsacredcowID": 250050, "IDgooseeggID": 250030, "IDentrancehallID": 250026, "IDswansongID": 250056, "IDancienthistoryID": 250002, "IDweddingdayID": 250061, "IDbananarepublicID": 250005, "IDinsurancecompanyID": 250034, "IDbirthrateID": 250007, "IDlifevestID": 250036, "IDcriticalreviewID": 250018, "IDloansharkID": 250037, "IDprivateeyeID": 250047, "IDhighlifeID": 250032, "IDconartistID": 250015, "IDmentaldisorderID": 250040, "IDmiddleschoolID": 250041, "IDbigfishID": 250006, "IDcallcentreID": 250010, "IDeconomicaidID": 250024, "IDbadhatID": 250004, "IDtopdogID": 250058, "IDpeaceconferenceID": 250044, "IDcottoncandyID": 250017, "IDpillowslipID": 250046, "IDricepaperID": 250049, "IDpublicserviceID": 250048, "IDelbowroomID": 250025, "IDmailinglistID": 250038, "IDchainreactionID": 250012, "IDmailserviceID": 250039, "IDhomerunID": 250033, "IDduststormID": 250022, "IDcomputerprogramID": 250014, "IDflowerchildID": 250028, "IDpandacarID": 250043, "IDhealthcheckID": 250031, "IDthinktankID": 250057, "IDweddinganniversaryID": 250060, "IDsittingduckID": 250053, "IDeagerbeaverID": 250023, "IDdreamticketID": 250020, "IDbowtieID": 250008, "IDclosedbookID": 250013, "IDnarrowescapeID": 250042, "IDcookingstoveID": 250016}
0_Transformer/config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "_name_or_path": "/home/ubuntu/preTrained-xlm-pt-e8-all",
+ "architectures": [
+ "XLMRobertaModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "eos_token_id": 2,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 514,
+ "model_type": "xlm-roberta",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "output_past": true,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "transformers_version": "4.5.1",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 250062
+ }
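This is a standard XLM-RoBERTa base configuration (12 layers, 12 attention heads, hidden size 768) with the vocabulary enlarged to 250062 for the added tokens. A minimal loading sketch with transformers, assuming 0_Transformer is a local checkout of this folder:

```python
from transformers import XLMRobertaModel

# from_pretrained reads config.json and pytorch_model.bin from the folder.
model = XLMRobertaModel.from_pretrained("0_Transformer")
print(model.config.num_hidden_layers, model.config.num_attention_heads)  # 12 12
print(model.config.hidden_size, model.config.vocab_size)                 # 768 250062
```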
0_Transformer/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b304a515c6c3f765bbd566108962bf220d00d5ec55b7e48fd1f736755517257
+ size 1112445495
0_Transformer/sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "max_seq_length": null,
+ "do_lower_case": false
+ }
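These two settings correspond to constructor arguments of the sentence-transformers Transformer module; with max_seq_length left null, the module falls back to the tokenizer/model limits. A minimal sketch of building that module by hand, assuming 0_Transformer is a local checkout:

```python
from sentence_transformers import models

word_embedding_model = models.Transformer(
    "0_Transformer",
    max_seq_length=None,   # null above: defer to the tokenizer/model maximum
    do_lower_case=False,
)
print(word_embedding_model.get_word_embedding_dimension())  # 768
```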
0_Transformer/sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
+ size 5069051
0_Transformer/special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
0_Transformer/tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "truncation": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "/home/ubuntu/preTrained-xlm-pt-e8-all", "max_length": 510}
1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "word_embedding_dimension": 768,
+ "pooling_mode_cls_token": false,
+ "pooling_mode_mean_tokens": true,
+ "pooling_mode_max_tokens": false,
+ "pooling_mode_mean_sqrt_len_tokens": false
+ }
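This pooling configuration enables plain mean pooling only: token embeddings are averaged over non-padding positions into one 768-dimensional sentence vector. A minimal sketch of that computation (equivalent in spirit to sentence_transformers.models.Pooling with pooling_mode_mean_tokens=true; the function name is illustrative):

```python
import torch

def mean_pool(last_hidden_state: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    """Average token embeddings over non-padding positions."""
    # last_hidden_state: (batch, seq_len, 768); attention_mask: (batch, seq_len)
    mask = attention_mask.unsqueeze(-1).type_as(last_hidden_state)  # (batch, seq_len, 1)
    summed = (last_hidden_state * mask).sum(dim=1)                  # sum over real tokens only
    counts = mask.sum(dim=1).clamp(min=1e-9)                        # guard against all-padding rows
    return summed / counts                                          # (batch, 768)
```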
config.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "__version__": "1.1.0"
+ }
modules.json ADDED
@@ -0,0 +1,14 @@
+ [
+ {
+ "idx": 0,
+ "name": "0",
+ "path": "0_Transformer",
+ "type": "sentence_transformers.models.Transformer"
+ },
+ {
+ "idx": 1,
+ "name": "1",
+ "path": "1_Pooling",
+ "type": "sentence_transformers.models.Pooling"
+ }
+ ]
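modules.json is what lets sentence-transformers chain the two folders above into a single encoder: the 0_Transformer module followed by 1_Pooling. A minimal loading sketch; the path below is a placeholder for a local checkout or hub id of this repository:

```python
from sentence_transformers import SentenceTransformer

# Reads modules.json, assembles Transformer + Pooling, and returns mean-pooled embeddings.
model = SentenceTransformer("path/to/this/repository")  # placeholder
embeddings = model.encode(["a sample sentence", "another sentence"])
print(embeddings.shape)  # (2, 768)
```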
similarity_evaluation_sts-dev_results.csv ADDED
@@ -0,0 +1,5 @@
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
+ 0,-1,0.8217636801388839,0.8213748960210768,0.8050135355809203,0.8075767263564638,0.8139790782755192,0.8156005966625057,0.7734427495179009,0.7794504809675942
+ 1,-1,0.8396114416468827,0.8409286230384443,0.8277647977327913,0.8302227330879178,0.833102640024024,0.8344763267706993,0.806926782070464,0.8064803376994787
+ 2,-1,0.8510586400120905,0.8503083341378175,0.8374559029387212,0.8414827871219465,0.8416737406562991,0.8446319085122707,0.8159896600494119,0.8167360440653073
+ 3,-1,0.8497126657136432,0.849710412017,0.8369337507381109,0.8405015515693405,0.8413669351856989,0.8443135470826015,0.8181123846048188,0.8185474723616566
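Each row logs Pearson and Spearman correlations between the gold STS scores and four similarity functions (cosine, Euclidean, Manhattan, dot) on the dev split, one row per epoch (steps = -1). A minimal sketch of how the cosine columns can be reproduced, with placeholder sentence pairs and gold scores (the actual STS data is not part of this repository):

```python
import numpy as np
from scipy.stats import pearsonr, spearmanr
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("path/to/this/repository")  # placeholder

# Placeholder STS-style pairs and gold similarity scores.
sentences1 = ["a man is playing a guitar", "a child runs in the park", "the cat sleeps"]
sentences2 = ["someone plays guitar", "a dog barks loudly", "a cat is sleeping"]
gold = [4.8, 0.6, 4.5]

emb1 = model.encode(sentences1)
emb2 = model.encode(sentences2)
cosine = np.sum(emb1 * emb2, axis=1) / (
    np.linalg.norm(emb1, axis=1) * np.linalg.norm(emb2, axis=1)
)

print("cosine_pearson:", pearsonr(cosine, gold)[0])
print("cosine_spearman:", spearmanr(cosine, gold)[0])
```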
similarity_evaluation_sts-test_results.csv ADDED
@@ -0,0 +1,2 @@
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
+ -1,-1,0.8159805868203964,0.8098928994330129,0.8103343175230819,0.8051505823999008,0.8145836666175282,0.8092289034864261,0.7614275412951926,0.7503233959142428