hpprc committed
Commit 4bf1550
1 Parent(s): e291691

Upload folder using huggingface_hub

Files changed (6):
  1. 1_Pooling/config.json +1 -3
  2. README.md +5 -5
  3. config.json +1 -1
  4. dev-metrics.json +5 -0
  5. log.csv +66 -0
  6. sts-metrics.json +6 -0
1_Pooling/config.json CHANGED
@@ -3,7 +3,5 @@
   "pooling_mode_cls_token": true,
   "pooling_mode_mean_tokens": false,
   "pooling_mode_max_tokens": false,
-  "pooling_mode_mean_sqrt_len_tokens": false,
-  "pooling_mode_weightedmean_tokens": false,
-  "pooling_mode_lasttoken": false
+  "pooling_mode_mean_sqrt_len_tokens": false
 }
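
The trimmed pooling config leaves only [CLS]-token pooling active. For reference, here is a minimal sketch (not part of the commit) of how the same pooling would be declared through `sentence_transformers.models.Pooling`, whose keyword arguments map one-to-one onto these JSON keys; the 768 dimension is the embedding size stated in the README:

```python
from sentence_transformers import models

# Sketch of the equivalent of 1_Pooling/config.json: keep only the [CLS]
# token vector as the sentence embedding (the repo ships the JSON, not this code).
pooling = models.Pooling(
    word_embedding_dimension=768,
    pooling_mode_cls_token=True,
    pooling_mode_mean_tokens=False,
    pooling_mode_max_tokens=False,
    pooling_mode_mean_sqrt_len_tokens=False,
)
```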
README.md CHANGED
@@ -9,7 +9,7 @@ datasets:
 - shunk031/jsnli
 ---
 
-# cl-nagoya/sup-simcse-ja-base
+# {MODEL_NAME}
 
 This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
 
@@ -29,7 +29,7 @@ Then you can use the model like this:
 from sentence_transformers import SentenceTransformer
 sentences = ["This is an example sentence", "Each sentence is converted"]
 
-model = SentenceTransformer('cl-nagoya/sup-simcse-ja-base')
+model = SentenceTransformer('{MODEL_NAME}')
 embeddings = model.encode(sentences)
 print(embeddings)
 ```
@@ -52,8 +52,8 @@ def cls_pooling(model_output, attention_mask):
 sentences = ['This is an example sentence', 'Each sentence is converted']
 
 # Load model from HuggingFace Hub
-tokenizer = AutoTokenizer.from_pretrained('cl-nagoya/sup-simcse-ja-base')
-model = AutoModel.from_pretrained('cl-nagoya/sup-simcse-ja-base')
+tokenizer = AutoTokenizer.from_pretrained('{MODEL_NAME}')
+model = AutoModel.from_pretrained('{MODEL_NAME}')
 
 # Tokenize sentences
 encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
@@ -75,7 +75,7 @@ print(sentence_embeddings)
 
 <!--- Describe how your model was evaluated -->
 
-For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=cl-nagoya/sup-simcse-ja-base)
+For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
 
 
 
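
Building on the README's usage snippets, here is a hedged follow-up example (not in the commit) that compares the two sample sentences with `sentence_transformers.util.cos_sim`; `'{MODEL_NAME}'` is the placeholder this commit introduces and must be replaced with the actual Hub id:

```python
from sentence_transformers import SentenceTransformer, util

# '{MODEL_NAME}' is the README's placeholder; substitute the real model id.
model = SentenceTransformer('{MODEL_NAME}')
embeddings = model.encode(["This is an example sentence", "Each sentence is converted"])

# Cosine similarity between the two sentence embeddings (1x1 tensor).
print(util.cos_sim(embeddings[0], embeddings[1]))
```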
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "./outputs/sup-simcse/jsnli/cl-tohoku__bert-base-japanese-v3/2023-10-02/16-22-36/",
+  "_name_or_path": "cl-tohoku/bert-base-japanese-v3",
   "architectures": [
     "BertModel"
   ],
dev-metrics.json ADDED
@@ -0,0 +1,5 @@
+{
+  "best-epoch": 0,
+  "best-step": 128,
+  "best-dev": 83.61539847191834
+}
log.csv ADDED
@@ -0,0 +1,66 @@
+epoch,step,loss,sts-dev
+0,0,inf,51.375121585435735
+0,32,5.8056640625,78.35678255256113
+0,64,3.11572265625,82.6244856578063
+0,96,2.42578125,83.35358238047021
+0,128,2.176513671875,83.61539847191834
+0,160,2.0302734375,81.87700855632563
+0,192,1.875732421875,79.53856626810594
+0,224,1.843994140625,77.77162870199022
+0,256,1.773193359375,76.12193564966829
+1,288,1.690185546875,75.82749644399787
+1,320,1.600341796875,77.40347138209835
+1,352,1.56298828125,74.4370737294954
+1,384,1.570556640625,76.08974073722088
+1,416,1.534912109375,73.89938923579099
+1,448,1.55078125,70.69398516957513
+1,480,1.509521484375,73.33702686894722
+1,512,1.545654296875,75.2034184712643
+1,544,1.52001953125,73.9900785391876
+2,576,1.463134765625,72.0822596167315
+2,608,1.376220703125,72.30434210546885
+2,640,1.373046875,75.35516784317878
+2,672,1.37646484375,75.00048637711306
+2,704,1.370849609375,71.54454915080103
+2,736,1.35595703125,72.10342263835692
+2,768,1.351318359375,73.61822488939758
+2,800,1.341552734375,73.69283818050053
+2,832,1.350341796875,73.35254646396575
+3,864,1.292724609375,71.73891113636543
+3,896,1.231201171875,72.50612877979137
+3,928,1.249267578125,71.73997216828496
+3,960,1.23486328125,72.24572646319987
+3,992,1.2470703125,72.21874965045426
+3,1024,1.260498046875,71.01279770471795
+3,1056,1.262451171875,71.28239282311822
+3,1088,1.24609375,72.01843871324893
+3,1120,1.25048828125,71.49440820181441
+4,1152,1.151611328125,69.90122275478602
+4,1184,1.157958984375,71.99470697954118
+4,1216,1.179931640625,69.07210383354008
+4,1248,1.173828125,67.60162600897702
+4,1280,1.152587890625,68.50119537305783
+4,1312,1.176513671875,69.20852332481955
+4,1344,1.166015625,69.92232791766861
+4,1376,1.1796875,71.00892896894848
+5,1408,1.16357421875,72.45555003088666
+5,1440,1.0804443359375,69.45015860800906
+5,1472,1.0814208984375,71.00156597627819
+5,1504,1.1224365234375,70.58322578457057
+5,1536,1.097412109375,69.09835049406871
+5,1568,1.097412109375,70.2241604764949
+5,1600,1.1026611328125,71.35726715857705
+5,1632,1.1053466796875,69.87296046317874
+5,1664,1.11376953125,70.50921749290218
+6,1696,1.092529296875,70.10898081907449
+6,1728,1.0654296875,70.09196262215698
+6,1760,1.068603515625,69.50168151188232
+6,1792,1.0595703125,68.94335544010495
+6,1824,1.048583984375,68.59140278421327
+6,1856,1.0460205078125,69.62795505629208
+6,1888,1.0633544921875,69.44217502657546
+6,1920,1.0531005859375,69.53616592289175
+6,1952,1.0594482421875,69.40128536108074
+7,1984,1.044189453125,69.35274684809701
+7,2016,1.013427734375,69.4939563818511
+7,2048,1.0462646484375,69.50480224133872
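
The log peaks early: sts-dev tops out at step 128 of epoch 0 and declines afterwards, which matches dev-metrics.json above. A minimal sketch (assuming log.csv is read from this repo's root) that recovers the best row:

```python
import csv

# Find the checkpoint with the highest sts-dev score in log.csv.
with open("log.csv", newline="") as f:
    best = max(csv.DictReader(f), key=lambda row: float(row["sts-dev"]))

print(best)
# Expected to match dev-metrics.json: epoch 0, step 128, sts-dev 83.61539847191834
```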
sts-metrics.json ADDED
@@ -0,0 +1,6 @@
+{
+  "jsick": 82.7495424766893,
+  "jsts-val": 80.8645208379796,
+  "jsts-train": 77.85762915212484,
+  "avg": 80.49056415559791
+}
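
The "avg" field is the unweighted mean of the three STS scores, as a quick check confirms:

```python
# Sanity check: "avg" equals the mean of jsick, jsts-val, and jsts-train.
scores = [82.7495424766893, 80.8645208379796, 77.85762915212484]
print(sum(scores) / len(scores))  # ~80.49056415559791, matching "avg"
```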