ai-forever committed
Commit ab78070
1 Parent(s): dd038a5

Update README.md

Files changed (1)
  1. README.md +16 -16
README.md CHANGED
@@ -23,15 +23,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value: 56.2
+      value: 55.7
       verified: false
     - name: Recall
       type: recall
-      value: 65.8
+      value: 68.5
       verified: false
     - name: F1
       type: f1
-      value: 60.6
+      value: 61.4
       verified: false
   - task:
       type: text-generation
@@ -41,15 +41,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value: 42.1
+      value: 35.4
       verified: false
     - name: Recall
       type: recall
-      value: 47.5
+      value: 57.9
       verified: false
     - name: F1
       type: f1
-      value: 44.6
+      value: 43.9
       verified: false
   - task:
       type: text-generation
@@ -59,15 +59,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value: 38.6
+      value: 35.1
       verified: false
     - name: Recall
       type: recall
-      value: 56.0
+      value: 70.8
       verified: false
     - name: F1
       type: f1
-      value: 45.7
+      value: 47.0
       verified: false
   - task:
       type: text-generation
@@ -77,15 +77,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value: 52.8
+      value: 47.4
       verified: false
    - name: Recall
       type: recall
-      value: 49.8
+      value: 53.8
       verified: false
     - name: F1
       type: f1
-      value: 51.2
+      value: 50.4
       verified: false
   - task:
       type: text-generation
@@ -164,7 +164,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **RUSpellRU**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large | 56.2 | 65.8 | 60.6 |
+| sage-mt5-large | 55.7 | 68.5 | 61.4 |
 | sage-mt5-large (ft.) | 88.4 | 71.6 | 79.1 |
 | sage-ai-service | 93.5 | 82.4 | 87.6 |
 | gpt-3.5-turbo | 39.6 | 62.3 | 48.5 |
@@ -173,7 +173,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **MultidomainGold**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large | 42.1 | 47.5 | 44.6 |
+| sage-mt5-large | 35.4 | 57.9 | 43.9 |
 | sage-mt5-large (ft.) | 65.3 | 62.7 | 63.9 |
 | sage-ai-service | 70.9 | 68.8 | 69.9 |
 | gpt-3.5-turbo | 17.8 | 56.1 | 27.0 |
@@ -182,7 +182,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **MedSpellChecker**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large | 38.6 | 56.0 | 45.7 |
+| sage-mt5-large | 35.1 | 70.8 | 47.0 |
 | sage-mt5-large (ft.) | 77.7 | 77.5 | 77.6 |
 | sage-ai-service | 73.4 | 76.2 | 74.9 |
 | gpt-3.5-turbo | 15.1 | 53.6 | 23.5 |
@@ -191,7 +191,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **GitHubTypoCorpusRu**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large | 52.8 | 49.8 | 51.2 |
+| sage-mt5-large | 47.4 | 53.8 | 50.4 |
 | sage-mt5-large (ft.) | 69.5 | 46.0 | 55.3 |
 | sage-ai-service | 76.1 | 51.2 | 61.2 |
 | gpt-3.5-turbo | 23.7 | 43.9 | 30.8 |
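The updated F1 values are consistent with the usual harmonic-mean relation F1 = 2·P·R / (P + R) applied to the updated precision and recall. Below is a minimal, illustrative sanity check (not part of the repository); small discrepancies such as 46.9 vs. the reported 47.0 for MedSpellChecker are expected when P and R are themselves rounded to one decimal.

```python
# Recompute F1 from the updated precision/recall values for sage-mt5-large.
# The numbers are copied from the tables above; the script is illustrative only.

def f1(precision: float, recall: float) -> float:
    """Harmonic mean of precision and recall (all values in percent)."""
    return 2 * precision * recall / (precision + recall)

# dataset -> (precision, recall, reported F1) from the updated tables
updated = {
    "RUSpellRU": (55.7, 68.5, 61.4),
    "MultidomainGold": (35.4, 57.9, 43.9),
    "MedSpellChecker": (35.1, 70.8, 47.0),
    "GitHubTypoCorpusRu": (47.4, 53.8, 50.4),
}

for dataset, (p, r, reported) in updated.items():
    print(f"{dataset}: recomputed F1 = {f1(p, r):.1f}, reported F1 = {reported}")
```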
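Because the `model-index` block edited above is standard Hugging Face model-card metadata, the same scores can be read back programmatically once the card is published. A hedged sketch using `huggingface_hub`; the repo id `ai-forever/sage-mt5-large` is inferred from this commit page and assumed here:

```python
# Print the evaluation results declared in the model card's model-index block.
# Requires: pip install huggingface_hub
from huggingface_hub import ModelCard

# Repo id assumed from the commit page; adjust if the card lives elsewhere.
card = ModelCard.load("ai-forever/sage-mt5-large")

for result in card.data.eval_results or []:
    print(
        f"{result.dataset_name}: {result.metric_name} ({result.metric_type}) "
        f"= {result.metric_value}, verified={result.verified}"
    )
```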