lvwerra committed on
Commit 222b274
1 Parent(s): d947196

Update Space (evaluate main: c447fc8e)

Files changed (2):
  1. requirements.txt +1 -1
  2. sacrebleu.py +13 -29
requirements.txt CHANGED
@@ -1,2 +1,2 @@
-git+https://github.com/huggingface/evaluate@e4a2724377909fe2aeb4357e3971e5a569673b39
+git+https://github.com/huggingface/evaluate@c447fc8eda9c62af501bfdc6988919571050d950
 sacrebleu
sacrebleu.py CHANGED
@@ -13,9 +13,6 @@
 # limitations under the License.
 """ SACREBLEU metric. """
 
-from dataclasses import dataclass
-from typing import Callable, Optional
-
 import datasets
 import sacrebleu as scb
 from packaging import version
@@ -105,27 +102,9 @@ Examples:
 """
 
 
-@dataclass
-class SacrebleuConfig(evaluate.info.Config):
-
-    name: str = "default"
-
-    smooth_method: str = "exp"
-    average: str = "binary"
-    smooth_value: Optional[float] = None
-    force: bool = False
-    lowercase: bool = False
-    tokenize: Optional[Callable] = None
-    use_effective_order: bool = False
-
-
 @evaluate.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
 class Sacrebleu(evaluate.Metric):
-
-    CONFIG_CLASS = SacrebleuConfig
-    ALLOWED_CONFIG_NAMES = ["default"]
-
-    def _info(self, config):
+    def _info(self):
         if version.parse(scb.__version__) < version.parse("1.4.12"):
             raise ImportWarning(
                 "To use `sacrebleu`, the module `sacrebleu>=1.4.12` is required, and the current version of `sacrebleu` doesn't match this condition.\n"
@@ -136,7 +115,6 @@ class Sacrebleu(evaluate.Metric):
             citation=_CITATION,
             homepage="https://github.com/mjpost/sacreBLEU",
             inputs_description=_KWARGS_DESCRIPTION,
-            config=config,
             features=[
                 datasets.Features(
                     {
@@ -163,6 +141,12 @@ class Sacrebleu(evaluate.Metric):
         self,
         predictions,
         references,
+        smooth_method="exp",
+        smooth_value=None,
+        force=False,
+        lowercase=False,
+        tokenize=None,
+        use_effective_order=False,
     ):
         # if only one reference is provided make sure we still use list of lists
         if isinstance(references[0], str):
@@ -175,12 +159,12 @@ class Sacrebleu(evaluate.Metric):
         output = scb.corpus_bleu(
             predictions,
             transformed_references,
-            smooth_method=self.config.smooth_method,
-            smooth_value=self.config.smooth_value,
-            force=self.config.force,
-            lowercase=self.config.lowercase,
-            use_effective_order=self.config.use_effective_order,
-            **(dict(tokenize=self.config.tokenize) if self.config.tokenize else {}),
+            smooth_method=smooth_method,
+            smooth_value=smooth_value,
+            force=force,
+            lowercase=lowercase,
+            use_effective_order=use_effective_order,
+            **(dict(tokenize=tokenize) if tokenize else {}),
         )
         output_dict = {
             "score": output.score,