Datasets: wmt
Modalities: Text
Formats: parquet
Libraries: Datasets, Dask
albertvillanova (HF staff) committed
Commit: ddd0e41
1 Parent(s): 08f7ac2

Delete loading script

Files changed (1):
  1. wmt_t2t.py +0 -64
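
With the loading script deleted, the data is served from the Parquet files listed under Formats above, so no script execution is needed at load time. A minimal loading sketch, assuming the repository id is wmt/wmt_t2t and that the builder's single ("de", "en") language pair corresponds to a "de-en" config name:

from datasets import load_dataset

# Assumed repository id and config name; the deleted script defined a
# single WmtConfig with language_pair=("de", "en").
ds = load_dataset("wmt/wmt_t2t", "de-en")

# WMT-style builders expose one "translation" dict per example,
# e.g. {"translation": {"de": "...", "en": "..."}}.
print(ds["train"][0])
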
wmt_t2t.py DELETED
@@ -1,64 +0,0 @@
- # coding=utf-8
- # Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- # Lint as: python3
- """The WMT EnDe Translate dataset used by the Tensor2Tensor library."""
-
- import datasets
-
- from .wmt_utils import Wmt, WmtConfig
-
-
- _URL = "https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/data_generators/translate_ende.py"
- _CITATION = """
- @InProceedings{bojar-EtAl:2014:W14-33,
-   author    = {Bojar, Ondrej and Buck, Christian and Federmann, Christian and Haddow, Barry and Koehn, Philipp and Leveling, Johannes and Monz, Christof and Pecina, Pavel and Post, Matt and Saint-Amand, Herve and Soricut, Radu and Specia, Lucia and Tamchyna, Ale\v{s}},
-   title     = {Findings of the 2014 Workshop on Statistical Machine Translation},
-   booktitle = {Proceedings of the Ninth Workshop on Statistical Machine Translation},
-   month     = {June},
-   year      = {2014},
-   address   = {Baltimore, Maryland, USA},
-   publisher = {Association for Computational Linguistics},
-   pages     = {12--58},
-   url       = {http://www.aclweb.org/anthology/W/W14/W14-3302}
- }
- """
-
-
- class WmtT2t(Wmt):
-     """The WMT EnDe Translate dataset used by the Tensor2Tensor library."""
-
-     BUILDER_CONFIGS = [
-         WmtConfig(  # pylint:disable=g-complex-comprehension
-             description="WMT T2T EnDe translation task dataset.",
-             url=_URL,
-             citation=_CITATION,
-             language_pair=("de", "en"),
-             version=datasets.Version("1.0.0"),
-         )
-     ]
-
-     @property
-     def manual_download_instructions(self):
-         if self.config.language_pair[1] in ["cs", "hi", "ru"]:
-             return "Please download the data manually as explained. TODO(PVP)"
-
-     @property
-     def _subsets(self):
-         return {
-             datasets.Split.TRAIN: ["europarl_v7", "commoncrawl", "newscommentary_v13"],
-             datasets.Split.VALIDATION: ["newstest2013"],
-             datasets.Split.TEST: ["newstest2014"],
-         }
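
For comparison, the removed script above was the code-based path: the WmtT2t builder assembled the train split from europarl_v7, commoncrawl and newscommentary_v13, with newstest2013 as validation and newstest2014 as test. On recent versions of the datasets library, loading such script-based datasets requires an explicit opt-in; a rough sketch of the old invocation, again assuming the "de-en" config name:

from datasets import load_dataset

# Old script-based invocation (no longer applicable after this commit);
# trust_remote_code opts in to executing the repository's loading script.
# The "de-en" config name is assumed from the builder's language pair.
ds = load_dataset("wmt/wmt_t2t", "de-en", trust_remote_code=True)
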