Sebastian Gehrmann committed on
Commit
a4619f9
1 Parent(s): b1e3133

Data Card.

Files changed (2)
  1. OrangeSum.json +6 -2
  2. README.md +22 -8
OrangeSum.json CHANGED
@@ -14,7 +14,7 @@
     "additional-splits-capacicites": "N/A"
   },
   "starting": {
-    "research-pointers": "Papers about abstractive summarization using seq2seq models:\nhttps://aclanthology.org/K16-1028/\nhttps://aclanthology.org/P17-1099/\nhttps://aclanthology.org/2020.acl-main.703\nhttps://aclanthology.org/2021.emnlp-main.740/\n\nPapers about (pretrained) Transformers:\nhttps://papers.nips.cc/paper/2017/hash/3f5ee243547dee91fbd053c1c4a845aa-Abstract.html\nhttps://aclanthology.org/N19-1423/\nhttps://aclanthology.org/2020.acl-main.703",
+    "research-pointers": "Papers about abstractive summarization using seq2seq models:\n\n- [Abstractive Text Summarization using Sequence-to-sequence RNNs and Beyond](https://aclanthology.org/K16-1028/)\n- [Get To The Point: Summarization with Pointer-Generator Networks](https://aclanthology.org/P17-1099/)\n- [BART: Denoising Sequence-to-Sequence Pre-training for Natural Language Generation, Translation, and Comprehension](https://aclanthology.org/2020.acl-main.703)\n- [BARThez: a Skilled Pretrained French Sequence-to-Sequence Model](https://aclanthology.org/2021.emnlp-main.740/)\n\nPapers about (pretrained) Transformers:\n\n- [Attention is All you Need](https://papers.nips.cc/paper/2017/hash/3f5ee243547dee91fbd053c1c4a845aa-Abstract.html)\n- [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://aclanthology.org/N19-1423/)",
     "technical-terms": "No unique technical words in this data card."
   }
 },
@@ -79,7 +79,11 @@
   "languages": {
     "is-multilingual": "no",
     "license": "other: Other license",
-    "task-other": "N/A"
+    "task": "Summarization",
+    "task-other": "N/A",
+    "language-names": [
+      "French"
+    ]
   },
   "credit": {},
   "structure": {}
README.md CHANGED
@@ -15,7 +15,7 @@ size_categories:
 source_datasets:
 - original
 task_categories:
-- unknown
+- summarization
 task_ids:
 - unknown
 ---
@@ -102,6 +102,13 @@ no
 <!-- scope: telescope -->
 no
 
+#### Covered Languages
+
+<!-- quick -->
+<!-- info: What languages/dialects are covered in the dataset? -->
+<!-- scope: telescope -->
+`French`
+
 #### License
 
 <!-- quick -->
@@ -109,6 +116,12 @@ no
 <!-- scope: telescope -->
 other: Other license
 
+#### Primary Task
+
+<!-- info: What primary task does the dataset support? -->
+<!-- scope: telescope -->
+Summarization
+
 
 ### Credit
 
@@ -152,15 +165,16 @@ no
 <!-- info: Getting started with in-depth research on the task. Add relevant pointers to resources that researchers can consult when they want to get started digging deeper into the task. -->
 <!-- scope: microscope -->
 Papers about abstractive summarization using seq2seq models:
-https://aclanthology.org/K16-1028/
-https://aclanthology.org/P17-1099/
-https://aclanthology.org/2020.acl-main.703
-https://aclanthology.org/2021.emnlp-main.740/
+
+- [Abstractive Text Summarization using Sequence-to-sequence RNNs and Beyond](https://aclanthology.org/K16-1028/)
+- [Get To The Point: Summarization with Pointer-Generator Networks](https://aclanthology.org/P17-1099/)
+- [BART: Denoising Sequence-to-Sequence Pre-training for Natural Language Generation, Translation, and Comprehension](https://aclanthology.org/2020.acl-main.703)
+- [BARThez: a Skilled Pretrained French Sequence-to-Sequence Model](https://aclanthology.org/2021.emnlp-main.740/)
 
 Papers about (pretrained) Transformers:
-https://papers.nips.cc/paper/2017/hash/3f5ee243547dee91fbd053c1c4a845aa-Abstract.html
-https://aclanthology.org/N19-1423/
-https://aclanthology.org/2020.acl-main.703
+
+- [Attention is All you Need](https://papers.nips.cc/paper/2017/hash/3f5ee243547dee91fbd053c1c4a845aa-Abstract.html)
+- [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://aclanthology.org/N19-1423/)
 
 #### Technical Terms
 
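With `task_categories` set to `summarization` and French declared under Covered Languages, the dataset becomes discoverable through the Hub's task and language filters. A minimal usage sketch, assuming the public `orange_sum` dataset on the Hugging Face Hub with its `abstract` config, which pairs a source `text` with a reference `summary`:

```python
from datasets import load_dataset

# Load OrangeSum for the summarization task the card now declares.
# Dataset name, config, and column names are assumptions based on the
# public `orange_sum` dataset; adjust if the GEM variant is used instead.
dataset = load_dataset("orange_sum", "abstract")

example = dataset["train"][0]
print(example["text"][:200])  # French news article (model input)
print(example["summary"])     # reference abstract (target)
```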