lara-martin committed
Commit: cbcf59c
Parent: f3b066d

Update Scifi_TV_Shows.py

Files changed (1)
  1. Scifi_TV_Shows.py +27 -20
Scifi_TV_Shows.py CHANGED
@@ -35,32 +35,32 @@ _DESCRIPTION = 'Loading script for the science fiction TV show plot dataset.'
 
 URL = 'https://huggingface.co/datasets/lara-martin/Scifi_TV_Shows/blob/main/'
 _URLS = {
-    'test':'Test-Train-Val/all-sci-fi-data-test.txt',
-    'train':'Test-Train-Val/all-sci-fi-data-train.txt',
-    'val':'Test-Train-Val/all-sci-fi-data-val.txt',
-    'all':'all-sci-fi-data.txt',
+    'test':URL+'Test-Train-Val/all-sci-fi-data-test.txt',
+    'train':URL+'Test-Train-Val/all-sci-fi-data-train.txt',
+    'val':URL+'Test-Train-Val/all-sci-fi-data-val.txt',
+    'all':URL+'all-sci-fi-data.txt',
 }
 
 _INPUT_OUTPUT = ["all-sci-fi-data-test_input.txt", "all-sci-fi-data-test_output.txt", "all-sci-fi-data-train_input.txt", "all-sci-fi-data-train_output.txt", "all-sci-fi-data-val_input.txt", "all-sci-fi-data-val_output.txt"]
 
 
-class ScifiTV(datasets.GeneratorBasedBuilder):
+class Scifi_TV_Shows(datasets.GeneratorBasedBuilder):
     BUILDER_CONFIGS = [
         datasets.BuilderConfig(
             version=datasets.Version('1.1.0'),
-            name=k,
-            description=f'Science fiction TV show plot summaries.'
-        ) for k in _URLS.keys()
+            name="Scifi_TV_Shows",
+            description=f'Science fiction TV show plot summaries.',
+        )
     ]
 
     def _info(self):
         features = datasets.Features({
-            'event': datasets.Value('string'),
-            'gen_event': datasets.Value('string'),
-            'sent': datasets.Value('string'),
-            'gen_sent': datasets.Value('string'),
             'story_num': datasets.Value('int16'),
-            'entities': datasets.Value('string')
+            'event': datasets.Sequence(datasets.Value('string')),
+            'gen_event': datasets.Sequence(datasets.Value('string')),
+            'sent': datasets.Value('string'),
+            'gen_sent': datasets.Value('string'),
+            'entities': datasets.Value('string'),
         })
 
         return datasets.DatasetInfo(
@@ -100,7 +100,14 @@ class ScifiTV(datasets.GeneratorBasedBuilder):
                     'filepath': downloaded_files['val'],
                     "split": "val",
                 },
-            ),
+            ),
+            datasets.SplitGenerator(
+                name="all",
+                gen_kwargs={
+                    'filepath': downloaded_files['all'],
+                    "split": "all",
+                },
+            ),
         ]
 
     def _generate_examples(self, filepath):
@@ -115,15 +122,15 @@ class ScifiTV(datasets.GeneratorBasedBuilder):
                 line = line.replace("%%%%%%%%%%%%%%%%%", "")
                 entities = line.replace("defaultdict(<type 'list'>, ", "")[:-1]
                 yield id_, {
-                    'event': event,
-                    'gen_event': gen_event,
-                    'sent': sent,
-                    'gen_sent': gen_sent,
                     'story_num': story_count,
+                    'event': eval(event),
+                    'gen_event': eval(gen_event),
+                    'sent': sent,
+                    'gen_sent': gen_sent,
                     'entities': entities,
                 }
-                story = []
-                story_count+=1
+                story = []
+                story_count+=1
             elif "<EOS>" in line:
                 continue
             else:
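
For context, a minimal usage sketch (not part of the commit) of how the updated script is typically consumed through the Hugging Face datasets library. The repo id and the config name "Scifi_TV_Shows" come from the diff above; the split names ("train", "test", "val", "all") are assumed from the _URLS keys and the split generators, and newer datasets releases may additionally require trust_remote_code=True for script-based datasets.

    from datasets import load_dataset

    # Load via the dataset script; "Scifi_TV_Shows" is the single config defined above.
    ds = load_dataset("lara-martin/Scifi_TV_Shows", "Scifi_TV_Shows")

    example = ds["train"][0]      # assumes a "train" split generator, mirroring _URLS
    print(example["story_num"])   # int16 index of the story within the file
    print(example["event"][:2])   # 'event' is now a Sequence of strings, not one string
    print(example["gen_sent"])    # generalized sentence, still a plain string

Because each event field in the raw files is stored as the string form of a Python list, the script's eval(event) call converts it back into a list of strings to match the new Sequence(Value('string')) feature; ast.literal_eval would be a stricter alternative for the same job.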