sumanthd commited on
Commit
c11ebe8
1 Parent(s): 56839ff

add sentiment data

Browse files
.gitignore ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ data/*.txt
2
+ .DS_Store
3
+ data/*.tsv
HAIndicSentiment.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """TODO(IndicSentiment): Add a description here."""
2
+
3
+
4
+ import json
5
+
6
+ import datasets
7
+
8
+
9
+ _HOMEPAGE = ""
10
+
11
+ _CITATION = """\
12
+
13
+ """
14
+
15
+ _DESCRIPTION = """\
16
+
17
+ """
18
+
19
+ _LANG = ["as", "bn", "bd", "gu", "hi", "kn", "ml", "mr", "or", "pa", "ta", "te", "ur"]
20
+ _URL = "https://huggingface.co/datasets/ai4bharat/HAIndicSentiment/resolve/main/data/{split}/{language}.json"
21
+ _VERSION = datasets.Version("1.0.0", "First version of IndicSentiment")
22
+
23
+
24
class HAIndicSentiment(datasets.GeneratorBasedBuilder):
    """TODO(IndicSentiment): Short description of my dataset."""

    # One config per supported language, named "translation-<code>".
    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name=f"translation-{code}",
            description=f"translated sentiment data for {code}",
            version=_VERSION,
        )
        for code in _LANG
    ]

    def _info(self):
        """Return dataset metadata: every feature is a plain string column."""
        column_names = [
            "GENERIC CATEGORIES",
            "CATEGORY",
            "SUB-CATEGORY",
            "PRODUCT",
            "BRAND",
            "ASPECTS",
            "ASPECT COMBO",
            "ENGLISH REVIEW",
            "LABEL",
            "INDIC REVIEW",
        ]
        string_features = {name: datasets.Value("string") for name in column_names}
        return datasets.DatasetInfo(
            description=_DESCRIPTION + self.config.description,
            features=datasets.Features(string_features),
            homepage=_HOMEPAGE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        # Config names look like "translation-<lang>"; the language code is
        # everything after the last dash.
        language = self.config.name.split("-")[-1]
        splits = {datasets.Split.VALIDATION: "validation", datasets.Split.TEST: "test"}

        data_urls = {}
        for split, split_dir in splits.items():
            data_urls[split] = _URL.format(language=language, split=split_dir)
        dl_paths = dl_manager.download(data_urls)

        generators = []
        for split in splits:
            generators.append(
                datasets.SplitGenerator(
                    name=split,
                    gen_kwargs={"filepath": dl_paths[split]},
                )
            )
        return generators

    def _generate_examples(self, filepath):
        """Yields examples."""
        # The data files are JSON Lines: one JSON object per line.
        with open(filepath, encoding="utf-8") as f:
            for idx, row in enumerate(f):
                yield idx, json.loads(row)
data/compile.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Compile per-language review files into shuffled test/dev JSON Lines splits.

For each language code, pairs the line-aligned translated reviews in
``<code>.txt`` with the shared English metadata in ``metadata.tsv``, shuffles
all rows with one common permutation (so every language gets the same
test/dev membership), and writes the first 1000 rows to ``test/<code>.json``
and the remainder to ``dev/<code>.json`` as JSON Lines.
"""
import numpy as np
import pandas as pd

# Language codes, for reference: hi=Hindi, pa=Punjabi, as=Assamese, bn=Bengali,
# gu=Gujarati, kn=Kannada, mr=Marathi, ta=Tamil, ml=Malayalam, or=Odia,
# te=Telugu, bd=Bodo, ur=Urdu.
codes = ['hi', 'pa', 'as', 'bn', 'gu', 'kn', 'mr', 'ta', 'ml', 'or', 'te', 'bd', 'ur']

COLUMNS = ['GENERIC CATEGORIES', 'CATEGORY', 'SUB-CATEGORY', 'PRODUCT', 'BRAND',
           'ASPECTS', 'ASPECT COMBO', 'ENGLISH REVIEW', 'LABEL']

# Read the shared metadata once (the original re-read it for every language);
# each language below works on a fresh copy of this frame.
base = pd.read_csv('metadata.tsv', sep='\t')
base.columns = COLUMNS
# A single permutation shared across languages keeps the test/dev membership
# identical for every language.
idx = np.random.permutation(base.index)

for lang in codes:
    with open(f"{lang}.txt", "r") as f:
        lines = [line.strip() for line in f]

    # Fail loudly (and with a clear message) if the translations are not
    # line-aligned with the metadata rows.
    if len(lines) != len(base):
        raise ValueError(
            f"{lang}.txt has {len(lines)} lines but metadata.tsv has {len(base)} rows"
        )

    metadata = base.copy()
    metadata['INDIC REVIEW'] = lines

    # Apply the shared shuffle, then split: first 1000 rows -> test, rest -> dev.
    metadata = metadata.reindex(idx)

    metadata[:1000].to_json(f"test/{lang}.json", orient='records', lines=True)
    metadata[1000:].to_json(f"dev/{lang}.json", orient='records', lines=True)
data/dev/as.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/bd.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/bn.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/gu.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/hi.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/kn.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/ml.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/mr.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/or.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/pa.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/ta.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/te.json ADDED
The diff for this file is too large to render. See raw diff
 
data/dev/ur.json ADDED
The diff for this file is too large to render. See raw diff
 
data/metadata.tsv ADDED
The diff for this file is too large to render. See raw diff
 
data/test/as.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/bd.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/bn.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/gu.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/hi.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/kn.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/ml.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/mr.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/or.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/pa.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/ta.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/te.json ADDED
The diff for this file is too large to render. See raw diff
 
data/test/ur.json ADDED
The diff for this file is too large to render. See raw diff