Vadzim Kashko committed on
Commit
f1e3448
1 parent: e7050f8
data/hand-gesture-recognition-dataset.csv CHANGED
@@ -1,29 +1,29 @@
- set_id;one;four;small;fist;me
- 0;files/0/one.mp4;files/0/four.mp4;files/0/small.mp4;files/0/fist.mp4;files/0/me.mp4
- 1;files/1/one.mp4;files/1/four.mp4;files/1/small.mp4;files/1/fist.mp4;files/1/me.mp4
- 2;files/2/one.mp4;files/2/four.mp4;files/2/small.mp4;files/2/fist.mp4;files/2/me.mp4
- 3;files/3/one.mp4;files/3/four.mp4;files/3/small.mp4;files/3/fist.mp4;files/3/me.mp4
- 4;files/4/one.mp4;files/4/four.mp4;files/4/small.mp4;files/4/fist.mp4;files/4/me.mp4
- 5;files/5/one.mp4;files/5/four.mp4;files/5/small.mp4;files/5/fist.mp4;files/5/me.mp4
- 6;files/6/one.mp4;files/6/four.mp4;files/6/small.mp4;files/6/fist.mp4;files/6/me.mp4
- 7;files/7/one.mp4;files/7/four.mp4;files/7/small.mp4;files/7/fist.mp4;files/7/me.mp4
- 8;files/8/one.mp4;files/8/four.mp4;files/8/small.mp4;files/8/fist.mp4;files/8/me.mp4
- 9;files/9/one.mp4;files/9/four.mp4;files/9/small.mp4;files/9/fist.mp4;files/9/me.mp4
- 10;files/10/one.mp4;files/10/four.mp4;files/10/small.mp4;files/10/fist.mp4;files/10/me.mp4
- 11;files/11/one.mp4;files/11/four.mp4;files/11/small.mp4;files/11/fist.mp4;files/11/me.mp4
- 12;files/12/one.mp4;files/12/four.mp4;files/12/small.mp4;files/12/fist.mp4;files/12/me.mp4
- 13;files/13/one.mp4;files/13/four.mp4;files/13/small.mp4;files/13/fist.mp4;files/13/me.mp4
- 14;files/14/one.mp4;files/14/four.mp4;files/14/small.mp4;files/14/fist.mp4;files/14/me.mp4
- 15;files/15/one.mp4;files/15/four.mp4;files/15/small.mp4;files/15/fist.mp4;files/15/me.mp4
- 16;files/16/one.mp4;files/16/four.mp4;files/16/small.mp4;files/16/fist.mp4;files/16/me.mp4
- 17;files/17/one.mp4;files/17/four.mp4;files/17/small.mp4;files/17/fist.mp4;files/17/me.mp4
- 18;files/18/one.mp4;files/18/four.mp4;files/18/small.mp4;files/18/fist.mp4;files/18/me.mp4
- 19;files/19/one.mov;files/19/four.MOV;files/19/small.MOV;files/19/fist.MOV;files/19/me.MOV
- 20;files/20/one.mp4;files/20/four.mp4;files/20/small.mp4;files/20/fist.mp4;files/20/me.mp4
- 21;files/21/one.mp4;files/21/four.mp4;files/21/small.mp4;files/21/fist.mp4;files/21/me.mp4
- 22;files/22/one.mp4;files/22/four.mp4;files/22/small.mp4;files/22/fist.mp4;files/22/me.mp4
- 23;files/23/one.mp4;files/23/four.mp4;files/23/small.mp4;files/23/fist.mp4;files/23/me.mp4
- 24;files/24/one.mp4;files/24/four.mp4;files/24/small.mp4;files/24/fist.mp4;files/24/me.mp4
- 25;files/25/one.mp4;files/25/four.mp4;files/25/small.mp4;files/25/fist.mp4;files/25/me.mp4
- 26;files/26/one.mp4;files/26/four.mp4;files/26/small.mp4;files/26/fist.mp4;files/26/me.mp4
- 27;files/27/one.mp4;files/27/four.mp4;files/27/small.mp4;files/27/fist.mp4;files/27/me.mp4
 
+ set_id,one,four,small,fist,me
+ 0,files/0/one.mp4,files/0/four.mp4,files/0/small.mp4,files/0/fist.mp4,files/0/me.mp4
+ 1,files/1/one.mp4,files/1/four.mp4,files/1/small.mp4,files/1/fist.mp4,files/1/me.mp4
+ 2,files/2/one.mp4,files/2/four.mp4,files/2/small.mp4,files/2/fist.mp4,files/2/me.mp4
+ 3,files/3/one.mp4,files/3/four.mp4,files/3/small.mp4,files/3/fist.mp4,files/3/me.mp4
+ 4,files/4/one.mp4,files/4/four.mp4,files/4/small.mp4,files/4/fist.mp4,files/4/me.mp4
+ 5,files/5/one.mp4,files/5/four.mp4,files/5/small.mp4,files/5/fist.mp4,files/5/me.mp4
+ 6,files/6/one.mp4,files/6/four.mp4,files/6/small.mp4,files/6/fist.mp4,files/6/me.mp4
+ 7,files/7/one.mp4,files/7/four.mp4,files/7/small.mp4,files/7/fist.mp4,files/7/me.mp4
+ 8,files/8/one.mp4,files/8/four.mp4,files/8/small.mp4,files/8/fist.mp4,files/8/me.mp4
+ 9,files/9/one.mp4,files/9/four.mp4,files/9/small.mp4,files/9/fist.mp4,files/9/me.mp4
+ 10,files/10/one.mp4,files/10/four.mp4,files/10/small.mp4,files/10/fist.mp4,files/10/me.mp4
+ 11,files/11/one.mp4,files/11/four.mp4,files/11/small.mp4,files/11/fist.mp4,files/11/me.mp4
+ 12,files/12/one.mp4,files/12/four.mp4,files/12/small.mp4,files/12/fist.mp4,files/12/me.mp4
+ 13,files/13/one.mp4,files/13/four.mp4,files/13/small.mp4,files/13/fist.mp4,files/13/me.mp4
+ 14,files/14/one.mp4,files/14/four.mp4,files/14/small.mp4,files/14/fist.mp4,files/14/me.mp4
+ 15,files/15/one.mp4,files/15/four.mp4,files/15/small.mp4,files/15/fist.mp4,files/15/me.mp4
+ 16,files/16/one.mp4,files/16/four.mp4,files/16/small.mp4,files/16/fist.mp4,files/16/me.mp4
+ 17,files/17/one.mp4,files/17/four.mp4,files/17/small.mp4,files/17/fist.mp4,files/17/me.mp4
+ 18,files/18/one.mp4,files/18/four.mp4,files/18/small.mp4,files/18/fist.mp4,files/18/me.mp4
+ 19,files/19/one.mov,files/19/four.MOV,files/19/small.MOV,files/19/fist.MOV,files/19/me.MOV
+ 20,files/20/one.mp4,files/20/four.mp4,files/20/small.mp4,files/20/fist.mp4,files/20/me.mp4
+ 21,files/21/one.mp4,files/21/four.mp4,files/21/small.mp4,files/21/fist.mp4,files/21/me.mp4
+ 22,files/22/one.mp4,files/22/four.mp4,files/22/small.mp4,files/22/fist.mp4,files/22/me.mp4
+ 23,files/23/one.mp4,files/23/four.mp4,files/23/small.mp4,files/23/fist.mp4,files/23/me.mp4
+ 24,files/24/one.mp4,files/24/four.mp4,files/24/small.mp4,files/24/fist.mp4,files/24/me.mp4
+ 25,files/25/one.mp4,files/25/four.mp4,files/25/small.mp4,files/25/fist.mp4,files/25/me.mp4
+ 26,files/26/one.mp4,files/26/four.mp4,files/26/small.mp4,files/26/fist.mp4,files/26/me.mp4
+ 27,files/27/one.mp4,files/27/four.mp4,files/27/small.mp4,files/27/fist.mp4,files/27/me.mp4
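
The change above only swaps the delimiter of the annotation file from semicolons to commas; the header (set_id plus one video path per gesture) and the 28 rows (set_id 0 through 27) are unchanged. As a quick sanity check, a minimal sketch of reading the reformatted file with pandas, assuming a local clone of the repository with the working directory at the repo root:

import pandas as pd

# Minimal check of the reformatted annotation file; the relative path assumes
# a local clone of the dataset repository.
df = pd.read_csv("data/hand-gesture-recognition-dataset.csv")  # pandas' default sep="," now matches the file
print(df.columns.tolist())  # ['set_id', 'one', 'four', 'small', 'fist', 'me']
print(len(df))              # 28 rows, set_id 0 through 27
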
hand-gesture-recognition-dataset.py CHANGED
@@ -1,83 +1,83 @@
- import datasets
- import pandas as pd

- _CITATION = """\
- @InProceedings{huggingface:dataset,
- title = {hand-gesture-recognition-dataset},
- author = {TrainingDataPro},
- year = {2023}
- }
- """

- _DESCRIPTION = """\
- The dataset consists of videos showcasing individuals demonstrating 5 different
- hand gestures (*"one", "four", "small", "fist", and "me"*). Each video captures
- a person prominently displaying a single hand gesture, allowing for accurate
- identification and differentiation of the gestures.
- The dataset offers a diverse range of individuals performing the gestures,
- enabling the exploration of variations in hand shapes, sizes, and movements
- across different individuals.
- The videos in the dataset are recorded in reasonable lighting conditions and
- with adequate resolution, to ensure that the hand gestures can be easily
- observed and studied.
- """
- _NAME = 'hand-gesture-recognition-dataset'

- _HOMEPAGE = f"https://huggingface.co/datasets/TrainingDataPro/{_NAME}"

- _LICENSE = "cc-by-nc-nd-4.0"

- _DATA = f"https://huggingface.co/datasets/TrainingDataPro/{_NAME}/resolve/main/data/"


- class HandGestureRecognitionDataset(datasets.GeneratorBasedBuilder):

-     def _info(self):
-         return datasets.DatasetInfo(description=_DESCRIPTION,
-                                     features=datasets.Features({
-                                         'set_id': datasets.Value('int32'),
-                                         'fist': datasets.Value('string'),
-                                         'four': datasets.Value('string'),
-                                         'me': datasets.Value('string'),
-                                         'one': datasets.Value('string'),
-                                         'small': datasets.Value('string')
-                                     }),
-                                     supervised_keys=None,
-                                     homepage=_HOMEPAGE,
-                                     citation=_CITATION,
-                                     license=_LICENSE)

-     def _split_generators(self, dl_manager):
-         files = dl_manager.download_and_extract(f"{_DATA}files.zip")
-         annotations = dl_manager.download(f"{_DATA}{_NAME}.csv")
-         files = dl_manager.iter_files(files)
-         return [
-             datasets.SplitGenerator(name=datasets.Split.TRAIN,
-                                     gen_kwargs={
-                                         "files": files,
-                                         'annotations': annotations
-                                     }),
-         ]

-     def _generate_examples(self, files, annotations):
-         annotations_df = pd.read_csv(annotations, sep=';')

-         files = sorted(files)
-         files = [files[i:i + 5] for i in range(0, len(files), 5)]
-         for idx, files_set in enumerate(files):
-             set_id = int(files_set[0].split('/')[2])
-             data = {'set_id': set_id}

-             for file in files_set:
-                 file_name = file.split('/')[3]
-                 if 'fist' in file_name.lower():
-                     data['fist'] = file
-                 elif 'four' in file_name.lower():
-                     data['four'] = file
-                 elif 'me' in file_name.lower():
-                     data['me'] = file
-                 elif 'one' in file_name.lower():
-                     data['one'] = file
-                 elif 'small' in file_name.lower():
-                     data['small'] = file
-             yield idx, data
 
+ # import datasets
+ # import pandas as pd

+ # _CITATION = """\
+ # @InProceedings{huggingface:dataset,
+ # title = {hand-gesture-recognition-dataset},
+ # author = {TrainingDataPro},
+ # year = {2023}
+ # }
+ # """

+ # _DESCRIPTION = """\
+ # The dataset consists of videos showcasing individuals demonstrating 5 different
+ # hand gestures (*"one", "four", "small", "fist", and "me"*). Each video captures
+ # a person prominently displaying a single hand gesture, allowing for accurate
+ # identification and differentiation of the gestures.
+ # The dataset offers a diverse range of individuals performing the gestures,
+ # enabling the exploration of variations in hand shapes, sizes, and movements
+ # across different individuals.
+ # The videos in the dataset are recorded in reasonable lighting conditions and
+ # with adequate resolution, to ensure that the hand gestures can be easily
+ # observed and studied.
+ # """
+ # _NAME = 'hand-gesture-recognition-dataset'

+ # _HOMEPAGE = f"https://huggingface.co/datasets/TrainingDataPro/{_NAME}"

+ # _LICENSE = "cc-by-nc-nd-4.0"

+ # _DATA = f"https://huggingface.co/datasets/TrainingDataPro/{_NAME}/resolve/main/data/"


+ # class HandGestureRecognitionDataset(datasets.GeneratorBasedBuilder):

+ #     def _info(self):
+ #         return datasets.DatasetInfo(description=_DESCRIPTION,
+ #                                     features=datasets.Features({
+ #                                         'set_id': datasets.Value('int32'),
+ #                                         'fist': datasets.Value('string'),
+ #                                         'four': datasets.Value('string'),
+ #                                         'me': datasets.Value('string'),
+ #                                         'one': datasets.Value('string'),
+ #                                         'small': datasets.Value('string')
+ #                                     }),
+ #                                     supervised_keys=None,
+ #                                     homepage=_HOMEPAGE,
+ #                                     citation=_CITATION,
+ #                                     license=_LICENSE)

+ #     def _split_generators(self, dl_manager):
+ #         files = dl_manager.download_and_extract(f"{_DATA}files.zip")
+ #         annotations = dl_manager.download(f"{_DATA}{_NAME}.csv")
+ #         files = dl_manager.iter_files(files)
+ #         return [
+ #             datasets.SplitGenerator(name=datasets.Split.TRAIN,
+ #                                     gen_kwargs={
+ #                                         "files": files,
+ #                                         'annotations': annotations
+ #                                     }),
+ #         ]

+ #     def _generate_examples(self, files, annotations):
+ #         annotations_df = pd.read_csv(annotations, sep=';')

+ #         files = sorted(files)
+ #         files = [files[i:i + 5] for i in range(0, len(files), 5)]
+ #         for idx, files_set in enumerate(files):
+ #             set_id = int(files_set[0].split('/')[2])
+ #             data = {'set_id': set_id}

+ #             for file in files_set:
+ #                 file_name = file.split('/')[3]
+ #                 if 'fist' in file_name.lower():
+ #                     data['fist'] = file
+ #                 elif 'four' in file_name.lower():
+ #                     data['four'] = file
+ #                 elif 'me' in file_name.lower():
+ #                     data['me'] = file
+ #                 elif 'one' in file_name.lower():
+ #                     data['one'] = file
+ #                 elif 'small' in file_name.lower():
+ #                     data['small'] = file
+ #             yield idx, data
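
With the builder script commented out in full, the custom logic above (downloading and extracting files.zip, grouping the sorted video paths into sets of five, and assigning each file to its gesture column by filename) no longer runs. A minimal sketch of loading the annotations directly with the generic CSV loader instead, assuming the URL assembled from the _DATA and _NAME constants of the old script is still valid:

from datasets import load_dataset

# Assumed URL, pieced together from the _DATA/_NAME constants of the old script.
CSV_URL = ("https://huggingface.co/datasets/TrainingDataPro/"
           "hand-gesture-recognition-dataset/resolve/main/data/"
           "hand-gesture-recognition-dataset.csv")

# The generic "csv" builder defaults to a comma delimiter, which lines up with
# the delimiter change made to the annotation file in this same commit.
ds = load_dataset("csv", data_files={"train": CSV_URL})
print(ds["train"][0]["one"])  # e.g. 'files/0/one.mp4', a path inside data/files.zip

Note that the CSV only stores relative paths; the video files themselves would still have to be fetched separately from data/files.zip in the repository.
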