augustoperes committed on
Commit
70159fb
1 Parent(s): c003804

Upload fluid_cube.py (#1)

Browse files

- Upload fluid_cube.py (65795fc8da57c03608ab59d6747c0df9be808f23)

Files changed (1) hide show
  1. fluid_cube.py +74 -0
fluid_cube.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Dataset for the fluid cube
2
+
3
+ More on: https://inductiva.ai/blog/article/fluid-cube-dataset
4
+
5
+ """
6
+ import json
7
+
8
+ import datasets
9
+ import numpy as np
10
+
11
# Human-readable dataset description (passed to DatasetInfo in _info);
# points at the blog post documenting the dataset.
_DESCRIPTION = 'https://inductiva.ai/blog/article/fluid-cube-dataset'

# Public GCS bucket holding the dataset archives; __init__ appends
# '<version>.tar.gz' to form the full download URL.
_BASE_URL = 'https://storage.googleapis.com/fluid_cube/'
14
+
15
+
16
class WindTunnel(datasets.GeneratorBasedBuilder):
    '''The FluidCube builder.

    NOTE(review): the class is named WindTunnel but builds the FluidCube
    dataset — presumably copied from a sibling script. Renaming would be
    an interface-visible change, so the name is kept as-is.
    '''

    def __init__(self, version, **kwargs):
        """Set up the builder for one versioned archive.

        Args:
            version: archive name in the bucket; the download URL becomes
                ``_BASE_URL + '<version>.tar.gz'``.
            **kwargs: forwarded unchanged to ``GeneratorBasedBuilder``.
        """
        super().__init__(**kwargs)
        # Full URL of the tar.gz archive with the simulation JSON files.
        self.bucket_url = _BASE_URL + f'{version}.tar.gz'

    def _info(self):
        """Return the dataset metadata and per-example feature schema."""

        def scalar(dtype='float32'):
            # A single scalar value of the given dtype.
            return datasets.Value(dtype)

        def vector():
            # A variable-length list of float32 scalars.
            return [datasets.Value('float32')]

        feature_schema = {
            'block_position': vector(),
            'block_dimensions': vector(),
            'fluid_volume': scalar(),
            'block_velocity': vector(),
            'block_velocity_magnitude': scalar(),
            'kinematic_viscosity': scalar(),
            'density': scalar(),
            'tank_dimensions': vector(),
            'time_max': scalar(),
            'time_step': scalar(),
            'particle_radius': scalar(),
            'number_of_fluid_particles': scalar('int32'),
            # Float64 because pyArrow is not capable of
            # [Array2D(shape, float32)].
            # https://github.com/huggingface/datasets/issues/5936
            'simulation_time_steps':
                datasets.Sequence(
                    datasets.Array2D(dtype='float64', shape=(None, 6))),
        }
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(feature_schema))

    def _split_generators(self, dl_manager):
        """Download the archive and expose a single TRAIN split.

        The archive members are streamed lazily via ``iter_archive``
        rather than extracted up front.
        """
        archive_path = dl_manager.download(self.bucket_url)
        train_split = datasets.SplitGenerator(
            name=datasets.Split.TRAIN,
            gen_kwargs={
                'json_files': dl_manager.iter_archive(archive_path)
            })
        return [train_split]

    # pylint: disable=arguments-differ
    def _generate_examples(self, json_files):
        """Yield ``(id, example)`` pairs from the archive's JSON members.

        Args:
            json_files: iterable of ``(member_name, file_object)`` pairs,
                as produced by ``dl_manager.iter_archive``.
        """
        for example_id, (_, member) in enumerate(json_files):
            example = json.loads(member.read())
            # Transpose each stored step so rows match the
            # Array2D(shape=(None, 6)) feature declared in _info.
            example['simulation_time_steps'] = [
                np.transpose(step)
                for step in example['simulation_time_steps']
            ]
            yield example_id, example