zhuwq0 committed on
Commit
f65c733
1 Parent(s): a5134d8

fix small bugs

Files changed (1)
  1. quakeflow_nc.py +50 -44
quakeflow_nc.py CHANGED
@@ -21,6 +21,7 @@ import h5py
 import numpy as np
 import torch
 from typing import Dict, List, Optional, Tuple, Union
+import fsspec

 import datasets

@@ -52,6 +53,7 @@ _LICENSE = ""
 # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
 _REPO = "https://huggingface.co/datasets/AI4EPS/quakeflow_nc/resolve/main/data"
 _FILENAMES = ["NC1970-1989.h5", "NC1990-1994.h5", "NC1995-1999.h5", "NC2000-2004.h5", "NC2005-2009.h5", "NC2010.h5", "NC2011.h5", "NC2012.h5", "NC2013.h5", "NC2014.h5", "NC2015.h5", "NC2016.h5", "NC2017.h5", "NC2018.h5", "NC2019.h5", "NC2020.h5"]
+# _FILENAMES = ["NC2020.h5"]
 _URLS = {
     "station": [f"{_REPO}/{x}" for x in _FILENAMES],
     "event": [f"{_REPO}/{x}" for x in _FILENAMES],
@@ -107,8 +109,8 @@ class QuakeFlow_NC(datasets.GeneratorBasedBuilder):
         if self.config.name=="station":
             features=datasets.Features(
                 {
-                    "waveform": datasets.Array3D(shape=(3, self.nt), dtype='float32'),
-                    "phase_pick": datasets.Array3D(shape=(3, self.nt), dtype='float32'),
+                    "waveform": datasets.Array2D(shape=(3, self.nt), dtype='float32'),
+                    "phase_pick": datasets.Array2D(shape=(3, self.nt), dtype='float32'),
                     "event_location": datasets.Sequence(datasets.Value("float32")),
                     "station_location": datasets.Sequence(datasets.Value("float32")),
                 })
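The feature fix above replaces Array3D, which expects a 3-tuple shape, with Array2D, which matches the 2-dimensional (3, nt) per-station arrays the generator yields. A hedged usage sketch for the "station" config follows; the split name "train" is an assumption not shown in this diff, and recent datasets versions may also require trust_remote_code=True for script-based datasets.

import numpy as np
from datasets import load_dataset

# Streaming avoids downloading all HDF5 archives up front.
ds = load_dataset("AI4EPS/quakeflow_nc", name="station", split="train", streaming=True)
sample = next(iter(ds))

print(np.asarray(sample["waveform"]).shape)    # expected (3, nt), per the Array2D feature
print(np.asarray(sample["phase_pick"]).shape)  # same shape as the waveform
print(sample["event_location"])                # [longitude, latitude, depth_km, event_time_index]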
@@ -186,54 +188,58 @@ class QuakeFlow_NC(datasets.GeneratorBasedBuilder):
         # The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.

         for file in filepath:
-            with h5py.File(file, "r") as fp:
-                # for event_id in sorted(list(fp.keys())):
-                for event_id in fp.keys():
-                    event = fp[event_id]
-                    station_ids = list(event.keys())
-                    if self.config.name=="station":
-                        waveforms = np.zeros([3, self.nt], dtype="float32")
-                        phase_pick = np.zeros_like(waveforms)
-                        attrs = event.attrs
-                        event_location = [attrs["longitude"], attrs["latitude"], attrs["depth_km"], attrs["event_time_index"]]
-
-                        for i, sta_id in enumerate(station_ids):
-                            waveforms[:, :self.nt] = event[sta_id][:,:self.nt]
-                            attrs = event[sta_id].attrs
-                            p_picks = attrs["phase_index"][attrs["phase_type"] == "P"]
-                            s_picks = attrs["phase_index"][attrs["phase_type"] == "S"]
-                            phase_pick[:, :self.nt] = generate_label([p_picks, s_picks], nt=self.nt)
-                            station_location = [attrs["longitude"], attrs["latitude"], -attrs["elevation_m"]/1e3]
-
-                            yield f"{event_id}/{sta_id}", {
+            with fsspec.open(file, "rb") as fs:
+                with h5py.File(fs, "r") as fp:
+                    # for event_id in sorted(list(fp.keys())):
+                    event_ids = list(fp.keys())
+                    for event_id in event_ids:
+                        event = fp[event_id]
+                        station_ids = list(event.keys())
+                        if self.config.name=="station":
+                            waveforms = np.zeros([3, self.nt], dtype="float32")
+                            phase_pick = np.zeros_like(waveforms)
+                            attrs = event.attrs
+                            event_location = [attrs["longitude"], attrs["latitude"], attrs["depth_km"], attrs["event_time_index"]]
+
+                            for i, sta_id in enumerate(station_ids):
+                                # waveforms[:, :self.nt] = event[sta_id][:,:self.nt]
+                                waveforms[:, :self.nt] = event[sta_id][:self.nt,:].T
+                                attrs = event[sta_id].attrs
+                                p_picks = attrs["phase_index"][attrs["phase_type"] == "P"]
+                                s_picks = attrs["phase_index"][attrs["phase_type"] == "S"]
+                                # phase_pick[:, :self.nt] = generate_label([p_picks, s_picks], nt=self.nt)
+                                station_location = [attrs["longitude"], attrs["latitude"], -attrs["elevation_m"]/1e3]
+
+                                yield f"{event_id}/{sta_id}", {
+                                    "waveform": torch.from_numpy(waveforms).float(),
+                                    "phase_pick": torch.from_numpy(phase_pick).float(),
+                                    "event_location": torch.from_numpy(np.array(event_location)).float(),
+                                    "station_location": torch.from_numpy(np.array(station_location)).float(),
+                                }
+
+                        elif self.config.name=="event":
+                            waveforms = np.zeros([len(station_ids), 3, self.nt], dtype="float32")
+                            phase_pick = np.zeros_like(waveforms)
+                            attrs = event.attrs
+                            event_location = [attrs["longitude"], attrs["latitude"], attrs["depth_km"], attrs["event_time_index"]]
+                            station_location = []
+
+                            for i, sta_id in enumerate(station_ids):
+                                # waveforms[i, :, :self.nt] = event[sta_id][:,:self.nt]
+                                waveforms[i, :, :self.nt] = event[sta_id][:self.nt,:].T
+                                attrs = event[sta_id].attrs
+                                p_picks = attrs["phase_index"][attrs["phase_type"] == "P"]
+                                s_picks = attrs["phase_index"][attrs["phase_type"] == "S"]
+                                phase_pick[i, :, :] = generate_label([p_picks, s_picks], nt=self.nt)
+                                station_location.append([attrs["longitude"], attrs["latitude"], -attrs["elevation_m"]/1e3])
+
+                            yield event_id, {
                                 "waveform": torch.from_numpy(waveforms).float(),
                                 "phase_pick": torch.from_numpy(phase_pick).float(),
                                 "event_location": torch.from_numpy(np.array(event_location)).float(),
                                 "station_location": torch.from_numpy(np.array(station_location)).float(),
                             }

-                    elif self.config.name=="event":
-                        waveforms = np.zeros([len(station_ids), 3, self.nt], dtype="float32")
-                        phase_pick = np.zeros_like(waveforms)
-                        attrs = event.attrs
-                        event_location = [attrs["longitude"], attrs["latitude"], attrs["depth_km"], attrs["event_time_index"]]
-                        station_location = []
-
-                        for i, sta_id in enumerate(station_ids):
-                            waveforms[i, :, :self.nt] = event[sta_id][:,:self.nt]
-                            attrs = event[sta_id].attrs
-                            p_picks = attrs["phase_index"][attrs["phase_type"] == "P"]
-                            s_picks = attrs["phase_index"][attrs["phase_type"] == "S"]
-                            phase_pick[i, :, :] = generate_label([p_picks, s_picks], nt=self.nt)
-                            station_location.append([attrs["longitude"], attrs["latitude"], -attrs["elevation_m"]/1e3])
-
-                        yield event_id, {
-                            "waveform": torch.from_numpy(waveforms).float(),
-                            "phase_pick": torch.from_numpy(phase_pick).float(),
-                            "event_location": torch.from_numpy(np.array(event_location)).float(),
-                            "station_location": torch.from_numpy(np.array(station_location)).float(),
-                        }
-

 def generate_label(phase_list, label_width=[150, 150], nt=8192):

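The waveform indexing change above ([:, :self.nt] replaced by [:self.nt, :].T) suggests the per-station HDF5 datasets are stored time-major, roughly (num_samples, 3), while the loader wants channel-major (3, nt). A minimal numpy illustration of the difference is below; the stored layout is an assumption inferred from the fix.

import numpy as np

nt = 8192
trace = np.random.randn(nt + 100, 3).astype("float32")  # stand-in for event[sta_id]: (time, channel)

old_slice = trace[:, :nt]    # old indexing: slices the 3-channel axis, shape stays (nt + 100, 3)
new_slice = trace[:nt, :].T  # new indexing: first nt samples, transposed to (3, nt)

print(old_slice.shape)  # (8292, 3) -- cannot be assigned into a (3, nt) buffer
print(new_slice.shape)  # (3, 8192)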
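The hunk ends at the signature of generate_label(phase_list, label_width=[150, 150], nt=8192), whose body is unchanged and not shown in this diff. For orientation only, a plausible stand-in for such a pick-label generator is sketched below: one Gaussian bump per pick, one row per phase type, plus a leading "noise" row, giving the (3, nt) array that the event branch assigns into phase_pick. This is an illustration, not the repository's actual implementation.

import numpy as np

def generate_label_sketch(phase_list, label_width=[150, 150], nt=8192):
    # Illustrative stand-in: row 0 is the "no pick" channel, rows 1.. are one per phase type.
    target = np.zeros([len(phase_list) + 1, nt], dtype="float32")
    t = np.arange(nt)
    for i, (picks, width) in enumerate(zip(phase_list, label_width)):
        for idx in picks:
            target[i + 1, :] += np.exp(-((t - idx) ** 2) / (2 * (width / 6) ** 2))
    target[1:, :] = np.clip(target[1:, :], 0, 1)
    target[0, :] = np.clip(1 - target[1:, :].sum(axis=0), 0, 1)
    return target

# Example: a P pick at sample 3000 and an S pick at 3400 -> (3, 8192) label array.
label = generate_label_sketch([np.array([3000]), np.array([3400])])
print(label.shape)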