added missing folder
- Gradio_app.ipynb +70 -41
- data/.DS_Store +0 -0
Gradio_app.ipynb
CHANGED
@@ -152,7 +152,7 @@
 " coeff = 1 - (var / (residuals.max() - residuals.min()))\n",
 " return coeff\n",
 "\n",
-"def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source_depth_km, velocity_model, max_waveforms):\n",
+"def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source_depth_km, velocity_model, max_waveforms, conf_thres_P, conf_thres_S):\n",
 " distances, t0s, st_lats, st_lons, waveforms, names = [], [], [], [], [], []\n",
 " \n",
 " taup_model = TauPyModel(model=velocity_model)\n",
@@ -175,6 +175,7 @@
 " minlongitude=(eq_lon-window), maxlongitude=(eq_lon+window), \n",
 " level='station')\n",
 " print('Finished downloading inventory')\n",
+" \n",
 " except (IndexError, FDSNNoDataException, FDSNTimeoutException, FDSNInternalServerException):\n",
 " fig, ax = plt.subplots()\n",
 " ax.text(0.5,0.5,'Something is wrong with the data provider, try another')\n",
@@ -187,7 +188,6 @@
 " cached_waveforms = glob(\"data/cached/*.mseed\")\n",
 "\n",
 " for network in inv:\n",
-" # Skip the SYntetic networks\n",
 " if network.code == 'SY':\n",
 " continue\n",
 " for station in network:\n",
@@ -203,8 +203,9 @@
 " starttime = obspy.UTCDateTime(timestamp) + arrivals[0].time - 15\n",
 " endtime = starttime + 60\n",
 " try:\n",
-"
-"
+" filename=f'{network.code}_{station.code}_{starttime}'\n",
+" if f\"data/cached/{filename}.mseed\" not in cached_waveforms:\n",
+" print(f'Downloading waveform for {filename}')\n",
 " waveform = client.get_waveforms(network=network.code, station=station.code, location=\"*\", channel=\"*\", \n",
 " starttime=starttime, endtime=endtime)\n",
 " waveform.write(f\"data/cached/{network.code}_{station.code}_{starttime}.mseed\", format=\"MSEED\")\n",
@@ -245,12 +246,16 @@
 " \n",
 " # If there are no waveforms, return an empty plot\n",
 " if len(waveforms) == 0:\n",
+" print('No waveforms found')\n",
 " fig, ax = plt.subplots()\n",
 " ax.text(0.5,0.5,'No waveforms found')\n",
 " fig.canvas.draw();\n",
 " image = np.array(fig.canvas.renderer.buffer_rgba())\n",
 " plt.close(fig)\n",
-"
+" output_picks = pd.DataFrame()\n",
+" output_picks.to_csv('data/picks.csv', index=False)\n",
+" output_csv = 'data/picks.csv'\n",
+" return image, output_picks, output_csv\n",
 " \n",
 "\n",
 " first_distances = bin_distances(distances, bin_size=10/111.2)\n",
@@ -277,9 +282,12 @@
 " p_phases = output[:, 0]\n",
 " s_phases = output[:, 1]\n",
 "\n",
-"
-"
-"
+" p_phases = p_phases.reshape(len(waveforms),-1)\n",
+" s_phases = s_phases.reshape(len(waveforms),-1)\n",
+"\n",
+" # Max confidence - min variance \n",
+" p_max_confidence = p_phases.std(axis=-1).min()\n",
+" s_max_confidence = s_phases.std(axis=-1).min()\n",
 "\n",
 " print(f\"Starting plotting {len(waveforms)} waveforms\")\n",
 " fig, ax = plt.subplots(ncols=3, figsize=(10, 3))\n",
@@ -304,17 +312,18 @@
 " topo_map.da.plot(ax = ax[2], cmap='Greys', add_colorbar=False, add_labels=False)\n",
 " ax[1].imshow(hillshade, cmap=\"Greys\", alpha=0.5)\n",
 "\n",
-" output_picks = pd.DataFrame({'station_name' : [],
+" output_picks = pd.DataFrame({'station_name' : [], \n",
+" 'st_lat' : [], 'st_lon' : [],\n",
+" 'starttime' : [], \n",
 " 'p_phase, s' : [], 'p_uncertainty, s' : [], \n",
 " 's_phase, s' : [], 's_uncertainty, s' : [],\n",
 " 'velocity_p, km/s' : [], 'velocity_s, km/s' : []})\n",
-"
-" \n",
+" \n",
 " for i in range(len(waveforms)):\n",
 " print(f\"Plotting waveform {i+1}/{len(waveforms)}\")\n",
-" current_P = p_phases[i
-" current_S = s_phases[i
-"\n",
+" current_P = p_phases[i]\n",
+" current_S = s_phases[i]\n",
+" \n",
 " x = [t0s[i] + pd.Timedelta(seconds=k/100) for k in np.linspace(0,6000,6000)]\n",
 " x = mdates.date2num(x)\n",
 "\n",
@@ -322,32 +331,40 @@
 " p_conf = 1/(current_P.std()/p_max_confidence).item()\n",
 " s_conf = 1/(current_S.std()/s_max_confidence).item()\n",
 "\n",
-"
+" delta_t = t0s[i].timestamp - obspy.UTCDateTime(timestamp).timestamp\n",
 "\n",
-" ax[0].
-" ax[0].scatter(x[int(current_S.mean()*waveforms[i][0].shape[-1])], waveforms[i][0, 0].mean()+distances[i]*111.2, color='b', alpha=s_conf, marker='|')\n",
-" ax[0].set_ylabel('Z')\n",
+" ax[0].plot(x, waveforms[i][0, 0]*10+distances[i]*111.2, color='black', alpha=0.5, lw=1)\n",
 "\n",
-"
+" if (current_P.std().item()*60 < conf_thres_P) or (current_S.std().item()*60 < conf_thres_S):\n",
+" ax[0].scatter(x[int(current_P.mean()*waveforms[i][0].shape[-1])], waveforms[i][0, 0].mean()+distances[i]*111.2, color='r', alpha=p_conf, marker='|')\n",
+" ax[0].scatter(x[int(current_S.mean()*waveforms[i][0].shape[-1])], waveforms[i][0, 0].mean()+distances[i]*111.2, color='b', alpha=s_conf, marker='|')\n",
+" \n",
+" velocity_p = (distances[i]*111.2)/(delta_t+current_P.mean()*60).item()\n",
+" velocity_s = (distances[i]*111.2)/(delta_t+current_S.mean()*60).item()\n",
 "\n",
-"
-"
+" # Generate an array from st_lat to eq_lat and from st_lon to eq_lon\n",
+" x = np.linspace(st_lons[i], eq_lon, 50)\n",
+" y = np.linspace(st_lats[i], eq_lat, 50)\n",
+" \n",
+" # Plot the array\n",
+" ax[1].scatter(x, y, c=np.zeros_like(x)+velocity_p, alpha=0.1, vmin=0, vmax=8)\n",
+" ax[2].scatter(x, y, c=np.zeros_like(x)+velocity_s, alpha=0.1, vmin=0, vmax=8)\n",
 "\n",
+" else:\n",
+" velocity_p = np.nan\n",
+" velocity_s = np.nan\n",
+" \n",
+" ax[0].set_ylabel('Z')\n",
 " print(f\"Station {st_lats[i]}, {st_lons[i]} has P velocity {velocity_p} and S velocity {velocity_s}\")\n",
 "\n",
-" output_picks = output_picks.append(pd.DataFrame({'station_name': [names[i]],
+" output_picks = output_picks.append(pd.DataFrame({'station_name': [names[i]], \n",
+" 'st_lat' : [st_lats[i]], 'st_lon' : [st_lons[i]],\n",
+" 'starttime' : [str(t0s[i])], \n",
 " 'p_phase, s' : [(delta_t+current_P.mean()*60).item()], 'p_uncertainty, s' : [current_P.std().item()*60], \n",
 " 's_phase, s' : [(delta_t+current_S.mean()*60).item()], 's_uncertainty, s' : [current_S.std().item()*60],\n",
 " 'velocity_p, km/s' : [velocity_p], 'velocity_s, km/s' : [velocity_s]}))\n",
 " \n",
-" # Generate an array from st_lat to eq_lat and from st_lon to eq_lon\n",
-" x = np.linspace(st_lons[i], eq_lon, 50)\n",
-" y = np.linspace(st_lats[i], eq_lat, 50)\n",
 " \n",
-" # Plot the array\n",
-" ax[1].scatter(x, y, c=np.zeros_like(x)+velocity_p, alpha=0.1, vmin=0, vmax=8)\n",
-" ax[2].scatter(x, y, c=np.zeros_like(x)+velocity_s, alpha=0.1, vmin=0, vmax=8)\n",
-"\n",
 " # Add legend\n",
 " ax[0].scatter(None, None, color='r', marker='|', label='P')\n",
 " ax[0].scatter(None, None, color='b', marker='|', label='S')\n",
@@ -379,18 +396,11 @@
 " fig.canvas.draw();\n",
 " image = np.array(fig.canvas.renderer.buffer_rgba())\n",
 " plt.close(fig)\n",
-"\n",
-"
-" output_csv = 'data/picks.csv'\n",
+" output_picks.to_csv(f'data/velocity/{eq_lat}_{eq_lon}_{timestamp}_{len(waveforms)}.csv', index=False)\n",
+" output_csv = f'data/velocity/{eq_lat}_{eq_lon}_{timestamp}_{len(waveforms)}.csv'\n",
 "\n",
 " return image, output_picks, output_csv\n",
 "\n",
-"def download_picks(output_picks):\n",
-" output_csv = io.BytesIO()\n",
-" output_picks.to_csv(output_csv, index=False)\n",
-" output_csv.seek(0)\n",
-" return output_csv\n",
-"\n",
 "model = torch.jit.load(\"model.pt\")\n",
 "\n",
 "with gr.Blocks() as demo:\n",
@@ -448,7 +458,7 @@
 " gr.HTML(\"\"\"\n",
 " <div style=\"padding: 20px; border-radius: 10px; font-size: 16px;\">\n",
 " <p style=\"font-weight: bold; font-size: 24px; margin-bottom: 20px;\">Using PhaseHunter to Analyze Seismic Waveforms</p>\n",
-" <p>Select an earthquake from the global earthquake catalogue and the app will download the waveform from the FDSN client of your choice. The app will use a velocity model of your choice to select appropriate time windows for each station within a specified radius of the earthquake.</p>\n",
+" <p>Select an earthquake from the global earthquake catalogue (e.g. <a href=\"https://earthquake.usgs.gov/earthquakes/map\">USGS</a>) and the app will download the waveform from the FDSN client of your choice. The app will use a velocity model of your choice to select appropriate time windows for each station within a specified radius of the earthquake.</p>\n",
 " <p>The app will then analyze the waveforms and mark the detected phases on the waveform. Pick data for each waveform is reported in seconds from the start of the waveform.</p>\n",
 " <p>Velocities are derived from distance and travel time determined by PhaseHunter picks (<span style=\"font-style: italic;\">v = distance/predicted_pick_time</span>). The background of the velocity plot is colored by DEM.</p>\n",
 " </div>\n",
@@ -500,7 +510,8 @@
 " with gr.Column(scale=2):\n",
 " radius_inputs = gr.Slider(minimum=1, \n",
 " maximum=200, \n",
-" value=50,
+" value=50, \n",
+" label=\"Radius (km)\", \n",
 " step=10,\n",
 " info=\"\"\"Select the radius around the earthquake to download data from.\\n \n",
 " Note that the larger the radius, the longer the app will take to run.\"\"\",\n",
@@ -514,6 +525,23 @@
 " info=\"Maximum number of waveforms to show per section\\n (to avoid long prediction times)\",\n",
 " interactive=True,\n",
 " )\n",
+" with gr.Column(scale=2):\n",
+" P_thres_inputs = gr.Slider(minimum=0.01,\n",
+" maximum=1,\n",
+" value=0.1,\n",
+" label=\"P uncertainty threshold, s\",\n",
+" step=0.01,\n",
+" info=\"Acceptable uncertainty for P picks expressed in std() seconds\",\n",
+" interactive=True,\n",
+" )\n",
+" S_thres_inputs = gr.Slider(minimum=0.01,\n",
+" maximum=1,\n",
+" value=0.2,\n",
+" label=\"S uncertainty threshold, s\",\n",
+" step=0.01,\n",
+" info=\"Acceptable uncertainty for S picks expressed in std() seconds\",\n",
+" interactive=True,\n",
+" )\n",
 " \n",
 " button = gr.Button(\"Predict phases\")\n",
 " output_image = gr.Image(label='Waveforms with Phases Marked', type='numpy', interactive=False)\n",
@@ -528,7 +556,8 @@
 " inputs=[client_inputs, timestamp_inputs, \n",
 " eq_lat_inputs, eq_lon_inputs, \n",
 " radius_inputs, source_depth_inputs, \n",
-" velocity_inputs, max_waveforms_inputs
+" velocity_inputs, max_waveforms_inputs,\n",
+" P_thres_inputs, S_thres_inputs],\n",
 " outputs=[output_image, output_picks, output_csv])\n",
 "\n",
 "demo.launch()"
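The substance of the diff above: P and S picks are now kept only when their predicted uncertainty (the standard deviation of the pick position, scaled to seconds) falls below the new threshold sliders, and apparent velocities are computed from the accepted picks, matching the description's v = distance/predicted_pick_time. A minimal sketch of that logic under stated assumptions: estimate_velocity, pick_mean, pick_std, and the example numbers are illustrative rather than taken from the notebook, and the notebook itself applies the P and S checks jointly with `or` before plotting.

import numpy as np

def estimate_velocity(distance_km, delta_t_s, pick_mean, pick_std, uncertainty_threshold_s):
    """Apparent velocity (km/s) for one pick, or NaN if the pick is too uncertain.

    pick_mean and pick_std are the fractional position of the phase inside the
    60 s window, so multiplying by 60 converts them to seconds, the same scaling
    as the notebook's `current_P.std().item()*60 < conf_thres_P` check.
    """
    if pick_std * 60 >= uncertainty_threshold_s:
        return np.nan                       # rejected pick, as in the new `else:` branch
    travel_time_s = delta_t_s + pick_mean * 60
    return distance_km / travel_time_s      # v = distance / predicted_pick_time

# Illustrative numbers only: station 50 km away, window starts 5 s after origin,
# P pick at 0.05 of the window with std 0.001, i.e. 0.06 s uncertainty < 0.1 s threshold.
print(estimate_velocity(50.0, 5.0, 0.05, 0.001, uncertainty_threshold_s=0.1))  # ~6.25 km/s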
data/.DS_Store
CHANGED
Binary files a/data/.DS_Store and b/data/.DS_Store differ
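A side note on the caching hunk above (the added filename=f'{network.code}_{station.code}_{starttime}' and cached_waveforms check): the pattern is read from the MiniSEED cache when the file exists, otherwise download and write it to the cache. A minimal standalone sketch, assuming `client` is an obspy.clients.fdsn.Client; the function name is hypothetical, and os.path.exists replaces the notebook's glob listing purely for brevity.

import os
import obspy

def get_waveform_cached(client, network_code, station_code, starttime, endtime,
                        cache_dir="data/cached"):
    """Return a Stream from the local MiniSEED cache if present,
    otherwise download it via the FDSN client and cache it."""
    os.makedirs(cache_dir, exist_ok=True)
    path = f"{cache_dir}/{network_code}_{station_code}_{starttime}.mseed"
    if os.path.exists(path):
        return obspy.read(path)
    st = client.get_waveforms(network=network_code, station=station_code,
                              location="*", channel="*",
                              starttime=starttime, endtime=endtime)
    st.write(path, format="MSEED")
    return st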