crimeacs committed on
Commit 5734e39
Parent: 4ea56e8

finished Alpha version

This view is limited to 50 files because the commit contains too many changes.
Files changed (50):
  1. .vscode/settings.json +3 -0
  2. Gradio_app.ipynb +230 -125
  3. app.py +226 -72
  4. data/.DS_Store +0 -0
  5. data/cached/BC_JARAX_2023-04-01T01:16:13.997267Z.mseed +0 -0
  6. data/cached/CE_24944_2023-04-01T01:16:14.044529Z.mseed +0 -0
  7. data/cached/CE_24945_2023-04-01T01:16:13.506381Z.mseed +0 -0
  8. data/cached/CI_ADO_2019-07-04T17:33:53.650962Z.mseed +0 -0
  9. data/cached/CI_ARV_2019-07-04T17:33:53.096986Z.mseed +0 -0
  10. data/cached/CI_CJV2_2023-04-01T01:16:16.011425Z.mseed +0 -0
  11. data/cached/CI_CWC_2019-07-04T17:33:47.189005Z.mseed +0 -0
  12. data/cached/CI_EDW2_2019-07-04T17:33:49.567241Z.mseed +0 -0
  13. data/cached/CI_FUR_2019-07-04T17:33:49.310305Z.mseed +0 -0
  14. data/cached/CI_GRA_2019-07-04T17:33:53.959922Z.mseed +0 -0
  15. data/cached/CI_GSC_2019-07-04T17:33:47.536363Z.mseed +0 -0
  16. data/cached/CI_HEC_2019-07-04T17:33:56.148977Z.mseed +0 -0
  17. data/cached/CI_ISA_2019-07-04T17:33:46.297658Z.mseed +0 -0
  18. data/cached/CI_JNH2_2023-04-01T01:16:13.664044Z.mseed +0 -0
  19. data/cached/CI_JRC2_2019-07-04T17:33:39.947494Z.mseed +0 -0
  20. data/cached/CI_LRL_2019-07-04T17:33:40.248999Z.mseed +0 -0
  21. data/cached/CI_LRR2_2023-04-01T01:16:15.095101Z.mseed +0 -0
  22. data/cached/CI_MPM_2019-07-04T17:33:40.443346Z.mseed +0 -0
  23. data/cached/CI_OSI_2019-07-04T17:33:57.203547Z.mseed +0 -0
  24. data/cached/CI_PUT_2023-04-01T01:16:16.558724Z.mseed +0 -0
  25. data/cached/CI_Q0013_2019-07-04T17:33:54.489098Z.mseed +0 -0
  26. data/cached/CI_Q0024_2019-07-04T17:33:55.620507Z.mseed +0 -0
  27. data/cached/CI_Q0035_2019-07-04T17:33:56.041452Z.mseed +0 -0
  28. data/cached/CI_Q0056_2019-07-04T17:33:50.407027Z.mseed +0 -0
  29. data/cached/CI_Q0061_2019-07-04T17:33:53.114695Z.mseed +0 -0
  30. data/cached/CI_Q0068_2019-07-04T17:33:46.411249Z.mseed +0 -0
  31. data/cached/CI_Q0072_2019-07-04T17:33:38.390421Z.mseed +0 -0
  32. data/cached/CI_RRX_2019-07-04T17:33:50.712219Z.mseed +0 -0
  33. data/cached/CI_SBB2_2023-04-01T01:16:15.609344Z.mseed +0 -0
  34. data/cached/CI_SHO_2019-07-04T17:33:51.673022Z.mseed +0 -0
  35. data/cached/CI_SLA_2019-07-04T17:33:40.190746Z.mseed +0 -0
  36. data/cached/CI_SRA_2023-04-01T01:16:14.887472Z.mseed +0 -0
  37. data/cached/CI_SRT_2019-07-04T17:33:38.029990Z.mseed +0 -0
  38. data/cached/CI_TIN_2019-07-04T17:33:55.946998Z.mseed +0 -0
  39. data/cached/CI_TOW2_2019-07-04T17:33:37.991033Z.mseed +0 -0
  40. data/cached/CI_VCS_2023-04-01T01:16:15.307897Z.mseed +0 -0
  41. data/cached/CI_VTV_2019-07-04T17:33:53.688913Z.mseed +0 -0
  42. data/cached/CI_WBM_2019-07-04T17:33:40.063644Z.mseed +0 -0
  43. data/cached/CI_WLS2_2023-04-01T01:16:13.623472Z.mseed +0 -0
  44. data/cached/CI_WMF_2019-07-04T17:33:41.867962Z.mseed +0 -0
  45. data/cached/CI_WRC2_2019-07-04T17:33:38.698107Z.mseed +0 -0
  46. data/cached/CI_WRV2_2019-07-04T17:33:40.843660Z.mseed +0 -0
  47. data/cached/CI_WVP2_2019-07-04T17:33:39.650418Z.mseed +0 -0
  48. data/cached/LB_DAC_2019-07-04T17:33:43.387184Z.mseed +0 -0
  49. data/cached/NN_FMT_2019-07-04T17:33:51.798341Z.mseed +0 -0
  50. data/cached/NN_GVN_2019-07-04T17:33:54.053591Z.mseed +0 -0
.vscode/settings.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "python.formatting.provider": "black"
+ }
Gradio_app.ipynb CHANGED
@@ -2,42 +2,14 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
  "metadata": {},
7
  "outputs": [
8
- {
9
- "data": {
10
- "text/plain": [
11
- "'hi!'"
12
- ]
13
- },
14
- "execution_count": 1,
15
- "metadata": {},
16
- "output_type": "execute_result"
17
- }
18
- ],
19
- "source": [
20
- "'hi!'"
21
- ]
22
- },
23
- {
24
- "cell_type": "code",
25
- "execution_count": 4,
26
- "metadata": {},
27
- "outputs": [
28
- {
29
- "name": "stderr",
30
- "output_type": "stream",
31
- "text": [
32
- "/Users/anovosel/miniconda3/envs/phasehunter/lib/python3.11/site-packages/gradio/outputs.py:43: UserWarning: Usage of gradio.outputs is deprecated, and will not be supported in the future, please import your components from gradio.components\n",
33
- " warnings.warn(\n"
34
- ]
35
- },
36
  {
37
  "name": "stdout",
38
  "output_type": "stream",
39
  "text": [
40
- "Running on local URL: http://127.0.0.1:7862\n",
41
  "\n",
42
  "To create a public link, set `share=True` in `launch()`.\n"
43
  ]
@@ -45,7 +17,7 @@
45
  {
46
  "data": {
47
  "text/html": [
48
- "<div><iframe src=\"http://127.0.0.1:7862/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
49
  ],
50
  "text/plain": [
51
  "<IPython.core.display.HTML object>"
@@ -58,30 +30,9 @@
58
  "data": {
59
  "text/plain": []
60
  },
61
- "execution_count": 4,
62
  "metadata": {},
63
  "output_type": "execute_result"
64
- },
65
- {
66
- "name": "stdout",
67
- "output_type": "stream",
68
- "text": [
69
- "Error in callback <function _draw_all_if_interactive at 0x1774d0ea0> (for post_execute):\n"
70
- ]
71
- },
72
- {
73
- "ename": "KeyboardInterrupt",
74
- "evalue": "",
75
- "output_type": "error",
76
- "traceback": [
77
- "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
78
- "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
79
- "File \u001b[0;32m~/miniconda3/envs/phasehunter/lib/python3.11/site-packages/matplotlib/pyplot.py:120\u001b[0m, in \u001b[0;36m_draw_all_if_interactive\u001b[0;34m()\u001b[0m\n\u001b[1;32m 118\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_draw_all_if_interactive\u001b[39m():\n\u001b[1;32m 119\u001b[0m \u001b[39mif\u001b[39;00m matplotlib\u001b[39m.\u001b[39mis_interactive():\n\u001b[0;32m--> 120\u001b[0m draw_all()\n",
80
- "File \u001b[0;32m~/miniconda3/envs/phasehunter/lib/python3.11/site-packages/matplotlib/_pylab_helpers.py:132\u001b[0m, in \u001b[0;36mGcf.draw_all\u001b[0;34m(cls, force)\u001b[0m\n\u001b[1;32m 130\u001b[0m \u001b[39mfor\u001b[39;00m manager \u001b[39min\u001b[39;00m \u001b[39mcls\u001b[39m\u001b[39m.\u001b[39mget_all_fig_managers():\n\u001b[1;32m 131\u001b[0m \u001b[39mif\u001b[39;00m force \u001b[39mor\u001b[39;00m manager\u001b[39m.\u001b[39mcanvas\u001b[39m.\u001b[39mfigure\u001b[39m.\u001b[39mstale:\n\u001b[0;32m--> 132\u001b[0m manager\u001b[39m.\u001b[39;49mcanvas\u001b[39m.\u001b[39;49mdraw_idle()\n",
81
- "File \u001b[0;32m~/miniconda3/envs/phasehunter/lib/python3.11/site-packages/matplotlib/backend_bases.py:2082\u001b[0m, in \u001b[0;36mFigureCanvasBase.draw_idle\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 2080\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_is_idle_drawing:\n\u001b[1;32m 2081\u001b[0m \u001b[39mwith\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_idle_draw_cntx():\n\u001b[0;32m-> 2082\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mdraw(\u001b[39m*\u001b[39;49margs, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n",
82
- "File \u001b[0;32m~/miniconda3/envs/phasehunter/lib/python3.11/site-packages/matplotlib/backends/backend_agg.py:397\u001b[0m, in \u001b[0;36mFigureCanvasAgg.draw\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 395\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mrenderer\u001b[39m.\u001b[39mclear()\n\u001b[1;32m 396\u001b[0m \u001b[39m# Acquire a lock on the shared font cache.\u001b[39;00m\n\u001b[0;32m--> 397\u001b[0m \u001b[39mwith\u001b[39;49;00m RendererAgg\u001b[39m.\u001b[39;49mlock, \\\n\u001b[1;32m 398\u001b[0m (\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtoolbar\u001b[39m.\u001b[39;49m_wait_cursor_for_draw_cm() \u001b[39mif\u001b[39;49;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtoolbar\n\u001b[1;32m 399\u001b[0m \u001b[39melse\u001b[39;49;00m nullcontext()):\n\u001b[1;32m 400\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mfigure\u001b[39m.\u001b[39;49mdraw(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mrenderer)\n\u001b[1;32m 401\u001b[0m \u001b[39m# A GUI class may be need to update a window using this draw, so\u001b[39;49;00m\n\u001b[1;32m 402\u001b[0m \u001b[39m# don't forget to call the superclass.\u001b[39;49;00m\n",
83
- "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
84
- ]
85
  }
86
  ],
87
  "source": [
@@ -95,6 +46,8 @@
95
  "import torch\n",
96
  "\n",
97
  "from scipy.stats import gaussian_kde\n",
 
 
98
  "\n",
99
  "import obspy\n",
100
  "from obspy.clients.fdsn import Client\n",
@@ -107,6 +60,7 @@
107
  "\n",
108
  "import matplotlib.pyplot as plt\n",
109
  "import matplotlib.dates as mdates\n",
 
110
  "\n",
111
  "from glob import glob\n",
112
  "\n",
@@ -123,21 +77,25 @@
123
  "\n",
124
  " return processed_input, p_phase, s_phase\n",
125
  "\n",
126
- "def mark_phases(waveform):\n",
 
 
 
 
127
  " processed_input, p_phase, s_phase = make_prediction(waveform)\n",
128
  "\n",
129
  " # Create a plot of the waveform with the phases marked\n",
130
  " if sum(processed_input[0][2] == 0): #if input is 1C\n",
131
  " fig, ax = plt.subplots(nrows=2, figsize=(10, 2), sharex=True)\n",
132
  "\n",
133
- " ax[0].plot(processed_input[0][0])\n",
134
  " ax[0].set_ylabel('Norm. Ampl.')\n",
135
  "\n",
136
  " else: #if input is 3C\n",
137
  " fig, ax = plt.subplots(nrows=4, figsize=(10, 6), sharex=True)\n",
138
- " ax[0].plot(processed_input[0][0])\n",
139
- " ax[1].plot(processed_input[0][1])\n",
140
- " ax[2].plot(processed_input[0][2])\n",
141
  "\n",
142
  " ax[0].set_ylabel('Z')\n",
143
  " ax[1].set_ylabel('N')\n",
@@ -169,22 +127,39 @@
169
  " plt.close(fig)\n",
170
  " return image\n",
171
  "\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
172
  "def variance_coefficient(residuals):\n",
173
  " # calculate the variance of the residuals\n",
174
  " var = residuals.var()\n",
175
- " \n",
176
  " # scale the variance to a coefficient between 0 and 1\n",
177
  " coeff = 1 - (var / (residuals.max() - residuals.min()))\n",
178
- " \n",
179
  " return coeff\n",
180
  "\n",
181
- "def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source_depth_km, velocity_model):\n",
182
  " distances, t0s, st_lats, st_lons, waveforms = [], [], [], [], []\n",
183
  " \n",
184
  " taup_model = TauPyModel(model=velocity_model)\n",
185
  " client = Client(client_name)\n",
186
  "\n",
187
  " window = radius_km / 111.2\n",
 
188
  "\n",
189
  " assert eq_lat - window > -90 and eq_lat + window < 90, \"Latitude out of bounds\"\n",
190
  " assert eq_lon - window > -180 and eq_lon + window < 180, \"Longitude out of bounds\"\n",
@@ -192,59 +167,107 @@
192
  " starttime = obspy.UTCDateTime(timestamp)\n",
193
  " endtime = starttime + 120\n",
194
  "\n",
195
- " inv = client.get_stations(network=\"*\", station=\"*\", location=\"*\", channel=\"*H*\", \n",
196
- " starttime=starttime, endtime=endtime, \n",
197
- " minlatitude=(eq_lat-window), maxlatitude=(eq_lat+window),\n",
198
- " minlongitude=(eq_lon-window), maxlongitude=(eq_lon+window), \n",
199
- " level='station')\n",
 
 
 
 
 
 
 
 
 
 
200
  " \n",
201
  " waveforms = []\n",
202
  " cached_waveforms = glob(\"data/cached/*.mseed\")\n",
203
  "\n",
204
  " for network in inv:\n",
 
 
 
205
  " for station in network:\n",
206
- " try:\n",
207
- " distance = locations2degrees(eq_lat, eq_lon, station.latitude, station.longitude)\n",
208
- "\n",
209
- " arrivals = taup_model.get_travel_times(source_depth_in_km=source_depth_km, \n",
210
- " distance_in_degree=distance, \n",
211
- " phase_list=[\"P\", \"S\"])\n",
212
  "\n",
213
- " if len(arrivals) > 0:\n",
 
 
214
  "\n",
215
- " starttime = obspy.UTCDateTime(timestamp) + arrivals[0].time - 15\n",
216
- " endtime = starttime + 60\n",
217
  "\n",
 
 
 
218
  " if f\"data/cached/{network.code}_{station.code}_{starttime}.mseed\" not in cached_waveforms:\n",
 
219
  " waveform = client.get_waveforms(network=network.code, station=station.code, location=\"*\", channel=\"*\", \n",
220
  " starttime=starttime, endtime=endtime)\n",
221
  " waveform.write(f\"data/cached/{network.code}_{station.code}_{starttime}.mseed\", format=\"MSEED\")\n",
 
222
  " else:\n",
 
223
  " waveform = obspy.read(f\"data/cached/{network.code}_{station.code}_{starttime}.mseed\")\n",
224
- " \n",
225
- " waveform = waveform.select(channel=\"H[BH][ZNE]\")\n",
226
- " waveform = waveform.merge(fill_value=0)\n",
227
- " waveform = waveform[:3]\n",
228
- " \n",
229
- " len_check = [len(x.data) for x in waveform]\n",
230
- " if len(set(len_check)) > 1:\n",
231
- " continue\n",
 
 
 
 
 
 
 
 
 
232
  "\n",
233
- " if len(waveform) == 3:\n",
234
- " try:\n",
235
- " waveform = prepare_waveform(np.stack([x.data for x in waveform]))\n",
236
- " except:\n",
237
- " continue\n",
238
- " \n",
239
  " distances.append(distance)\n",
240
  " t0s.append(starttime)\n",
241
  " st_lats.append(station.latitude)\n",
242
  " st_lons.append(station.longitude)\n",
243
  " waveforms.append(waveform)\n",
244
  "\n",
245
- " except (IndexError, FDSNNoDataException, FDSNTimeoutException):\n",
246
- " continue\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
247
  "\n",
 
 
 
248
  " with torch.no_grad():\n",
249
  " waveforms_torch = torch.vstack(waveforms)\n",
250
  " output = model(waveforms_torch)\n",
@@ -256,8 +279,31 @@
256
  " p_max_confidence = np.min([p_phases[i::len(waveforms)].std() for i in range(len(waveforms))]) \n",
257
  " s_max_confidence = np.min([s_phases[i::len(waveforms)].std() for i in range(len(waveforms))])\n",
258
  "\n",
259
- " fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10, 3), sharex=True)\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
260
  " for i in range(len(waveforms)):\n",
 
261
  " current_P = p_phases[i::len(waveforms)]\n",
262
  " current_S = s_phases[i::len(waveforms)]\n",
263
  "\n",
@@ -275,17 +321,46 @@
275
  " ax[0].set_ylabel('Z')\n",
276
  "\n",
277
  " ax[0].xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S'))\n",
278
- " ax[0].xaxis.set_major_locator(mdates.SecondLocator(interval=5))\n",
 
 
 
 
 
 
 
279
  " \n",
 
 
 
 
 
 
 
 
 
280
  " ax[0].scatter(None, None, color='r', marker='|', label='P')\n",
281
  " ax[0].scatter(None, None, color='b', marker='|', label='S')\n",
282
  " ax[0].legend()\n",
283
  "\n",
284
- " ax[1].scatter(st_lats, st_lons, color='b', marker='d', label='Stations')\n",
285
- " ax[1].scatter(eq_lat, eq_lon, color='r', marker='*', label='Earthquake')\n",
286
- " ax[1].legend()\n",
287
- " plt.subplots_adjust(hspace=0., wspace=0.)\n",
288
- " \n",
 
 
 
 
 
 
 
 
 
 
 
 
 
289
  " fig.canvas.draw();\n",
290
  " image = np.array(fig.canvas.renderer.buffer_rgba())\n",
291
  " plt.close(fig)\n",
@@ -299,30 +374,54 @@
299
  "model.eval()\n",
300
  "\n",
301
  "with gr.Blocks() as demo:\n",
302
- " gr.Markdown(\"# PhaseHunter\")\n",
303
- " gr.Markdown(\"\"\"This app allows one to detect P and S seismic phases along with uncertainty of the detection. \n",
304
- " The app can be used in three ways: either by selecting one of the sample waveforms;\n",
305
- " or by selecting an earthquake from the global earthquake catalogue;\n",
306
- " or by uploading a waveform of interest.\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
307
  " \"\"\")\n",
308
- " with gr.Tab(\"Default example\"):\n",
309
- " # Define the input and output types for Gradio\n",
310
- " inputs = gr.Dropdown(\n",
311
- " [\"data/sample/sample_0.npy\", \n",
312
- " \"data/sample/sample_1.npy\", \n",
313
- " \"data/sample/sample_2.npy\"], \n",
314
- " label=\"Sample waveform\", \n",
315
- " info=\"Select one of the samples\",\n",
316
- " value = \"data/sample/sample_0.npy\"\n",
317
- " )\n",
 
 
 
318
  "\n",
319
  " button = gr.Button(\"Predict phases\")\n",
320
- " outputs = gr.outputs.Image(label='Waveform with Phases Marked', type='numpy')\n",
321
  " \n",
322
- " button.click(mark_phases, inputs=inputs, outputs=outputs)\n",
323
  " \n",
324
  " with gr.Tab(\"Select earthquake from catalogue\"):\n",
325
- " gr.Markdown('TEST')\n",
 
326
  " \n",
327
  " client_inputs = gr.Dropdown(\n",
328
  " choices = list(URL_MAPPINGS.keys()), \n",
@@ -331,6 +430,7 @@
331
  " value = \"IRIS\",\n",
332
  " interactive=True\n",
333
  " )\n",
 
334
  " with gr.Row(): \n",
335
  "\n",
336
  " timestamp_inputs = gr.Textbox(value='2019-07-04 17:33:49',\n",
@@ -364,13 +464,23 @@
364
  " interactive=True)\n",
365
  " \n",
366
  " velocity_inputs = gr.Dropdown(\n",
367
- " choices = ['1066a', '1066b', 'ak135', 'ak135f', 'herrin', 'iasp91', 'jb', 'prem', 'pwdk'], \n",
 
 
368
  " label=\"1D velocity model\", \n",
369
  " info=\"Velocity model for station selection\",\n",
370
  " value = \"1066a\",\n",
371
  " interactive=True\n",
372
  " )\n",
373
- " \n",
 
 
 
 
 
 
 
 
374
  " \n",
375
  " button = gr.Button(\"Predict phases\")\n",
376
  " outputs_section = gr.Image(label='Waveforms with Phases Marked', type='numpy', interactive=False)\n",
@@ -378,15 +488,10 @@
378
  " button.click(predict_on_section, \n",
379
  " inputs=[client_inputs, timestamp_inputs, \n",
380
  " eq_lat_inputs, eq_lon_inputs, \n",
381
- " radius_inputs, source_depth_inputs, velocity_inputs],\n",
 
382
  " outputs=outputs_section)\n",
383
  "\n",
384
- " with gr.Tab(\"Predict on your own waveform\"):\n",
385
- " gr.Markdown(\"\"\"\n",
386
- " Please upload your waveform in .npy (numpy) format. \n",
387
- " Your waveform should be sampled at 100 sps and have 3 (Z, N, E) or 1 (Z) channels.\n",
388
- " \"\"\")\n",
389
- "\n",
390
  "demo.launch()"
391
  ]
392
  },
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 64,
6
  "metadata": {},
7
  "outputs": [
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  {
9
  "name": "stdout",
10
  "output_type": "stream",
11
  "text": [
12
+ "Running on local URL: http://127.0.0.1:7904\n",
13
  "\n",
14
  "To create a public link, set `share=True` in `launch()`.\n"
15
  ]
 
17
  {
18
  "data": {
19
  "text/html": [
20
+ "<div><iframe src=\"http://127.0.0.1:7904/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
21
  ],
22
  "text/plain": [
23
  "<IPython.core.display.HTML object>"
 
30
  "data": {
31
  "text/plain": []
32
  },
33
+ "execution_count": 64,
34
  "metadata": {},
35
  "output_type": "execute_result"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  }
37
  ],
38
  "source": [
 
46
  "import torch\n",
47
  "\n",
48
  "from scipy.stats import gaussian_kde\n",
49
+ "from bmi_topography import Topography\n",
50
+ "import earthpy.spatial as es\n",
51
  "\n",
52
  "import obspy\n",
53
  "from obspy.clients.fdsn import Client\n",
 
60
  "\n",
61
  "import matplotlib.pyplot as plt\n",
62
  "import matplotlib.dates as mdates\n",
63
+ "from matplotlib.colors import LightSource\n",
64
  "\n",
65
  "from glob import glob\n",
66
  "\n",
 
77
  "\n",
78
  " return processed_input, p_phase, s_phase\n",
79
  "\n",
80
+ "def mark_phases(waveform, uploaded_file):\n",
81
+ "\n",
82
+ " if uploaded_file is not None:\n",
83
+ " waveform = uploaded_file.name\n",
84
+ "\n",
85
  " processed_input, p_phase, s_phase = make_prediction(waveform)\n",
86
  "\n",
87
  " # Create a plot of the waveform with the phases marked\n",
88
  " if sum(processed_input[0][2] == 0): #if input is 1C\n",
89
  " fig, ax = plt.subplots(nrows=2, figsize=(10, 2), sharex=True)\n",
90
  "\n",
91
+ " ax[0].plot(processed_input[0][0], color='black', lw=1)\n",
92
  " ax[0].set_ylabel('Norm. Ampl.')\n",
93
  "\n",
94
  " else: #if input is 3C\n",
95
  " fig, ax = plt.subplots(nrows=4, figsize=(10, 6), sharex=True)\n",
96
+ " ax[0].plot(processed_input[0][0], color='black', lw=1)\n",
97
+ " ax[1].plot(processed_input[0][1], color='black', lw=1)\n",
98
+ " ax[2].plot(processed_input[0][2], color='black', lw=1)\n",
99
  "\n",
100
  " ax[0].set_ylabel('Z')\n",
101
  " ax[1].set_ylabel('N')\n",
 
127
  " plt.close(fig)\n",
128
  " return image\n",
129
  "\n",
130
+ "def bin_distances(distances, bin_size=10):\n",
131
+ " # Bin the distances into groups of `bin_size` kilometers\n",
132
+ " binned_distances = {}\n",
133
+ " for i, distance in enumerate(distances):\n",
134
+ " bin_index = distance // bin_size\n",
135
+ " if bin_index not in binned_distances:\n",
136
+ " binned_distances[bin_index] = (distance, i)\n",
137
+ " elif i < binned_distances[bin_index][1]:\n",
138
+ " binned_distances[bin_index] = (distance, i)\n",
139
+ "\n",
140
+ " # Select the first distance in each bin and its index\n",
141
+ " first_distances = []\n",
142
+ " for bin_index in binned_distances:\n",
143
+ " first_distance, first_distance_index = binned_distances[bin_index]\n",
144
+ " first_distances.append(first_distance_index)\n",
145
+ " \n",
146
+ " return first_distances\n",
147
+ "\n",
148
  "def variance_coefficient(residuals):\n",
149
  " # calculate the variance of the residuals\n",
150
  " var = residuals.var()\n",
 
151
  " # scale the variance to a coefficient between 0 and 1\n",
152
  " coeff = 1 - (var / (residuals.max() - residuals.min()))\n",
 
153
  " return coeff\n",
154
  "\n",
155
+ "def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source_depth_km, velocity_model, max_waveforms):\n",
156
  " distances, t0s, st_lats, st_lons, waveforms = [], [], [], [], []\n",
157
  " \n",
158
  " taup_model = TauPyModel(model=velocity_model)\n",
159
  " client = Client(client_name)\n",
160
  "\n",
161
  " window = radius_km / 111.2\n",
162
+ " max_waveforms = int(max_waveforms)\n",
163
  "\n",
164
  " assert eq_lat - window > -90 and eq_lat + window < 90, \"Latitude out of bounds\"\n",
165
  " assert eq_lon - window > -180 and eq_lon + window < 180, \"Longitude out of bounds\"\n",
 
167
  " starttime = obspy.UTCDateTime(timestamp)\n",
168
  " endtime = starttime + 120\n",
169
  "\n",
170
+ " try:\n",
171
+ " print('Starting to download inventory')\n",
172
+ " inv = client.get_stations(network=\"*\", station=\"*\", location=\"*\", channel=\"*H*\", \n",
173
+ " starttime=starttime, endtime=endtime, \n",
174
+ " minlatitude=(eq_lat-window), maxlatitude=(eq_lat+window),\n",
175
+ " minlongitude=(eq_lon-window), maxlongitude=(eq_lon+window), \n",
176
+ " level='station')\n",
177
+ " print('Finished downloading inventory')\n",
178
+ " except (IndexError, FDSNNoDataException, FDSNTimeoutException, FDSNInternalServerException):\n",
179
+ " fig, ax = plt.subplots()\n",
180
+ " ax.text(0.5,0.5,'Something is wrong with the data provider, try another')\n",
181
+ " fig.canvas.draw();\n",
182
+ " image = np.array(fig.canvas.renderer.buffer_rgba())\n",
183
+ " plt.close(fig)\n",
184
+ " return image\n",
185
  " \n",
186
  " waveforms = []\n",
187
  " cached_waveforms = glob(\"data/cached/*.mseed\")\n",
188
  "\n",
189
  " for network in inv:\n",
190
+ " # Skip the SYntetic networks\n",
191
+ " if network.code == 'SY':\n",
192
+ " continue\n",
193
  " for station in network:\n",
194
+ " print(f\"Processing {network.code}.{station.code}...\")\n",
195
+ " distance = locations2degrees(eq_lat, eq_lon, station.latitude, station.longitude)\n",
 
 
 
 
196
  "\n",
197
+ " arrivals = taup_model.get_travel_times(source_depth_in_km=source_depth_km, \n",
198
+ " distance_in_degree=distance, \n",
199
+ " phase_list=[\"P\", \"S\"])\n",
200
  "\n",
201
+ " if len(arrivals) > 0:\n",
 
202
  "\n",
203
+ " starttime = obspy.UTCDateTime(timestamp) + arrivals[0].time - 15\n",
204
+ " endtime = starttime + 60\n",
205
+ " try:\n",
206
  " if f\"data/cached/{network.code}_{station.code}_{starttime}.mseed\" not in cached_waveforms:\n",
207
+ " print('Downloading waveform')\n",
208
  " waveform = client.get_waveforms(network=network.code, station=station.code, location=\"*\", channel=\"*\", \n",
209
  " starttime=starttime, endtime=endtime)\n",
210
  " waveform.write(f\"data/cached/{network.code}_{station.code}_{starttime}.mseed\", format=\"MSEED\")\n",
211
+ " print('Finished downloading and caching waveform')\n",
212
  " else:\n",
213
+ " print('Reading cached waveform')\n",
214
  " waveform = obspy.read(f\"data/cached/{network.code}_{station.code}_{starttime}.mseed\")\n",
215
+ " \n",
216
+ "\n",
217
+ " except (IndexError, FDSNNoDataException, FDSNTimeoutException, FDSNInternalServerException):\n",
218
+ " print(f'Skipping {network.code}_{station.code}_{starttime}')\n",
219
+ " continue\n",
220
+ " \n",
221
+ " waveform = waveform.select(channel=\"H[BH][ZNE]\")\n",
222
+ " waveform = waveform.merge(fill_value=0)\n",
223
+ " waveform = waveform[:3]\n",
224
+ " \n",
225
+ " len_check = [len(x.data) for x in waveform]\n",
226
+ " if len(set(len_check)) > 1:\n",
227
+ " continue\n",
228
+ "\n",
229
+ " if len(waveform) == 3:\n",
230
+ " try:\n",
231
+ " waveform = prepare_waveform(np.stack([x.data for x in waveform]))\n",
232
  "\n",
 
 
 
 
 
 
233
  " distances.append(distance)\n",
234
  " t0s.append(starttime)\n",
235
  " st_lats.append(station.latitude)\n",
236
  " st_lons.append(station.longitude)\n",
237
  " waveforms.append(waveform)\n",
238
  "\n",
239
+ " print(f\"Added {network.code}.{station.code} to the list of waveforms\")\n",
240
+ "\n",
241
+ " except:\n",
242
+ " continue\n",
243
+ " \n",
244
+ " \n",
245
+ " # If there are no waveforms, return an empty plot\n",
246
+ " if len(waveforms) == 0:\n",
247
+ " fig, ax = plt.subplots()\n",
248
+ " ax.text(0.5,0.5,'No waveforms found')\n",
249
+ " fig.canvas.draw();\n",
250
+ " image = np.array(fig.canvas.renderer.buffer_rgba())\n",
251
+ " plt.close(fig)\n",
252
+ " return image\n",
253
+ " \n",
254
+ "\n",
255
+ " first_distances = bin_distances(distances, bin_size=10/111.2)\n",
256
+ "\n",
257
+ " # Edge case when there are way too many waveforms to process\n",
258
+ " selection_indexes = np.random.choice(first_distances, \n",
259
+ " np.min([len(first_distances), max_waveforms]),\n",
260
+ " replace=False)\n",
261
+ "\n",
262
+ " waveforms = np.array(waveforms)[selection_indexes]\n",
263
+ " distances = np.array(distances)[selection_indexes]\n",
264
+ " t0s = np.array(t0s)[selection_indexes]\n",
265
+ " st_lats = np.array(st_lats)[selection_indexes]\n",
266
+ " st_lons = np.array(st_lons)[selection_indexes]\n",
267
  "\n",
268
+ " waveforms = [torch.tensor(waveform) for waveform in waveforms]\n",
269
+ "\n",
270
+ " print('Starting to run predictions')\n",
271
  " with torch.no_grad():\n",
272
  " waveforms_torch = torch.vstack(waveforms)\n",
273
  " output = model(waveforms_torch)\n",
 
279
  " p_max_confidence = np.min([p_phases[i::len(waveforms)].std() for i in range(len(waveforms))]) \n",
280
  " s_max_confidence = np.min([s_phases[i::len(waveforms)].std() for i in range(len(waveforms))])\n",
281
  "\n",
282
+ " print(f\"Starting plotting {len(waveforms)} waveforms\")\n",
283
+ " fig, ax = plt.subplots(nrows=1, ncols=3, figsize=(10, 3))\n",
284
+ "\n",
285
+ " # Plot topography\n",
286
+ " print('Fetching topography')\n",
287
+ " params = Topography.DEFAULT.copy()\n",
288
+ " extra_window = 0.5\n",
289
+ " params[\"south\"] = np.min([st_lats.min(), eq_lat])-extra_window\n",
290
+ " params[\"north\"] = np.max([st_lats.max(), eq_lat])+extra_window\n",
291
+ " params[\"west\"] = np.min([st_lons.min(), eq_lon])-extra_window\n",
292
+ " params[\"east\"] = np.max([st_lons.max(), eq_lon])+extra_window\n",
293
+ "\n",
294
+ " topo_map = Topography(**params)\n",
295
+ " topo_map.fetch()\n",
296
+ " topo_map.load()\n",
297
+ "\n",
298
+ " print('Plotting topo')\n",
299
+ " hillshade = es.hillshade(topo_map.da[0], altitude=10)\n",
300
+ " \n",
301
+ " topo_map.da.plot(ax = ax[1], cmap='Greys', add_colorbar=False, add_labels=False)\n",
302
+ " topo_map.da.plot(ax = ax[2], cmap='Greys', add_colorbar=False, add_labels=False)\n",
303
+ " ax[1].imshow(hillshade, cmap=\"Greys\", alpha=0.5)\n",
304
+ "\n",
305
  " for i in range(len(waveforms)):\n",
306
+ " print(f\"Plotting waveform {i+1}/{len(waveforms)}\")\n",
307
  " current_P = p_phases[i::len(waveforms)]\n",
308
  " current_S = s_phases[i::len(waveforms)]\n",
309
  "\n",
 
321
  " ax[0].set_ylabel('Z')\n",
322
  "\n",
323
  " ax[0].xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S'))\n",
324
+ " ax[0].xaxis.set_major_locator(mdates.SecondLocator(interval=20))\n",
325
+ "\n",
326
+ " delta_t = t0s[i].timestamp - obspy.UTCDateTime(timestamp).timestamp\n",
327
+ "\n",
328
+ " velocity_p = (distances[i]*111.2)/(delta_t+current_P.mean()*60).item()\n",
329
+ " velocity_s = (distances[i]*111.2)/(delta_t+current_S.mean()*60).item()\n",
330
+ "\n",
331
+ " print(f\"Station {st_lats[i]}, {st_lons[i]} has P velocity {velocity_p} and S velocity {velocity_s}\")\n",
332
  " \n",
333
+ " # Generate an array from st_lat to eq_lat and from st_lon to eq_lon\n",
334
+ " x = np.linspace(st_lons[i], eq_lon, 50)\n",
335
+ " y = np.linspace(st_lats[i], eq_lat, 50)\n",
336
+ " \n",
337
+ " # Plot the array\n",
338
+ " ax[1].scatter(x, y, c=np.zeros_like(x)+velocity_p, alpha=0.5, vmin=0, vmax=8)\n",
339
+ " ax[2].scatter(x, y, c=np.zeros_like(x)+velocity_s, alpha=0.5, vmin=0, vmax=8)\n",
340
+ "\n",
341
+ " # Add legend\n",
342
  " ax[0].scatter(None, None, color='r', marker='|', label='P')\n",
343
  " ax[0].scatter(None, None, color='b', marker='|', label='S')\n",
344
  " ax[0].legend()\n",
345
  "\n",
346
+ " print('Plotting stations')\n",
347
+ " for i in range(1,3):\n",
348
+ " ax[i].scatter(st_lons, st_lats, color='b', label='Stations')\n",
349
+ " ax[i].scatter(eq_lon, eq_lat, color='r', marker='*', label='Earthquake')\n",
350
+ "\n",
351
+ " # Generate colorbar for the velocity plot\n",
352
+ " cbar = plt.colorbar(ax[1].scatter(None, None, c=velocity_p, alpha=0.5, vmin=0, vmax=8), ax=ax[1])\n",
353
+ " cbar.set_label('P Velocity (km/s)')\n",
354
+ " ax[1].set_title('P Velocity')\n",
355
+ "\n",
356
+ " cbar = plt.colorbar(ax[2].scatter(None, None, c=velocity_s, alpha=0.5, vmin=0, vmax=8), ax=ax[2])\n",
357
+ " cbar.set_label('S Velocity (km/s)')\n",
358
+ " ax[2].set_title('S Velocity')\n",
359
+ "\n",
360
+ "\n",
361
+ "\n",
362
+ " plt.subplots_adjust(hspace=0., wspace=0.5)\n",
363
+ "\n",
364
  " fig.canvas.draw();\n",
365
  " image = np.array(fig.canvas.renderer.buffer_rgba())\n",
366
  " plt.close(fig)\n",
 
374
  "model.eval()\n",
375
  "\n",
376
  "with gr.Blocks() as demo:\n",
377
+ " gr.HTML(\"\"\"<h1>PhaseHunter</h1>\n",
378
+ "<p>This app allows one to detect <mark style=\"background-color: red; color: white;\">P</mark> and <mark style=\"background-color: blue; color: white;\">S</mark> seismic phases along with \n",
379
+ "\n",
380
+ "<span style=\"font-size: 24px; font-weight: bold;\">\n",
381
+ " <span style=\"color: #4c4c4c; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;\">u</span>\n",
382
+ " <span style=\"color: #808080; text-shadow: 1px 1px 0 #bbb, -1px -1px 0 #bbb, 1px -1px 0 #bbb, -1px 1px 0 #bbb;\">n</span>\n",
383
+ " <span style=\"color: #b3b3b3; text-shadow: 1px 1px 0 #aaa, -1px -1px 0 #aaa, 1px -1px 0 #aaa, -1px 1px 0 #aaa;\">c</span>\n",
384
+ " <span style=\"color: #d9d9d9; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;\">e</span>\n",
385
+ " <span style=\"color: #f2f2f2; text-shadow: 1px 1px 0 #eee, -1px -1px 0 #eee, 1px -1px 0 #eee, -1px 1px 0 #eee;\">r</span>\n",
386
+ " <span style=\"color: #d9d9d9; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;\">t</span>\n",
387
+ " <span style=\"color: #b3b3b3; text-shadow: 1px 1px 0 #aaa, -1px -1px 0 #aaa, 1px -1px 0 #aaa, -1px 1px 0 #aaa;\">a</span>\n",
388
+ " <span style=\"color: #808080; text-shadow: 1px 1px 0 #bbb, -1px -1px 0 #bbb, 1px -1px 0 #bbb, -1px 1px 0 #bbb;\">i</span>\n",
389
+ " <span style=\"color: #4c4c4c; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;\">n</span>\n",
390
+ " <span style=\"color: #808080; text-shadow: 1px 1px 0 #bbb, -1px -1px 0 #bbb, 1px -1px 0 #bbb, -1px 1px 0 #bbb;\">t</span>\n",
391
+ " <span style=\"color: #b3b3b3; text-shadow: 1px 1px 0 #aaa, -1px -1px 0 #aaa, 1px -1px 0 #aaa, -1px 1px 0 #aaa;\">y</span>\n",
392
+ " \n",
393
+ "</span>\n",
394
+ " of the detection.</p>\n",
395
+ "<ol>\n",
396
+ " <li>By selecting one of the sample waveforms.</li>\n",
397
+ " <li>By uploading your own waveform.</li>\n",
398
+ " <li>By selecting an earthquake from the global earthquake catalogue.</li>\n",
399
+ "</ol>\n",
400
+ "<p>Please upload your waveform in <code>.npy</code> (numpy) format.</p>\n",
401
+ "<p>Your waveform should be sampled at 100 samples per second and have 3 (Z, N, E) or 1 (Z) channels. If your file is longer than 60 seconds, the app will only use the first 60 seconds of the waveform.</p>\n",
402
  " \"\"\")\n",
403
+ " with gr.Tab(\"Try on a single station\"):\n",
404
+ " with gr.Row(): \n",
405
+ " # Define the input and output types for Gradio\n",
406
+ " inputs = gr.Dropdown(\n",
407
+ " [\"data/sample/sample_0.npy\", \n",
408
+ " \"data/sample/sample_1.npy\", \n",
409
+ " \"data/sample/sample_2.npy\"], \n",
410
+ " label=\"Sample waveform\", \n",
411
+ " info=\"Select one of the samples\",\n",
412
+ " value = \"data/sample/sample_0.npy\"\n",
413
+ " )\n",
414
+ "\n",
415
+ " upload = gr.File(label=\"Or upload your own waveform\")\n",
416
  "\n",
417
  " button = gr.Button(\"Predict phases\")\n",
418
+ " outputs = gr.Image(label='Waveform with Phases Marked', type='numpy', interactive=False)\n",
419
  " \n",
420
+ " button.click(mark_phases, inputs=[inputs, upload], outputs=outputs)\n",
421
  " \n",
422
  " with gr.Tab(\"Select earthquake from catalogue\"):\n",
423
+ " gr.Markdown(\"\"\"Select an earthquake from the global earthquake catalogue and the app will download the waveform from the FDSN client of your choice.\n",
424
+ " \"\"\")\n",
425
  " \n",
426
  " client_inputs = gr.Dropdown(\n",
427
  " choices = list(URL_MAPPINGS.keys()), \n",
 
430
  " value = \"IRIS\",\n",
431
  " interactive=True\n",
432
  " )\n",
433
+ "\n",
434
  " with gr.Row(): \n",
435
  "\n",
436
  " timestamp_inputs = gr.Textbox(value='2019-07-04 17:33:49',\n",
 
464
  " interactive=True)\n",
465
  " \n",
466
  " velocity_inputs = gr.Dropdown(\n",
467
+ " choices = ['1066a', '1066b', 'ak135', \n",
468
+ " 'ak135f', 'herrin', 'iasp91', \n",
469
+ " 'jb', 'prem', 'pwdk'], \n",
470
  " label=\"1D velocity model\", \n",
471
  " info=\"Velocity model for station selection\",\n",
472
  " value = \"1066a\",\n",
473
  " interactive=True\n",
474
  " )\n",
475
+ "\n",
476
+ " max_waveforms_inputs = gr.Slider(minimum=1,\n",
477
+ " maximum=100,\n",
478
+ " value=10,\n",
479
+ " label=\"Max waveforms per section\",\n",
480
+ " step=1,\n",
481
+ " info=\"Maximum number of waveforms to show per section\\n (to avoid long prediction times)\",\n",
482
+ " interactive=True,\n",
483
+ " )\n",
484
  " \n",
485
  " button = gr.Button(\"Predict phases\")\n",
486
  " outputs_section = gr.Image(label='Waveforms with Phases Marked', type='numpy', interactive=False)\n",
 
488
  " button.click(predict_on_section, \n",
489
  " inputs=[client_inputs, timestamp_inputs, \n",
490
  " eq_lat_inputs, eq_lon_inputs, \n",
491
+ " radius_inputs, source_depth_inputs, \n",
492
+ " velocity_inputs, max_waveforms_inputs],\n",
493
  " outputs=outputs_section)\n",
494
  "\n",
 
 
 
 
 
 
495
  "demo.launch()"
496
  ]
497
  },
app.py CHANGED
@@ -8,6 +8,8 @@ from phasehunter.data_preparation import prepare_waveform
8
  import torch
9
 
10
  from scipy.stats import gaussian_kde
 
 
11
 
12
  import obspy
13
  from obspy.clients.fdsn import Client
@@ -20,6 +22,7 @@ from obspy.clients.fdsn.header import URL_MAPPINGS
20
 
21
  import matplotlib.pyplot as plt
22
  import matplotlib.dates as mdates
 
23
 
24
  from glob import glob
25
 
@@ -36,21 +39,25 @@ def make_prediction(waveform):
36
 
37
  return processed_input, p_phase, s_phase
38
 
39
- def mark_phases(waveform):
 
 
 
 
40
  processed_input, p_phase, s_phase = make_prediction(waveform)
41
 
42
  # Create a plot of the waveform with the phases marked
43
  if sum(processed_input[0][2] == 0): #if input is 1C
44
  fig, ax = plt.subplots(nrows=2, figsize=(10, 2), sharex=True)
45
 
46
- ax[0].plot(processed_input[0][0])
47
  ax[0].set_ylabel('Norm. Ampl.')
48
 
49
  else: #if input is 3C
50
  fig, ax = plt.subplots(nrows=4, figsize=(10, 6), sharex=True)
51
- ax[0].plot(processed_input[0][0])
52
- ax[1].plot(processed_input[0][1])
53
- ax[2].plot(processed_input[0][2])
54
 
55
  ax[0].set_ylabel('Z')
56
  ax[1].set_ylabel('N')
@@ -82,22 +89,39 @@ def mark_phases(waveform):
82
  plt.close(fig)
83
  return image
 
85
  def variance_coefficient(residuals):
86
  # calculate the variance of the residuals
87
  var = residuals.var()
88
-
89
  # scale the variance to a coefficient between 0 and 1
90
  coeff = 1 - (var / (residuals.max() - residuals.min()))
91
-
92
  return coeff
93
 
94
- def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source_depth_km, velocity_model):
95
  distances, t0s, st_lats, st_lons, waveforms = [], [], [], [], []
96
 
97
  taup_model = TauPyModel(model=velocity_model)
98
  client = Client(client_name)
99
 
100
  window = radius_km / 111.2
 
101
 
102
  assert eq_lat - window > -90 and eq_lat + window < 90, "Latitude out of bounds"
103
  assert eq_lon - window > -180 and eq_lon + window < 180, "Longitude out of bounds"
@@ -105,59 +129,107 @@ def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source
105
  starttime = obspy.UTCDateTime(timestamp)
106
  endtime = starttime + 120
107
 
108
- inv = client.get_stations(network="*", station="*", location="*", channel="*H*",
109
- starttime=starttime, endtime=endtime,
110
- minlatitude=(eq_lat-window), maxlatitude=(eq_lat+window),
111
- minlongitude=(eq_lon-window), maxlongitude=(eq_lon+window),
112
- level='station')
 
 
113
 
114
  waveforms = []
115
  cached_waveforms = glob("data/cached/*.mseed")
116
 
117
  for network in inv:
 
 
 
118
  for station in network:
119
- try:
120
- distance = locations2degrees(eq_lat, eq_lon, station.latitude, station.longitude)
121
 
122
- arrivals = taup_model.get_travel_times(source_depth_in_km=source_depth_km,
123
- distance_in_degree=distance,
124
- phase_list=["P", "S"])
125
 
126
- if len(arrivals) > 0:
127
-
128
- starttime = obspy.UTCDateTime(timestamp) + arrivals[0].time - 15
129
- endtime = starttime + 60
130
 
 
 
 
131
  if f"data/cached/{network.code}_{station.code}_{starttime}.mseed" not in cached_waveforms:
 
132
  waveform = client.get_waveforms(network=network.code, station=station.code, location="*", channel="*",
133
  starttime=starttime, endtime=endtime)
134
  waveform.write(f"data/cached/{network.code}_{station.code}_{starttime}.mseed", format="MSEED")
 
135
  else:
 
136
  waveform = obspy.read(f"data/cached/{network.code}_{station.code}_{starttime}.mseed")
137
-
138
- waveform = waveform.select(channel="H[BH][ZNE]")
139
- waveform = waveform.merge(fill_value=0)
140
- waveform = waveform[:3]
141
-
142
- len_check = [len(x.data) for x in waveform]
143
- if len(set(len_check)) > 1:
144
- continue
 
 
 
 
 
 
 
 
 
145
 
146
- if len(waveform) == 3:
147
- try:
148
- waveform = prepare_waveform(np.stack([x.data for x in waveform]))
149
- except:
150
- continue
151
-
152
  distances.append(distance)
153
  t0s.append(starttime)
154
  st_lats.append(station.latitude)
155
  st_lons.append(station.longitude)
156
  waveforms.append(waveform)
157
 
158
- except (IndexError, FDSNNoDataException, FDSNTimeoutException):
159
- continue
 
 
160
 
 
 
 
161
  with torch.no_grad():
162
  waveforms_torch = torch.vstack(waveforms)
163
  output = model(waveforms_torch)
@@ -169,8 +241,31 @@ def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source
169
  p_max_confidence = np.min([p_phases[i::len(waveforms)].std() for i in range(len(waveforms))])
170
  s_max_confidence = np.min([s_phases[i::len(waveforms)].std() for i in range(len(waveforms))])
171
 
172
- fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10, 3), sharex=True)
 
 
173
  for i in range(len(waveforms)):
 
174
  current_P = p_phases[i::len(waveforms)]
175
  current_S = s_phases[i::len(waveforms)]
176
 
@@ -188,17 +283,46 @@ def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source
188
  ax[0].set_ylabel('Z')
189
 
190
  ax[0].xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S'))
191
- ax[0].xaxis.set_major_locator(mdates.SecondLocator(interval=5))
 
 
192
 
 
 
 
 
 
193
  ax[0].scatter(None, None, color='r', marker='|', label='P')
194
  ax[0].scatter(None, None, color='b', marker='|', label='S')
195
  ax[0].legend()
196
 
197
- ax[1].scatter(st_lats, st_lons, color='b', marker='d', label='Stations')
198
- ax[1].scatter(eq_lat, eq_lon, color='r', marker='*', label='Earthquake')
199
- ax[1].legend()
200
- plt.subplots_adjust(hspace=0., wspace=0.)
201
-
 
 
 
 
 
 
 
 
 
 
 
 
 
202
  fig.canvas.draw();
203
  image = np.array(fig.canvas.renderer.buffer_rgba())
204
  plt.close(fig)
@@ -212,30 +336,54 @@ model = Onset_picker.load_from_checkpoint("./weights.ckpt",
212
  model.eval()
213
 
214
  with gr.Blocks() as demo:
215
- gr.Markdown("# PhaseHunter")
216
- gr.Markdown("""This app allows one to detect P and S seismic phases along with uncertainty of the detection.
217
- The app can be used in three ways: either by selecting one of the sample waveforms;
218
- or by selecting an earthquake from the global earthquake catalogue;
219
- or by uploading a waveform of interest.
 
 
220
  """)
221
- with gr.Tab("Default example"):
222
- # Define the input and output types for Gradio
223
- inputs = gr.Dropdown(
224
- ["data/sample/sample_0.npy",
225
- "data/sample/sample_1.npy",
226
- "data/sample/sample_2.npy"],
227
- label="Sample waveform",
228
- info="Select one of the samples",
229
- value = "data/sample/sample_0.npy"
230
- )
 
 
 
231
 
232
  button = gr.Button("Predict phases")
233
- outputs = gr.outputs.Image(label='Waveform with Phases Marked', type='numpy')
234
 
235
- button.click(mark_phases, inputs=inputs, outputs=outputs)
236
 
237
  with gr.Tab("Select earthquake from catalogue"):
238
- gr.Markdown('TEST')
 
239
 
240
  client_inputs = gr.Dropdown(
241
  choices = list(URL_MAPPINGS.keys()),
@@ -244,6 +392,7 @@ with gr.Blocks() as demo:
244
  value = "IRIS",
245
  interactive=True
246
  )
 
247
  with gr.Row():
248
 
249
  timestamp_inputs = gr.Textbox(value='2019-07-04 17:33:49',
@@ -277,13 +426,23 @@ with gr.Blocks() as demo:
277
  interactive=True)
278
 
279
  velocity_inputs = gr.Dropdown(
280
- choices = ['1066a', '1066b', 'ak135', 'ak135f', 'herrin', 'iasp91', 'jb', 'prem', 'pwdk'],
 
 
281
  label="1D velocity model",
282
  info="Velocity model for station selection",
283
  value = "1066a",
284
  interactive=True
285
  )
286
-
 
 
 
 
 
 
 
 
287
 
288
  button = gr.Button("Predict phases")
289
  outputs_section = gr.Image(label='Waveforms with Phases Marked', type='numpy', interactive=False)
@@ -291,13 +450,8 @@ with gr.Blocks() as demo:
291
  button.click(predict_on_section,
292
  inputs=[client_inputs, timestamp_inputs,
293
  eq_lat_inputs, eq_lon_inputs,
294
- radius_inputs, source_depth_inputs, velocity_inputs],
 
295
  outputs=outputs_section)
296
 
297
- with gr.Tab("Predict on your own waveform"):
298
- gr.Markdown("""
299
- Please upload your waveform in .npy (numpy) format.
300
- Your waveform should be sampled at 100 sps and have 3 (Z, N, E) or 1 (Z) channels.
301
- """)
302
-
303
  demo.launch()
 
8
  import torch
9
 
10
  from scipy.stats import gaussian_kde
11
+ from bmi_topography import Topography
12
+ import earthpy.spatial as es
13
 
14
  import obspy
15
  from obspy.clients.fdsn import Client
 
22
 
23
  import matplotlib.pyplot as plt
24
  import matplotlib.dates as mdates
25
+ from matplotlib.colors import LightSource
26
 
27
  from glob import glob
28
 
 
39
 
40
  return processed_input, p_phase, s_phase
41
 
42
+ def mark_phases(waveform, uploaded_file):
43
+
44
+ if uploaded_file is not None:
45
+ waveform = uploaded_file.name
46
+
47
  processed_input, p_phase, s_phase = make_prediction(waveform)
48
 
49
  # Create a plot of the waveform with the phases marked
50
  if sum(processed_input[0][2] == 0): #if input is 1C
51
  fig, ax = plt.subplots(nrows=2, figsize=(10, 2), sharex=True)
52
 
53
+ ax[0].plot(processed_input[0][0], color='black', lw=1)
54
  ax[0].set_ylabel('Norm. Ampl.')
55
 
56
  else: #if input is 3C
57
  fig, ax = plt.subplots(nrows=4, figsize=(10, 6), sharex=True)
58
+ ax[0].plot(processed_input[0][0], color='black', lw=1)
59
+ ax[1].plot(processed_input[0][1], color='black', lw=1)
60
+ ax[2].plot(processed_input[0][2], color='black', lw=1)
61
 
62
  ax[0].set_ylabel('Z')
63
  ax[1].set_ylabel('N')
 
89
  plt.close(fig)
90
  return image
91
 
92
+ def bin_distances(distances, bin_size=10):
93
+ # Bin the distances into groups of `bin_size` kilometers
94
+ binned_distances = {}
95
+ for i, distance in enumerate(distances):
96
+ bin_index = distance // bin_size
97
+ if bin_index not in binned_distances:
98
+ binned_distances[bin_index] = (distance, i)
99
+ elif i < binned_distances[bin_index][1]:
100
+ binned_distances[bin_index] = (distance, i)
101
+
102
+ # Select the first distance in each bin and its index
103
+ first_distances = []
104
+ for bin_index in binned_distances:
105
+ first_distance, first_distance_index = binned_distances[bin_index]
106
+ first_distances.append(first_distance_index)
107
+
108
+ return first_distances
109
+
110
  def variance_coefficient(residuals):
111
  # calculate the variance of the residuals
112
  var = residuals.var()
 
113
  # scale the variance to a coefficient between 0 and 1
114
  coeff = 1 - (var / (residuals.max() - residuals.min()))
 
115
  return coeff
116
 
117
+ def predict_on_section(client_name, timestamp, eq_lat, eq_lon, radius_km, source_depth_km, velocity_model, max_waveforms):
118
  distances, t0s, st_lats, st_lons, waveforms = [], [], [], [], []
119
 
120
  taup_model = TauPyModel(model=velocity_model)
121
  client = Client(client_name)
122
 
123
  window = radius_km / 111.2
124
+ max_waveforms = int(max_waveforms)
125
 
126
  assert eq_lat - window > -90 and eq_lat + window < 90, "Latitude out of bounds"
127
  assert eq_lon - window > -180 and eq_lon + window < 180, "Longitude out of bounds"
 
129
  starttime = obspy.UTCDateTime(timestamp)
130
  endtime = starttime + 120
131
 
132
+ try:
133
+ print('Starting to download inventory')
134
+ inv = client.get_stations(network="*", station="*", location="*", channel="*H*",
135
+ starttime=starttime, endtime=endtime,
136
+ minlatitude=(eq_lat-window), maxlatitude=(eq_lat+window),
137
+ minlongitude=(eq_lon-window), maxlongitude=(eq_lon+window),
138
+ level='station')
139
+ print('Finished downloading inventory')
140
+ except (IndexError, FDSNNoDataException, FDSNTimeoutException, FDSNInternalServerException):
141
+ fig, ax = plt.subplots()
142
+ ax.text(0.5,0.5,'Something is wrong with the data provider, try another')
143
+ fig.canvas.draw();
144
+ image = np.array(fig.canvas.renderer.buffer_rgba())
145
+ plt.close(fig)
146
+ return image
147
 
148
  waveforms = []
149
  cached_waveforms = glob("data/cached/*.mseed")
150
 
151
  for network in inv:
152
+ # Skip the synthetic (SY) networks
153
+ if network.code == 'SY':
154
+ continue
155
  for station in network:
156
+ print(f"Processing {network.code}.{station.code}...")
157
+ distance = locations2degrees(eq_lat, eq_lon, station.latitude, station.longitude)
158
 
159
+ arrivals = taup_model.get_travel_times(source_depth_in_km=source_depth_km,
160
+ distance_in_degree=distance,
161
+ phase_list=["P", "S"])
162
 
163
+ if len(arrivals) > 0:
 
 
 
164
 
165
+ starttime = obspy.UTCDateTime(timestamp) + arrivals[0].time - 15
166
+ endtime = starttime + 60
167
+ try:
168
  if f"data/cached/{network.code}_{station.code}_{starttime}.mseed" not in cached_waveforms:
169
+ print('Downloading waveform')
170
  waveform = client.get_waveforms(network=network.code, station=station.code, location="*", channel="*",
171
  starttime=starttime, endtime=endtime)
172
  waveform.write(f"data/cached/{network.code}_{station.code}_{starttime}.mseed", format="MSEED")
173
+ print('Finished downloading and caching waveform')
174
  else:
175
+ print('Reading cached waveform')
176
  waveform = obspy.read(f"data/cached/{network.code}_{station.code}_{starttime}.mseed")
177
+
178
+
179
+ except (IndexError, FDSNNoDataException, FDSNTimeoutException, FDSNInternalServerException):
180
+ print(f'Skipping {network.code}_{station.code}_{starttime}')
181
+ continue
182
+
183
+ waveform = waveform.select(channel="H[BH][ZNE]")
184
+ waveform = waveform.merge(fill_value=0)
185
+ waveform = waveform[:3]
186
+
187
+ len_check = [len(x.data) for x in waveform]
188
+ if len(set(len_check)) > 1:
189
+ continue
190
+
191
+ if len(waveform) == 3:
192
+ try:
193
+ waveform = prepare_waveform(np.stack([x.data for x in waveform]))
194
 
 
 
 
 
 
 
195
  distances.append(distance)
196
  t0s.append(starttime)
197
  st_lats.append(station.latitude)
198
  st_lons.append(station.longitude)
199
  waveforms.append(waveform)
200
 
201
+ print(f"Added {network.code}.{station.code} to the list of waveforms")
202
+
203
+ except:
204
+ continue
205
+
206
+
207
+ # If there are no waveforms, return an empty plot
208
+ if len(waveforms) == 0:
209
+ fig, ax = plt.subplots()
210
+ ax.text(0.5,0.5,'No waveforms found')
211
+ fig.canvas.draw();
212
+ image = np.array(fig.canvas.renderer.buffer_rgba())
213
+ plt.close(fig)
214
+ return image
215
+
216
+
217
+ first_distances = bin_distances(distances, bin_size=10/111.2)
218
+
219
+ # Edge case when there are way too many waveforms to process
220
+ selection_indexes = np.random.choice(first_distances,
221
+ np.min([len(first_distances), max_waveforms]),
222
+ replace=False)
223
+
224
+ waveforms = np.array(waveforms)[selection_indexes]
225
+ distances = np.array(distances)[selection_indexes]
226
+ t0s = np.array(t0s)[selection_indexes]
227
+ st_lats = np.array(st_lats)[selection_indexes]
228
+ st_lons = np.array(st_lons)[selection_indexes]
229
 
230
+ waveforms = [torch.tensor(waveform) for waveform in waveforms]
231
+
232
+ print('Starting to run predictions')
233
  with torch.no_grad():
234
  waveforms_torch = torch.vstack(waveforms)
235
  output = model(waveforms_torch)
 
241
  p_max_confidence = np.min([p_phases[i::len(waveforms)].std() for i in range(len(waveforms))])
242
  s_max_confidence = np.min([s_phases[i::len(waveforms)].std() for i in range(len(waveforms))])
243
 
244
+ print(f"Starting plotting {len(waveforms)} waveforms")
245
+ fig, ax = plt.subplots(nrows=1, ncols=3, figsize=(10, 3))
246
+
247
+ # Plot topography
248
+ print('Fetching topography')
249
+ params = Topography.DEFAULT.copy()
250
+ extra_window = 0.5
251
+ params["south"] = np.min([st_lats.min(), eq_lat])-extra_window
252
+ params["north"] = np.max([st_lats.max(), eq_lat])+extra_window
253
+ params["west"] = np.min([st_lons.min(), eq_lon])-extra_window
254
+ params["east"] = np.max([st_lons.max(), eq_lon])+extra_window
255
+
256
+ topo_map = Topography(**params)
257
+ topo_map.fetch()
258
+ topo_map.load()
259
+
260
+ print('Plotting topo')
261
+ hillshade = es.hillshade(topo_map.da[0], altitude=10)
262
+
263
+ topo_map.da.plot(ax = ax[1], cmap='Greys', add_colorbar=False, add_labels=False)
264
+ topo_map.da.plot(ax = ax[2], cmap='Greys', add_colorbar=False, add_labels=False)
265
+ ax[1].imshow(hillshade, cmap="Greys", alpha=0.5)
266
+
267
  for i in range(len(waveforms)):
268
+ print(f"Plotting waveform {i+1}/{len(waveforms)}")
269
  current_P = p_phases[i::len(waveforms)]
270
  current_S = s_phases[i::len(waveforms)]
271
 
 
283
  ax[0].set_ylabel('Z')
284
 
285
  ax[0].xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S'))
286
+ ax[0].xaxis.set_major_locator(mdates.SecondLocator(interval=20))
287
+
288
+ delta_t = t0s[i].timestamp - obspy.UTCDateTime(timestamp).timestamp
289
+
290
+ velocity_p = (distances[i]*111.2)/(delta_t+current_P.mean()*60).item()
291
+ velocity_s = (distances[i]*111.2)/(delta_t+current_S.mean()*60).item()
292
+
293
+ print(f"Station {st_lats[i]}, {st_lons[i]} has P velocity {velocity_p} and S velocity {velocity_s}")
294
+
295
+ # Generate an array from st_lat to eq_lat and from st_lon to eq_lon
296
+ x = np.linspace(st_lons[i], eq_lon, 50)
297
+ y = np.linspace(st_lats[i], eq_lat, 50)
298
 
299
+ # Plot the array
300
+ ax[1].scatter(x, y, c=np.zeros_like(x)+velocity_p, alpha=0.5, vmin=0, vmax=8)
301
+ ax[2].scatter(x, y, c=np.zeros_like(x)+velocity_s, alpha=0.5, vmin=0, vmax=8)
302
+
303
+ # Add legend
304
  ax[0].scatter(None, None, color='r', marker='|', label='P')
305
  ax[0].scatter(None, None, color='b', marker='|', label='S')
306
  ax[0].legend()
307
 
308
+ print('Plotting stations')
309
+ for i in range(1,3):
310
+ ax[i].scatter(st_lons, st_lats, color='b', label='Stations')
311
+ ax[i].scatter(eq_lon, eq_lat, color='r', marker='*', label='Earthquake')
312
+
313
+ # Generate colorbar for the velocity plot
314
+ cbar = plt.colorbar(ax[1].scatter(None, None, c=velocity_p, alpha=0.5, vmin=0, vmax=8), ax=ax[1])
315
+ cbar.set_label('P Velocity (km/s)')
316
+ ax[1].set_title('P Velocity')
317
+
318
+ cbar = plt.colorbar(ax[2].scatter(None, None, c=velocity_s, alpha=0.5, vmin=0, vmax=8), ax=ax[2])
319
+ cbar.set_label('S Velocity (km/s)')
320
+ ax[2].set_title('S Velocity')
321
+
322
+
323
+
324
+ plt.subplots_adjust(hspace=0., wspace=0.5)
325
+
326
  fig.canvas.draw();
327
  image = np.array(fig.canvas.renderer.buffer_rgba())
328
  plt.close(fig)
 
336
  model.eval()
337
 
338
  with gr.Blocks() as demo:
339
+ gr.HTML("""<h1>PhaseHunter</h1>
340
+ <p>This app allows one to detect <mark style="background-color: red; color: white;">P</mark> and <mark style="background-color: blue; color: white;">S</mark> seismic phases along with
341
+
342
+ <span style="font-size: 24px; font-weight: bold;">
343
+ <span style="color: #4c4c4c; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;">u</span>
344
+ <span style="color: #808080; text-shadow: 1px 1px 0 #bbb, -1px -1px 0 #bbb, 1px -1px 0 #bbb, -1px 1px 0 #bbb;">n</span>
345
+ <span style="color: #b3b3b3; text-shadow: 1px 1px 0 #aaa, -1px -1px 0 #aaa, 1px -1px 0 #aaa, -1px 1px 0 #aaa;">c</span>
346
+ <span style="color: #d9d9d9; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;">e</span>
347
+ <span style="color: #f2f2f2; text-shadow: 1px 1px 0 #eee, -1px -1px 0 #eee, 1px -1px 0 #eee, -1px 1px 0 #eee;">r</span>
348
+ <span style="color: #d9d9d9; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;">t</span>
349
+ <span style="color: #b3b3b3; text-shadow: 1px 1px 0 #aaa, -1px -1px 0 #aaa, 1px -1px 0 #aaa, -1px 1px 0 #aaa;">a</span>
350
+ <span style="color: #808080; text-shadow: 1px 1px 0 #bbb, -1px -1px 0 #bbb, 1px -1px 0 #bbb, -1px 1px 0 #bbb;">i</span>
351
+ <span style="color: #4c4c4c; text-shadow: 1px 1px 0 #ccc, -1px -1px 0 #ccc, 1px -1px 0 #ccc, -1px 1px 0 #ccc;">n</span>
352
+ <span style="color: #808080; text-shadow: 1px 1px 0 #bbb, -1px -1px 0 #bbb, 1px -1px 0 #bbb, -1px 1px 0 #bbb;">t</span>
353
+ <span style="color: #b3b3b3; text-shadow: 1px 1px 0 #aaa, -1px -1px 0 #aaa, 1px -1px 0 #aaa, -1px 1px 0 #aaa;">y</span>
354
+
355
+ </span>
356
+ of the detection.</p>
357
+ <ol>
358
+ <li>By selecting one of the sample waveforms.</li>
359
+ <li>By uploading your own waveform.</li>
360
+ <li>By selecting an earthquake from the global earthquake catalogue.</li>
361
+ </ol>
362
+ <p>Please upload your waveform in <code>.npy</code> (numpy) format.</p>
363
+ <p>Your waveform should be sampled at 100 samples per second and have 3 (Z, N, E) or 1 (Z) channels. If your file is longer than 60 seconds, the app will only use the first 60 seconds of the waveform.</p>
364
  """)
365
+ with gr.Tab("Try on a single station"):
366
+ with gr.Row():
367
+ # Define the input and output types for Gradio
368
+ inputs = gr.Dropdown(
369
+ ["data/sample/sample_0.npy",
370
+ "data/sample/sample_1.npy",
371
+ "data/sample/sample_2.npy"],
372
+ label="Sample waveform",
373
+ info="Select one of the samples",
374
+ value = "data/sample/sample_0.npy"
375
+ )
376
+
377
+ upload = gr.File(label="Or upload your own waveform")
378
 
379
  button = gr.Button("Predict phases")
380
+ outputs = gr.Image(label='Waveform with Phases Marked', type='numpy', interactive=False)
381
 
382
+ button.click(mark_phases, inputs=[inputs, upload], outputs=outputs)
383
 
384
  with gr.Tab("Select earthquake from catalogue"):
385
+ gr.Markdown("""Select an earthquake from the global earthquake catalogue and the app will download the waveform from the FDSN client of your choice.
386
+ """)
387
 
388
  client_inputs = gr.Dropdown(
389
  choices = list(URL_MAPPINGS.keys()),
 
392
  value = "IRIS",
393
  interactive=True
394
  )
395
+
396
  with gr.Row():
397
 
398
  timestamp_inputs = gr.Textbox(value='2019-07-04 17:33:49',
 
426
  interactive=True)
427
 
428
  velocity_inputs = gr.Dropdown(
429
+ choices = ['1066a', '1066b', 'ak135',
430
+ 'ak135f', 'herrin', 'iasp91',
431
+ 'jb', 'prem', 'pwdk'],
432
  label="1D velocity model",
433
  info="Velocity model for station selection",
434
  value = "1066a",
435
  interactive=True
436
  )
437
+
438
+ max_waveforms_inputs = gr.Slider(minimum=1,
439
+ maximum=100,
440
+ value=10,
441
+ label="Max waveforms per section",
442
+ step=1,
443
+ info="Maximum number of waveforms to show per section\n (to avoid long prediction times)",
444
+ interactive=True,
445
+ )
446
 
447
  button = gr.Button("Predict phases")
448
  outputs_section = gr.Image(label='Waveforms with Phases Marked', type='numpy', interactive=False)
 
450
  button.click(predict_on_section,
451
  inputs=[client_inputs, timestamp_inputs,
452
  eq_lat_inputs, eq_lon_inputs,
453
+ radius_inputs, source_depth_inputs,
454
+ velocity_inputs, max_waveforms_inputs],
455
  outputs=outputs_section)
456
 
 
 
 
 
 
 
457
  demo.launch()
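
For context, a minimal sketch (not part of this commit) of how a user could prepare an input file for the new upload control added above. The app description states waveforms should be .npy files sampled at 100 samples per second with 3 (Z, N, E) or 1 (Z) channels; the (3, 6000) array shape and the file name below are assumptions for illustration only.

import numpy as np

# Hypothetical 60 s, 3-component record at 100 samples per second.
sampling_rate = 100                      # sps, per the app description
n_samples = sampling_rate * 60           # the app uses the first 60 seconds

z = np.random.randn(n_samples)           # placeholder Z component
n = np.random.randn(n_samples)           # placeholder N component
e = np.random.randn(n_samples)           # placeholder E component

# Assumed layout: channels stacked row-wise, shape (3, 6000).
np.save("my_waveform.npy", np.stack([z, n, e]))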
data/.DS_Store CHANGED
Binary files a/data/.DS_Store and b/data/.DS_Store differ
 
data/cached/BC_JARAX_2023-04-01T01:16:13.997267Z.mseed ADDED: binary file (73.7 kB)
data/cached/CE_24944_2023-04-01T01:16:14.044529Z.mseed ADDED: binary file (28.7 kB)
data/cached/CE_24945_2023-04-01T01:16:13.506381Z.mseed ADDED: binary file (28.7 kB)
data/cached/CI_ADO_2019-07-04T17:33:53.650962Z.mseed ADDED: binary file (154 kB)
data/cached/CI_ARV_2019-07-04T17:33:53.096986Z.mseed ADDED: binary file (124 kB)
data/cached/CI_CJV2_2023-04-01T01:16:16.011425Z.mseed ADDED: binary file (188 kB)
data/cached/CI_CWC_2019-07-04T17:33:47.189005Z.mseed ADDED: binary file (154 kB)
data/cached/CI_EDW2_2019-07-04T17:33:49.567241Z.mseed ADDED: binary file (143 kB)
data/cached/CI_FUR_2019-07-04T17:33:49.310305Z.mseed ADDED: binary file (147 kB)
data/cached/CI_GRA_2019-07-04T17:33:53.959922Z.mseed ADDED: binary file (133 kB)
data/cached/CI_GSC_2019-07-04T17:33:47.536363Z.mseed ADDED: binary file (155 kB)
data/cached/CI_HEC_2019-07-04T17:33:56.148977Z.mseed ADDED: binary file (140 kB)
data/cached/CI_ISA_2019-07-04T17:33:46.297658Z.mseed ADDED: binary file (148 kB)
data/cached/CI_JNH2_2023-04-01T01:16:13.664044Z.mseed ADDED: binary file (180 kB)
data/cached/CI_JRC2_2019-07-04T17:33:39.947494Z.mseed ADDED: binary file (225 kB)
data/cached/CI_LRL_2019-07-04T17:33:40.248999Z.mseed ADDED: binary file (164 kB)
data/cached/CI_LRR2_2023-04-01T01:16:15.095101Z.mseed ADDED: binary file (180 kB)
data/cached/CI_MPM_2019-07-04T17:33:40.443346Z.mseed ADDED: binary file (124 kB)
data/cached/CI_OSI_2019-07-04T17:33:57.203547Z.mseed ADDED: binary file (131 kB)
data/cached/CI_PUT_2023-04-01T01:16:16.558724Z.mseed ADDED: binary file (332 kB)
data/cached/CI_Q0013_2019-07-04T17:33:54.489098Z.mseed ADDED: binary file (75.3 kB)
data/cached/CI_Q0024_2019-07-04T17:33:55.620507Z.mseed ADDED: binary file (75.8 kB)
data/cached/CI_Q0035_2019-07-04T17:33:56.041452Z.mseed ADDED: binary file (75.3 kB)
data/cached/CI_Q0056_2019-07-04T17:33:50.407027Z.mseed ADDED: binary file (86.5 kB)
data/cached/CI_Q0061_2019-07-04T17:33:53.114695Z.mseed ADDED: binary file (81.9 kB)
data/cached/CI_Q0068_2019-07-04T17:33:46.411249Z.mseed ADDED: binary file (80.9 kB)
data/cached/CI_Q0072_2019-07-04T17:33:38.390421Z.mseed ADDED: binary file (83.5 kB)
data/cached/CI_RRX_2019-07-04T17:33:50.712219Z.mseed ADDED: binary file (144 kB)
data/cached/CI_SBB2_2023-04-01T01:16:15.609344Z.mseed ADDED: binary file (180 kB)
data/cached/CI_SHO_2019-07-04T17:33:51.673022Z.mseed ADDED: binary file (147 kB)
data/cached/CI_SLA_2019-07-04T17:33:40.190746Z.mseed ADDED: binary file (151 kB)
data/cached/CI_SRA_2023-04-01T01:16:14.887472Z.mseed ADDED: binary file (180 kB)
data/cached/CI_SRT_2019-07-04T17:33:38.029990Z.mseed ADDED: binary file (221 kB)
data/cached/CI_TIN_2019-07-04T17:33:55.946998Z.mseed ADDED: binary file (135 kB)
data/cached/CI_TOW2_2019-07-04T17:33:37.991033Z.mseed ADDED: binary file (229 kB)
data/cached/CI_VCS_2023-04-01T01:16:15.307897Z.mseed ADDED: binary file (180 kB)
data/cached/CI_VTV_2019-07-04T17:33:53.688913Z.mseed ADDED: binary file (153 kB)
data/cached/CI_WBM_2019-07-04T17:33:40.063644Z.mseed ADDED: binary file (225 kB)
data/cached/CI_WLS2_2023-04-01T01:16:13.623472Z.mseed ADDED: binary file (319 kB)
data/cached/CI_WMF_2019-07-04T17:33:41.867962Z.mseed ADDED: binary file (217 kB)
data/cached/CI_WRC2_2019-07-04T17:33:38.698107Z.mseed ADDED: binary file (229 kB)
data/cached/CI_WRV2_2019-07-04T17:33:40.843660Z.mseed ADDED: binary file (94.2 kB)
data/cached/CI_WVP2_2019-07-04T17:33:39.650418Z.mseed ADDED: binary file (94.2 kB)
data/cached/LB_DAC_2019-07-04T17:33:43.387184Z.mseed ADDED: binary file (115 kB)
data/cached/NN_FMT_2019-07-04T17:33:51.798341Z.mseed ADDED: binary file (30.2 kB)
data/cached/NN_GVN_2019-07-04T17:33:54.053591Z.mseed ADDED: binary file (30.2 kB)