Update app.py
app.py CHANGED
@@ -1,3 +1,12 @@
import gradio as gr
import pandas as pd
import numpy as np
@@ -9,45 +18,57 @@ import cartopy.feature as cfeature
import plotly.graph_objects as go
import plotly.express as px
from plotly.subplots import make_subplots
-
-import requests
-import os
-import argparse
-from datetime import datetime
-import statsmodels.api as sm
-import shutil
-import tempfile
-import csv
-from collections import defaultdict
from sklearn.manifold import TSNE
from sklearn.cluster import DBSCAN
from scipy.interpolate import interp1d

-
import tropycal.tracks as tracks

-#
parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
args = parser.parse_args()
DATA_PATH = args.data_path

-#
ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')

-#
BASIN_FILES = {
    'EP': 'ibtracs.EP.list.v04r01.csv',
    'NA': 'ibtracs.NA.list.v04r01.csv',
    'WP': 'ibtracs.WP.list.v04r01.csv'
}
-IBTRACS_BASE_URL = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/'
-
-CACHE_FILE = 'ibtracs_cache.pkl'
-CACHE_EXPIRY_DAYS = 0  # Force refresh for testing

-#
color_map = {
    'C5 Super Typhoon': 'rgb(255, 0, 0)',
    'C4 Very Strong Typhoon': 'rgb(255, 165, 0)',
@@ -73,7 +94,9 @@ taiwan_standard = {
    'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
}

-#
season_months = {
    'all': list(range(1, 13)),
    'summer': [6, 7, 8],
@@ -88,7 +111,9 @@ regions = {
    "Philippines": {"lat_min": 5, "lat_max": 21, "lon_min": 115, "lon_max": 130}
}

-#
def download_oni_file(url, filename):
    response = requests.get(url)
    response.raise_for_status()
@@ -98,8 +123,8 @@ def download_oni_file(url, filename):

def convert_oni_ascii_to_csv(input_file, output_file):
    data = defaultdict(lambda: [''] * 12)
-    season_to_month = {'DJF':
-                       'JJA':
    with open(input_file, 'r') as f:
        lines = f.readlines()[1:]
    for line in lines:
@@ -109,11 +134,11 @@ def convert_oni_ascii_to_csv(input_file, output_file):
            if season in season_to_month:
                month = season_to_month[season]
                if season == 'DJF':
-                    year = str(int(year)
                data[year][month-1] = anom
    with open(output_file, 'w', newline='') as f:
        writer = csv.writer(f)
-        writer.writerow(['Year',
        for year in sorted(data.keys()):
            writer.writerow([year] + data[year])

@@ -138,10 +163,10 @@ def load_data(oni_path, typhoon_path):

def process_oni_data(oni_data):
    oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
-    month_map = {'Jan':
-                 'Jul':
    oni_long['Month'] = oni_long['Month'].map(month_map)
-    oni_long['Date'] = pd.to_datetime(oni_long['Year'].astype(str)
    oni_long['ONI'] = pd.to_numeric(oni_long['ONI'], errors='coerce')
    return oni_long

@@ -150,10 +175,10 @@ def process_typhoon_data(typhoon_data):
    typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
    typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
    typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
-
    typhoon_max = typhoon_data.groupby('SID').agg({
-        'USA_WIND':
-        'LAT':
    }).reset_index()
    typhoon_max['Month'] = typhoon_max['ISO_TIME'].dt.strftime('%m')
    typhoon_max['Year'] = typhoon_max['ISO_TIME'].dt.year
@@ -161,7 +186,7 @@ def process_typhoon_data(typhoon_data):
    return typhoon_max

def merge_data(oni_long, typhoon_max):
-    return pd.merge(typhoon_max, oni_long, on=['Year',

def categorize_typhoon(wind_speed):
    if wind_speed >= 137:
@@ -189,12 +214,12 @@ def classify_enso_phases(oni_value):
    else:
        return 'Neutral'

-#
def perform_wind_regression(start_year, start_month, end_year, end_month):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
-    data = merged_data[(merged_data['ISO_TIME']
-    data['severe_typhoon'] = (data['USA_WIND']
    X = sm.add_constant(data['ONI'])
    y = data['severe_typhoon']
    model = sm.Logit(y, X).fit(disp=0)
@@ -206,8 +231,8 @@ def perform_wind_regression(start_year, start_month, end_year, end_month):
def perform_pressure_regression(start_year, start_month, end_year, end_month):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
-    data = merged_data[(merged_data['ISO_TIME']
-    data['intense_typhoon'] = (data['USA_PRES']
    X = sm.add_constant(data['ONI'])
    y = data['intense_typhoon']
    model = sm.Logit(y, X).fit(disp=0)
@@ -219,54 +244,54 @@ def perform_pressure_regression(start_year, start_month, end_year, end_month):
def perform_longitude_regression(start_year, start_month, end_year, end_month):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
-    data = merged_data[(merged_data['ISO_TIME']
-    data['western_typhoon'] = (data['LON']
    X = sm.add_constant(data['ONI'])
    y = data['western_typhoon']
-    model = sm.
    beta_1 = model.params['ONI']
    exp_beta_1 = np.exp(beta_1)
    p_value = model.pvalues['ONI']
    return f"Longitude Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"

-#
def load_ibtracs_data():
    ibtracs_data = {}
    for basin, filename in BASIN_FILES.items():
        local_path = os.path.join(DATA_PATH, filename)
        if not os.path.exists(local_path):
-
-            response = requests.get(
            response.raise_for_status()
            with open(local_path, 'wb') as f:
                f.write(response.content)
-
        try:
-
            ds = tracks.TrackDataset(source='ibtracs', ibtracs_url=local_path)
-
            ibtracs_data[basin] = ds
        except ValueError as e:
-
            ibtracs_data[basin] = None
    return ibtracs_data

ibtracs = load_ibtracs_data()

-#
update_oni_data()
oni_data, typhoon_data = load_data(ONI_DATA_PATH, TYPHOON_DATA_PATH)
oni_long = process_oni_data(oni_data)
typhoon_max = process_typhoon_data(typhoon_data)
merged_data = merge_data(oni_long, typhoon_max)

-#
def generate_typhoon_tracks(filtered_data, typhoon_search):
    fig = go.Figure()
    for sid in filtered_data['SID'].unique():
        storm_data = filtered_data[filtered_data['SID'] == sid]
        phase = storm_data['ENSO_Phase'].iloc[0]
-        color = {'El Nino':
        fig.add_trace(go.Scattergeo(
            lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
            name=storm_data['NAME'].iloc[0], line=dict(width=2, color=color)
@@ -287,32 +312,36 @@ def generate_typhoon_tracks(filtered_data, typhoon_search):
    return fig

def generate_wind_oni_scatter(filtered_data, typhoon_search):
-    fig = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category',
-
                     color_discrete_map=color_map)
    if typhoon_search:
        mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if mask.any():
            fig.add_trace(go.Scatter(
-                x=filtered_data.loc[mask,
                mode='markers', marker=dict(size=10, color='red', symbol='star'),
                name=f'Matched: {typhoon_search}',
-                text=filtered_data.loc[mask,
            ))
    return fig

def generate_pressure_oni_scatter(filtered_data, typhoon_search):
-    fig = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category',
-
                     color_discrete_map=color_map)
    if typhoon_search:
        mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if mask.any():
            fig.add_trace(go.Scatter(
-                x=filtered_data.loc[mask,
                mode='markers', marker=dict(size=10, color='red', symbol='star'),
                name=f'Matched: {typhoon_search}',
-                text=filtered_data.loc[mask,
            ))
    return fig

@@ -320,7 +349,7 @@ def generate_regression_analysis(filtered_data):
    fig = px.scatter(filtered_data, x='LON', y='ONI', hover_data=['NAME'],
                     title='Typhoon Generation Longitude vs ONI (All Years)')
    if len(filtered_data) > 1:
-        X = np.array(filtered_data['LON']).reshape(-1,
        y = filtered_data['ONI']
        model = sm.OLS(y, sm.add_constant(X)).fit()
        y_pred = model.predict(sm.add_constant(X))
@@ -334,7 +363,7 @@ def generate_regression_analysis(filtered_data):
def generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
-    filtered_data = merged_data[(merged_data['ISO_TIME']
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
@@ -347,7 +376,7 @@ def generate_main_analysis(start_year, start_month, end_year, end_month, enso_ph
def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
-    filtered_data = merged_data[(merged_data['ISO_TIME']
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
@@ -355,27 +384,25 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, ty
    count = len(unique_storms)
    fig = go.Figure()
    for sid in unique_storms:
-        storm_data = typhoon_data[typhoon_data['SID']
        name = storm_data['NAME'].iloc[0] if pd.notnull(storm_data['NAME'].iloc[0]) else "Unnamed"
-        storm_oni = filtered_data[filtered_data['SID']
-        color = 'red' if storm_oni
        fig.add_trace(go.Scattergeo(
            lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
            name=f"{name} ({storm_data['SEASON'].iloc[0]})",
-            line=dict(width=1.5, color=color),
-            hoverinfo="name"
        ))
    if typhoon_search:
        search_mask = typhoon_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if search_mask.any():
            for sid in typhoon_data[search_mask]['SID'].unique():
-                storm_data = typhoon_data[typhoon_data['SID']
                fig.add_trace(go.Scattergeo(
                    lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines+markers',
                    name=f"MATCHED: {storm_data['NAME'].iloc[0]} ({storm_data['SEASON'].iloc[0]})",
                    line=dict(width=3, color='yellow'),
-                    marker=dict(size=5),
-                    hoverinfo="name"
                ))
    fig.update_layout(
        title=f"Typhoon Tracks ({start_year}-{start_month} to {end_year}-{end_month})",
@@ -383,9 +410,9 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, ty
        projection_type='natural earth',
        showland=True,
        showcoastlines=True,
-        landcolor='rgb(243,
-        countrycolor='rgb(204,
-        coastlinecolor='rgb(204,
        center=dict(lon=140, lat=20),
        projection_scale=3
    ),
@@ -416,8 +443,8 @@ def get_longitude_analysis(start_year, start_month, end_year, end_month, enso_ph
    regression = perform_longitude_regression(start_year, start_month, end_year, end_month)
    return results[3], results[4], regression

-def categorize_typhoon_by_standard(wind_speed, standard):
-    if standard
        wind_speed_ms = wind_speed * 0.514444
        if wind_speed_ms >= 51.0:
            return 'Strong Typhoon', taiwan_standard['Strong Typhoon']['hex']
@@ -441,11 +468,120 @@ def categorize_typhoon_by_standard(wind_speed, standard):
        return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
    return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']

-#
def generate_track_video_from_csv(year, storm_id, standard):
    storm_df = typhoon_data[typhoon_data['SID'] == storm_id].copy()
    if storm_df.empty:
-
        return None
    storm_df = storm_df.sort_values('ISO_TIME')
    lats = storm_df['LAT'].astype(float).values
@@ -460,42 +596,41 @@ def generate_track_video_from_csv(year, storm_id, standard):

    min_lat, max_lat = np.min(lats), np.max(lats)
    min_lon, max_lon = np.min(lons), np.max(lons)
-    lat_padding = max((max_lat - min_lat)
-    lon_padding = max((max_lon - min_lon)

-    fig = plt.figure(figsize=(12,
-    ax = plt.axes([0.05, 0.05, 0.60, 0.
-                  projection=ccrs.PlateCarree(central_longitude
    ax.set_extent([min_lon - lon_padding, max_lon + lon_padding, min_lat - lat_padding, max_lat + lat_padding],
                  crs=ccrs.PlateCarree())

-    ax.
-    ax.
-    ax.
-
-
-
-
-    line, = ax.plot([], [], 'b-', linewidth=2, transform=ccrs.PlateCarree())
-    point, = ax.plot([], [], 'o', markersize=10, transform=ccrs.PlateCarree())
-    date_text = ax.text(0.02, 0.02, '', transform=ax.transAxes, fontsize=12,
-                        bbox=dict(facecolor='white', alpha=0.8))
-    # Dynamic state display at right side
-    state_text = fig.text(0.70, 0.60, '', fontsize=14, verticalalignment='top',
-                          bbox=dict(facecolor='white', alpha=0.8, boxstyle='round,pad=0.5'))

-
-
-
-
-

    def init():
        line.set_data([], [])
        point.set_data([], [])
        date_text.set_text('')
-
-        return line, point, date_text,

    def update(frame):
        line.set_data(lons[:frame+1], lats[:frame+1])
@@ -505,12 +640,12 @@ def generate_track_video_from_csv(year, storm_id, standard):
        point.set_color(color)
        dt_str = pd.to_datetime(times[frame]).strftime('%Y-%m-%d %H:%M')
        date_text.set_text(dt_str)
-
-
-
-
-
-        return line, point, date_text,

    ani = animation.FuncAnimation(fig, update, init_func=init, frames=len(times),
                                  interval=200, blit=True, repeat=True)
@@ -526,7 +661,7 @@ def simplified_track_video(year, basin, typhoon, standard):
    storm_id = typhoon.split('(')[-1].strip(')')
    return generate_track_video_from_csv(year, storm_id, standard)

-#
basin_to_prefix = {
    "All Basins": "all",
    "NA - North Atlantic": "NA",
@@ -545,18 +680,18 @@ def update_typhoon_options(year, basin):
                continue
            summaries.append(season_data.summary())
        if len(summaries) == 0:
-
            return gr.update(choices=[], value=None)
        combined_summary = pd.concat(summaries, ignore_index=True)
    else:
        prefix = basin_to_prefix.get(basin)
        ds = ibtracs.get(prefix)
        if ds is None:
-
            return gr.update(choices=[], value=None)
        season_data = ds.get_season(int(year))
        if season_data.summary().empty:
-
            return gr.update(choices=[], value=None)
        combined_summary = season_data.summary()
    options = []
@@ -569,17 +704,16 @@ def update_typhoon_options(year, basin):
                continue
        return gr.update(choices=options, value=options[0] if options else None)
    except Exception as e:
-
        return gr.update(choices=[], value=None)

def update_typhoon_options_anim(year, basin):
    try:
-        # For animation, use the processed CSV data (without filtering by a specific prefix)
        data = typhoon_data.copy()
-        data['Year'] =
        season_data = data[data['Year'] == int(year)]
        if season_data.empty:
-
            return gr.update(choices=[], value=None)
        summary = season_data.groupby('SID').first().reset_index()
        options = []
@@ -588,188 +722,127 @@ def update_typhoon_options_anim(year, basin):
            options.append(f"{name} ({row['SID']})")
        return gr.update(choices=options, value=options[0] if options else None)
    except Exception as e:
-
        return gr.update(choices=[], value=None)

-#
-def update_route_clusters(start_year, start_month, end_year, end_month, enso_value, season):
-    # Use only WP storms from processed CSV for clustering.
-    wp_data = typhoon_data[typhoon_data['SID'].str.startswith("WP")]
-    if wp_data.empty:
-        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No West Pacific storms found."
-    wp_data['Year'] = pd.to_datetime(wp_data['ISO_TIME']).dt.year
-    wp_season = wp_data[wp_data['Year'] == int(start_year)]
-    if wp_season.empty:
-        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No storms found for the given period in WP."
-
-    all_storms_data = []
-    for sid, group in wp_data.groupby('SID'):
-        group = group.sort_values('ISO_TIME')
-        times = pd.to_datetime(group['ISO_TIME']).values
-        lats = group['LAT'].astype(float).values
-        lons = group['LON'].astype(float).values
-        if len(lons) < 2:
-            continue
-        if season != 'all':
-            month = pd.to_datetime(group['ISO_TIME']).iloc[0].month
-            if season == 'summer' and not (4 <= month <= 8):
-                continue
-            if season == 'winter' and not (9 <= month <= 12):
-                continue
-        all_storms_data.append((lons, lats, np.array(group['USA_WIND'].astype(float)), times, sid))
-    if not all_storms_data:
-        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid WP storms for clustering."
-
-    max_length = max(len(item[0]) for item in all_storms_data)
-    route_vectors = []
-    filtered_storms = []
-    for lons, lats, winds, times, sid in all_storms_data:
-        t = np.linspace(0, 1, len(lons))
-        t_new = np.linspace(0, 1, max_length)
-        try:
-            lon_i = interp1d(t, lons, kind='linear', fill_value='extrapolate')(t_new)
-            lat_i = interp1d(t, lats, kind='linear', fill_value='extrapolate')(t_new)
-        except Exception:
-            continue
-        route_vector = np.column_stack((lon_i, lat_i)).flatten()
-        if np.isnan(route_vector).any():
-            continue
-        route_vectors.append(route_vector)
-        filtered_storms.append((lon_i, lat_i, winds, times, sid))
-    route_vectors = np.array(route_vectors)
-    if len(route_vectors) == 0:
-        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms after interpolation."
-
-    tsne = TSNE(n_components=2, random_state=42, verbose=1)
-    tsne_results = tsne.fit_transform(route_vectors)
-    dbscan = DBSCAN(eps=5, min_samples=3)
-    best_labels = dbscan.fit_predict(tsne_results)
-    unique_labels = sorted(set(best_labels) - {-1})
-    fig_tsne = go.Figure()
-    colors = px.colors.qualitative.Safe
-    for i, label in enumerate(unique_labels):
-        indices = np.where(best_labels == label)[0]
-        fig_tsne.add_trace(go.Scatter(
-            x=tsne_results[indices, 0],
-            y=tsne_results[indices, 1],
-            mode='markers',
-            marker=dict(color=colors[i % len(colors)]),
-            name=f"Cluster {label}"
-        ))
-    fig_tsne.update_layout(title="t-SNE of WP Storm Routes")
-    fig_routes = go.Figure()  # Placeholder
-    fig_stats = make_subplots(rows=2, cols=1)  # Placeholder
-    info = "TSNE clustering complete."
-    return fig_tsne, fig_routes, fig_stats, info
-
-# ------------------ Gradio Interface ------------------
with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
    gr.Markdown("# Typhoon Analysis Dashboard")

    with gr.Tab("Overview"):
        gr.Markdown("""
        ## Welcome to the Typhoon Analysis Dashboard
-
        This dashboard allows you to analyze typhoon data in relation to ENSO phases.
-
        ### Features:
-        - **Track Visualization**: View typhoon tracks by time period and ENSO phase
-        - **Wind Analysis**: Examine wind speed vs ONI relationships
-        - **Pressure Analysis**: Analyze pressure vs ONI relationships
-        - **Longitude Analysis**: Study typhoon generation longitude vs ONI
-        - **Path Animation**:
-        - **TSNE Cluster**: Perform t-SNE clustering on WP storm routes
-
-        Select a tab above to begin your analysis.
        """)

    with gr.Tab("Track Visualization"):
        with gr.Row():
            start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
-            start_month = gr.Dropdown(label="Start Month", choices=list(range(1,
            end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
-            end_month = gr.Dropdown(label="End Month", choices=list(range(1,
-            enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all',
            typhoon_search = gr.Textbox(label="Typhoon Search")
        analyze_btn = gr.Button("Generate Tracks")
        tracks_plot = gr.Plot(label="Typhoon Tracks", elem_id="tracks_plot")
        typhoon_count = gr.Textbox(label="Number of Typhoons Displayed")
-        analyze_btn.click(fn=get_full_tracks,

    with gr.Tab("Wind Analysis"):
        with gr.Row():
            wind_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
-            wind_start_month = gr.Dropdown(label="Start Month", choices=list(range(1,
            wind_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
-            wind_end_month = gr.Dropdown(label="End Month", choices=list(range(1,
-            wind_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all',
            wind_typhoon_search = gr.Textbox(label="Typhoon Search")
        wind_analyze_btn = gr.Button("Generate Wind Analysis")
        wind_scatter = gr.Plot(label="Wind Speed vs ONI")
        wind_regression_results = gr.Textbox(label="Wind Regression Results")
-        wind_analyze_btn.click(fn=get_wind_analysis,

    with gr.Tab("Pressure Analysis"):
        with gr.Row():
            pressure_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
-            pressure_start_month = gr.Dropdown(label="Start Month", choices=list(range(1,
            pressure_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
-            pressure_end_month = gr.Dropdown(label="End Month", choices=list(range(1,
-            pressure_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all',
            pressure_typhoon_search = gr.Textbox(label="Typhoon Search")
        pressure_analyze_btn = gr.Button("Generate Pressure Analysis")
        pressure_scatter = gr.Plot(label="Pressure vs ONI")
        pressure_regression_results = gr.Textbox(label="Pressure Regression Results")
-        pressure_analyze_btn.click(fn=get_pressure_analysis,

    with gr.Tab("Longitude Analysis"):
        with gr.Row():
            lon_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
-            lon_start_month = gr.Dropdown(label="Start Month", choices=list(range(1,
-            lon_end_year = gr.Number(label="End Year", value=
-            lon_end_month = gr.Dropdown(label="End Month", choices=list(range(1,
-            lon_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all',
            lon_typhoon_search = gr.Textbox(label="Typhoon Search (Optional)")
        lon_analyze_btn = gr.Button("Generate Longitude Analysis")
        regression_plot = gr.Plot(label="Longitude vs ONI")
        slopes_text = gr.Textbox(label="Regression Slopes")
        lon_regression_results = gr.Textbox(label="Longitude Regression Results")
-        lon_analyze_btn.click(fn=get_longitude_analysis,

    with gr.Tab("Tropical Cyclone Path Animation"):
        with gr.Row():
-            year_dropdown = gr.Dropdown(label="Year", choices=[str(y) for y in range(1950,
-            basin_dropdown = gr.Dropdown(label="Basin", choices=["NA - North Atlantic",
        with gr.Row():
            typhoon_dropdown = gr.Dropdown(label="Tropical Cyclone")
-            standard_dropdown = gr.Dropdown(label="Classification Standard", choices=['atlantic',
        animate_btn = gr.Button("Generate Animation")
        path_video = gr.Video(label="Tropical Cyclone Path Animation", format="mp4", interactive=False, elem_id="path_video")
        animation_info = gr.Markdown("""
        ### Animation Instructions
-        1. Select a year and basin
        2. Choose a tropical cyclone from the populated list.
        3. Select a classification standard (Atlantic or Taiwan).
        4. Click "Generate Animation".
-        5. The animation displays the storm track
        """)
        year_dropdown.change(fn=update_typhoon_options_anim, inputs=[year_dropdown, basin_dropdown], outputs=typhoon_dropdown)
        basin_dropdown.change(fn=update_typhoon_options_anim, inputs=[year_dropdown, basin_dropdown], outputs=typhoon_dropdown)
-        animate_btn.click(fn=simplified_track_video,

    with gr.Tab("TSNE Cluster"):
        with gr.Row():
            tsne_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
-            tsne_start_month = gr.Dropdown(label="Start Month", choices=list(range(1,
            tsne_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
-            tsne_end_month = gr.Dropdown(label="End Month", choices=list(range(1,
-            tsne_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all',
-            tsne_season = gr.Dropdown(label="Season", choices=['all',
        tsne_analyze_btn = gr.Button("Analyze")
        tsne_plot = gr.Plot(label="t-SNE Clusters")
        routes_plot = gr.Plot(label="Typhoon Routes with Mean Routes")
        stats_plot = gr.Plot(label="Cluster Statistics")
        cluster_info = gr.Textbox(label="Cluster Information", lines=10)
-        tsne_analyze_btn.click(fn=update_route_clusters,

demo.launch(share=True)
@@ -1,3 +1,12 @@
+import os
+import argparse
+import logging
+import pickle
+import threading
+import time
+from datetime import datetime, timedelta
+from collections import defaultdict
+
import gradio as gr
import pandas as pd
import numpy as np
@@ -9,45 +18,57 @@ import cartopy.feature as cfeature
import plotly.graph_objects as go
import plotly.express as px
from plotly.subplots import make_subplots
+
from sklearn.manifold import TSNE
from sklearn.cluster import DBSCAN
+from sklearn.preprocessing import StandardScaler
from scipy.interpolate import interp1d

+import requests
+import tempfile
+import shutil
+import xarray as xr
+import csv                    # re-added: convert_oni_ascii_to_csv below still uses csv.writer
+import statsmodels.api as sm  # re-added: the regression functions below still use sm.Logit/sm.OLS
+
+try:
+    import cdsapi
+    CDSAPI_AVAILABLE = True
+except ImportError:
+    CDSAPI_AVAILABLE = False
+
import tropycal.tracks as tracks

+# -----------------------------
+# Configuration and Setup
+# -----------------------------
+logging.basicConfig(
+    level=logging.INFO,  # Use DEBUG for more details
+    format='%(asctime)s - %(levelname)s - %(message)s'
+)
+
parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
args = parser.parse_args()
DATA_PATH = args.data_path

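
Note: with the argparse defaults above, the app reads its CSVs from the current
working directory. A typical launch, with a hypothetical data directory:

    python app.py --data_path /data/typhoon

where /data/typhoon contains oni_data.csv and processed_typhoon_data.csv.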
+# Data paths
ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
+MERGED_DATA_CSV = os.path.join(DATA_PATH, 'merged_typhoon_era5_data.csv')  # used by other analyses

+# IBTrACS (used only for typhoon option updates)
+LOCAL_IBTRACS_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r01.csv')
+IBTRACS_BASE_URI = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/'
+IBTRACS_URI = IBTRACS_BASE_URI + 'ibtracs.WP.list.v04r01.csv'  # split so load_ibtracs_data() can join IBTRACS_BASE_URI + filename
+CACHE_FILE = os.path.join(DATA_PATH, 'ibtracs_cache.pkl')
+CACHE_EXPIRY_DAYS = 1
BASIN_FILES = {
|
64 |
'EP': 'ibtracs.EP.list.v04r01.csv',
|
65 |
'NA': 'ibtracs.NA.list.v04r01.csv',
|
66 |
'WP': 'ibtracs.WP.list.v04r01.csv'
|
67 |
}
|
|
|
|
|
|
|
|
|
68 |
|
69 |
+
# -----------------------------
|
70 |
+
# Color Maps and Standards
|
71 |
+
# -----------------------------
|
72 |
color_map = {
|
73 |
'C5 Super Typhoon': 'rgb(255, 0, 0)',
|
74 |
'C4 Very Strong Typhoon': 'rgb(255, 165, 0)',
|
|
|
@@ -73,7 +94,9 @@ taiwan_standard = {
    'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
}

+# -----------------------------
+# Season and Regions
+# -----------------------------
season_months = {
    'all': list(range(1, 13)),
    'summer': [6, 7, 8],
@@ -88,7 +111,9 @@ regions = {
    "Philippines": {"lat_min": 5, "lat_max": 21, "lon_min": 115, "lon_max": 130}
}

+# -----------------------------
+# ONI and Typhoon Data Functions
+# -----------------------------
def download_oni_file(url, filename):
    response = requests.get(url)
    response.raise_for_status()
@@ -98,8 +123,8 @@ def download_oni_file(url, filename):

def convert_oni_ascii_to_csv(input_file, output_file):
    data = defaultdict(lambda: [''] * 12)
+    season_to_month = {'DJF': 12, 'JFM': 1, 'FMA': 2, 'MAM': 3, 'AMJ': 4, 'MJJ': 5,
+                       'JJA': 6, 'JAS': 7, 'ASO': 8, 'SON': 9, 'OND': 10, 'NDJ': 11}
    with open(input_file, 'r') as f:
        lines = f.readlines()[1:]
    for line in lines:
@@ -109,11 +134,11 @@ def convert_oni_ascii_to_csv(input_file, output_file):
            if season in season_to_month:
                month = season_to_month[season]
                if season == 'DJF':
+                    year = str(int(year) - 1)
                data[year][month-1] = anom
    with open(output_file, 'w', newline='') as f:
        writer = csv.writer(f)
+        writer.writerow(['Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
        for year in sorted(data.keys()):
            writer.writerow([year] + data[year])

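
A note on the DJF branch above: each three-letter code is a running three-month
season, and DJF straddles the year boundary, so the converter files it under
December of the earlier year. With the dict as written:

    season_to_month['DJF']    # -> 12
    str(int('2024') - 1)      # -> '2023', i.e. a "DJF 2024" row lands in Dec 2023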
@@ -138,10 +163,10 @@ def load_data(oni_path, typhoon_path):

def process_oni_data(oni_data):
    oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
+    month_map = {'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04', 'May': '05', 'Jun': '06',
+                 'Jul': '07', 'Aug': '08', 'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12'}
    oni_long['Month'] = oni_long['Month'].map(month_map)
+    oni_long['Date'] = pd.to_datetime(oni_long['Year'].astype(str) + '-' + oni_long['Month'] + '-01')
    oni_long['ONI'] = pd.to_numeric(oni_long['ONI'], errors='coerce')
    return oni_long

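
The melt/map/to_datetime chain turns the wide ONI table into one row per
(Year, Month). A minimal illustration with made-up anomaly values:

    # wide input          long output
    # Year  Jan   Feb     Year  Month  ONI   Date
    # 2023  -0.7  -0.4    2023  01     -0.7  2023-01-01
    #                     2023  02     -0.4  2023-02-01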
@@ -150,10 +175,10 @@ def process_typhoon_data(typhoon_data):
    typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
    typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
    typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
+    logging.info(f"Unique basins in typhoon_data: {typhoon_data['SID'].str[:2].unique()}")
    typhoon_max = typhoon_data.groupby('SID').agg({
+        'USA_WIND': 'max', 'USA_PRES': 'min', 'ISO_TIME': 'first', 'SEASON': 'first', 'NAME': 'first',
+        'LAT': 'first', 'LON': 'first'
    }).reset_index()
    typhoon_max['Month'] = typhoon_max['ISO_TIME'].dt.strftime('%m')
    typhoon_max['Year'] = typhoon_max['ISO_TIME'].dt.year
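
The groupby('SID').agg(...) above collapses every storm to a single row: its
lifetime-maximum wind, lifetime-minimum pressure, and the time/position of its
first fix, so LAT/LON here are the genesis point that the longitude analysis
relies on. The Month/Year columns derived from ISO_TIME are what merge_data
joins against.

    # e.g. all 6-hourly fixes of a storm with SID '2016258N20130' (hypothetical)
    # become one row: USA_WIND = max over fixes, LAT/LON = first fix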
@@ -161,7 +186,7 @@ def process_typhoon_data(typhoon_data):
    return typhoon_max

def merge_data(oni_long, typhoon_max):
+    return pd.merge(typhoon_max, oni_long, on=['Year', 'Month'])

def categorize_typhoon(wind_speed):
    if wind_speed >= 137:
@@ -189,12 +214,12 @@ def classify_enso_phases(oni_value):
    else:
        return 'Neutral'

+# ------------- Regression Functions -------------
def perform_wind_regression(start_year, start_month, end_year, end_month):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
+    data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['USA_WIND', 'ONI'])
+    data['severe_typhoon'] = (data['USA_WIND'] >= 64).astype(int)
    X = sm.add_constant(data['ONI'])
    y = data['severe_typhoon']
    model = sm.Logit(y, X).fit(disp=0)
@@ -206,8 +231,8 @@ def perform_wind_regression(start_year, start_month, end_year, end_month):
def perform_pressure_regression(start_year, start_month, end_year, end_month):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
+    data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['USA_PRES', 'ONI'])
+    data['intense_typhoon'] = (data['USA_PRES'] <= 950).astype(int)
    X = sm.add_constant(data['ONI'])
    y = data['intense_typhoon']
    model = sm.Logit(y, X).fit(disp=0)
@@ -219,54 +244,54 @@ def perform_pressure_regression(start_year, start_month, end_year, end_month):
def perform_longitude_regression(start_year, start_month, end_year, end_month):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
+    data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['LON', 'ONI'])
+    data['western_typhoon'] = (data['LON'] <= 140).astype(int)
    X = sm.add_constant(data['ONI'])
    y = data['western_typhoon']
+    model = sm.Logit(y, X).fit(disp=0)  # logistic, like the wind/pressure fits; an OLS slope would make the reported odds ratio meaningless
    beta_1 = model.params['ONI']
    exp_beta_1 = np.exp(beta_1)
    p_value = model.pvalues['ONI']
    return f"Longitude Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"

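
In all three regressions the reported odds ratio is exp(β1): the multiplicative
change in the odds of the outcome (a severe, intense, or western-forming storm)
per +1.0 change in ONI. For example:

    np.exp(0.30)   # -> ~1.35, i.e. roughly 35% higher odds per unit of ONI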
+# ------------- IBTrACS Data Loading -------------
def load_ibtracs_data():
    ibtracs_data = {}
    for basin, filename in BASIN_FILES.items():
        local_path = os.path.join(DATA_PATH, filename)
        if not os.path.exists(local_path):
+            logging.info(f"Downloading {basin} basin file...")
+            response = requests.get(IBTRACS_BASE_URI + filename)
            response.raise_for_status()
            with open(local_path, 'wb') as f:
                f.write(response.content)
+            logging.info(f"Downloaded {basin} basin file.")
        try:
+            logging.info(f"--> Starting to read in IBTrACS data for basin {basin}")
            ds = tracks.TrackDataset(source='ibtracs', ibtracs_url=local_path)
+            logging.info(f"--> Completed reading in IBTrACS data for basin {basin}")
            ibtracs_data[basin] = ds
        except ValueError as e:
+            logging.warning(f"Skipping basin {basin} due to error: {e}")
            ibtracs_data[basin] = None
    return ibtracs_data

ibtracs = load_ibtracs_data()

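
CACHE_FILE and CACHE_EXPIRY_DAYS are defined in the configuration block but no
consumer appears in this diff; presumably the parsed TrackDatasets are meant to
be pickled between runs. A sketch of what that could look like (assumed, not
part of this commit):

    # if os.path.exists(CACHE_FILE) and \
    #         time.time() - os.path.getmtime(CACHE_FILE) < CACHE_EXPIRY_DAYS * 86400:
    #     with open(CACHE_FILE, 'rb') as f:
    #         return pickle.load(f)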
+# ------------- Load & Process Data -------------
update_oni_data()
oni_data, typhoon_data = load_data(ONI_DATA_PATH, TYPHOON_DATA_PATH)
oni_long = process_oni_data(oni_data)
typhoon_max = process_typhoon_data(typhoon_data)
merged_data = merge_data(oni_long, typhoon_max)

+# ------------- Visualization Functions -------------
def generate_typhoon_tracks(filtered_data, typhoon_search):
    fig = go.Figure()
    for sid in filtered_data['SID'].unique():
        storm_data = filtered_data[filtered_data['SID'] == sid]
        phase = storm_data['ENSO_Phase'].iloc[0]
+        color = {'El Nino': 'red', 'La Nina': 'blue', 'Neutral': 'green'}.get(phase, 'black')
        fig.add_trace(go.Scattergeo(
            lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
            name=storm_data['NAME'].iloc[0], line=dict(width=2, color=color)
@@ -287,32 +312,36 @@ def generate_typhoon_tracks(filtered_data, typhoon_search):
    return fig

def generate_wind_oni_scatter(filtered_data, typhoon_search):
+    fig = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category',
+                     hover_data=['NAME', 'Year', 'Category'],
+                     title='Wind Speed vs ONI',
+                     labels={'ONI': 'ONI Value', 'USA_WIND': 'Max Wind Speed (knots)'},
                     color_discrete_map=color_map)
    if typhoon_search:
        mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if mask.any():
            fig.add_trace(go.Scatter(
+                x=filtered_data.loc[mask, 'ONI'], y=filtered_data.loc[mask, 'USA_WIND'],
                mode='markers', marker=dict(size=10, color='red', symbol='star'),
                name=f'Matched: {typhoon_search}',
+                text=filtered_data.loc[mask, 'NAME'] + ' (' + filtered_data.loc[mask, 'Year'].astype(str) + ')'
            ))
    return fig

def generate_pressure_oni_scatter(filtered_data, typhoon_search):
+    fig = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category',
+                     hover_data=['NAME', 'Year', 'Category'],
+                     title='Pressure vs ONI',
+                     labels={'ONI': 'ONI Value', 'USA_PRES': 'Min Pressure (hPa)'},
                     color_discrete_map=color_map)
    if typhoon_search:
        mask = filtered_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if mask.any():
            fig.add_trace(go.Scatter(
+                x=filtered_data.loc[mask, 'ONI'], y=filtered_data.loc[mask, 'USA_PRES'],
                mode='markers', marker=dict(size=10, color='red', symbol='star'),
                name=f'Matched: {typhoon_search}',
+                text=filtered_data.loc[mask, 'NAME'] + ' (' + filtered_data.loc[mask, 'Year'].astype(str) + ')'
            ))
    return fig

@@ -320,7 +349,7 @@ def generate_regression_analysis(filtered_data):
    fig = px.scatter(filtered_data, x='LON', y='ONI', hover_data=['NAME'],
                     title='Typhoon Generation Longitude vs ONI (All Years)')
    if len(filtered_data) > 1:
+        X = np.array(filtered_data['LON']).reshape(-1, 1)
        y = filtered_data['ONI']
        model = sm.OLS(y, sm.add_constant(X)).fit()
        y_pred = model.predict(sm.add_constant(X))
@@ -334,7 +363,7 @@ def generate_regression_analysis(filtered_data):
def generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
+    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
@@ -347,7 +376,7 @@ def generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    start_date = datetime(start_year, start_month, 1)
    end_date = datetime(end_year, end_month, 28)
+    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
    if enso_phase != 'all':
        filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
@@ -355,27 +384,25 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    count = len(unique_storms)
    fig = go.Figure()
    for sid in unique_storms:
+        storm_data = typhoon_data[typhoon_data['SID'] == sid]
        name = storm_data['NAME'].iloc[0] if pd.notnull(storm_data['NAME'].iloc[0]) else "Unnamed"
+        storm_oni = filtered_data[filtered_data['SID'] == sid]['ONI'].iloc[0]
+        color = 'red' if storm_oni >= 0.5 else ('blue' if storm_oni <= -0.5 else 'green')
        fig.add_trace(go.Scattergeo(
            lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
            name=f"{name} ({storm_data['SEASON'].iloc[0]})",
+            line=dict(width=1.5, color=color), hoverinfo="name"
        ))
    if typhoon_search:
        search_mask = typhoon_data['NAME'].str.contains(typhoon_search, case=False, na=False)
        if search_mask.any():
            for sid in typhoon_data[search_mask]['SID'].unique():
+                storm_data = typhoon_data[typhoon_data['SID'] == sid]
                fig.add_trace(go.Scattergeo(
                    lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines+markers',
                    name=f"MATCHED: {storm_data['NAME'].iloc[0]} ({storm_data['SEASON'].iloc[0]})",
                    line=dict(width=3, color='yellow'),
+                    marker=dict(size=5), hoverinfo="name"
                ))
    fig.update_layout(
        title=f"Typhoon Tracks ({start_year}-{start_month} to {end_year}-{end_month})",
@@ -383,9 +410,9 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
        projection_type='natural earth',
        showland=True,
        showcoastlines=True,
+        landcolor='rgb(243, 243, 243)',
+        countrycolor='rgb(204, 204, 204)',
+        coastlinecolor='rgb(204, 204, 204)',
        center=dict(lon=140, lat=20),
        projection_scale=3
    ),
@@ -416,8 +443,8 @@ def get_longitude_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
    regression = perform_longitude_regression(start_year, start_month, end_year, end_month)
    return results[3], results[4], regression

+def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
+    if standard == 'taiwan':
        wind_speed_ms = wind_speed * 0.514444
        if wind_speed_ms >= 51.0:
            return 'Strong Typhoon', taiwan_standard['Strong Typhoon']['hex']
@@ -441,11 +468,120 @@ def categorize_typhoon_by_standard(wind_speed, standard):
        return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
    return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']

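
The Taiwan branch classifies in metres per second (1 kt = 0.514444 m/s), so its
51.0 m/s "Strong Typhoon" threshold corresponds to roughly 99 kt:

    51.0 / 0.514444   # -> ~99.1 knots

while the Atlantic branch works directly in knots.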
+# ------------- Updated TSNE Cluster Function -------------
+def update_route_clusters(start_year, start_month, end_year, end_month, enso_value, season):
+    try:
+        # Use raw typhoon data (with multiple observations per storm) merged with ONI info.
+        raw_data = typhoon_data.copy()
+        raw_data['Year'] = raw_data['ISO_TIME'].dt.year
+        raw_data['Month'] = raw_data['ISO_TIME'].dt.strftime('%m')
+        merged_raw = pd.merge(raw_data, process_oni_data(oni_data), on=['Year', 'Month'], how='left')
+
+        # Filter by date
+        start_date = datetime(start_year, start_month, 1)
+        end_date = datetime(end_year, end_month, 28)
+        merged_raw = merged_raw[(merged_raw['ISO_TIME'] >= start_date) & (merged_raw['ISO_TIME'] <= end_date)]
+        logging.info(f"Total points after date filtering: {merged_raw.shape[0]}")
+
+        # Filter by ENSO phase if not 'all'
+        merged_raw['ENSO_Phase'] = merged_raw['ONI'].apply(classify_enso_phases)
+        if enso_value != 'all':
+            merged_raw = merged_raw[merged_raw['ENSO_Phase'] == enso_value.capitalize()]
+        logging.info(f"Total points after ENSO filtering: {merged_raw.shape[0]}")
+
+        # Use regional filtering for Western Pacific (adjust boundaries as needed)
+        wp_data = merged_raw[(merged_raw['LON'] >= 100) & (merged_raw['LON'] <= 180) &
+                             (merged_raw['LAT'] >= 0) & (merged_raw['LAT'] <= 40)]
+        logging.info(f"Total points after WP regional filtering: {wp_data.shape[0]}")
+        if wp_data.empty:
+            logging.info("WP regional filter returned no data; using all filtered data.")
+            wp_data = merged_raw
+
+        # Group by SID so each storm route has multiple observations
+        all_storms_data = []
+        for sid, group in wp_data.groupby('SID'):
+            group = group.sort_values('ISO_TIME')
+            times = pd.to_datetime(group['ISO_TIME']).values
+            lats = group['LAT'].astype(float).values
+            lons = group['LON'].astype(float).values
+            if len(lons) < 2:
+                continue
+            all_storms_data.append((sid, lons, lats, times))
+        logging.info(f"Storms available for TSNE after grouping: {len(all_storms_data)}")
+        if not all_storms_data:
+            return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms for clustering."
+
+        # Interpolate each storm's route to a common length
+        max_length = max(len(item[1]) for item in all_storms_data)
+        route_vectors = []
+        storm_ids = []
+        for sid, lons, lats, times in all_storms_data:
+            t = np.linspace(0, 1, len(lons))
+            t_new = np.linspace(0, 1, max_length)
+            try:
+                lon_interp = interp1d(t, lons, kind='linear', fill_value='extrapolate')(t_new)
+                lat_interp = interp1d(t, lats, kind='linear', fill_value='extrapolate')(t_new)
+            except Exception as ex:
+                logging.error(f"Interpolation error for storm {sid}: {ex}")
+                continue
+            route_vector = np.column_stack((lon_interp, lat_interp)).flatten()
+            if np.isnan(route_vector).any():
+                continue
+            route_vectors.append(route_vector)
+            storm_ids.append(sid)
+        logging.info(f"Storms with valid route vectors: {len(route_vectors)}")
+        if len(route_vectors) == 0:
+            return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms after interpolation."
+
+        route_vectors = np.array(route_vectors)
+        tsne = TSNE(n_components=2, random_state=42, verbose=1)
+        tsne_results = tsne.fit_transform(route_vectors)
+
+        dbscan = DBSCAN(eps=5, min_samples=3)
+        labels = dbscan.fit_predict(tsne_results)
+        unique_labels = sorted(set(labels) - {-1})
+
+        fig_tsne = go.Figure()
+        colors = px.colors.qualitative.Safe
+        for i, label in enumerate(unique_labels):
+            indices = np.where(labels == label)[0]
+            fig_tsne.add_trace(go.Scatter(
+                x=tsne_results[indices, 0],
+                y=tsne_results[indices, 1],
+                mode='markers',
+                marker=dict(color=colors[i % len(colors)]),
+                name=f"Cluster {label}"
+            ))
+        noise_indices = np.where(labels == -1)[0]
+        if len(noise_indices) > 0:
+            fig_tsne.add_trace(go.Scatter(
+                x=tsne_results[noise_indices, 0],
+                y=tsne_results[noise_indices, 1],
+                mode='markers',
+                marker=dict(color='grey'),
+                name='Noise'
+            ))
+        fig_tsne.update_layout(
+            title="t-SNE of Storm Routes",
+            xaxis_title="t-SNE Dim 1",
+            yaxis_title="t-SNE Dim 2"
+        )
+
+        # Placeholder figures for routes and stats
+        fig_routes = go.Figure()
+        fig_stats = make_subplots(rows=2, cols=1, shared_xaxes=True,
+                                  subplot_titles=("Average Wind Speed", "Average Pressure"))
+        info = "TSNE clustering complete."
+        return fig_tsne, fig_routes, fig_stats, info
+    except Exception as e:
+        logging.error(f"Error in TSNE clustering: {e}")
+        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), f"Error in TSNE clustering: {e}"
+
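
Each storm is embedded as a flat vector of max_length (lon, lat) pairs, so
t-SNE compares whole track shapes. The resampling step in isolation, as a
self-contained sketch (toy three-fix track, assumed target length of 5):

    import numpy as np
    from scipy.interpolate import interp1d

    lons = np.array([130.0, 132.0, 135.0])   # three fixes
    t = np.linspace(0, 1, len(lons))          # parameterise by track progress
    t_new = np.linspace(0, 1, 5)              # common length, here 5
    lon_interp = interp1d(t, lons, kind='linear', fill_value='extrapolate')(t_new)
    # -> [130., 131., 132., 133.5, 135.] : evenly spaced samples along the track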
+# ------------- Animation Functions Using Processed CSV & Stock Map -------------
def generate_track_video_from_csv(year, storm_id, standard):
    storm_df = typhoon_data[typhoon_data['SID'] == storm_id].copy()
    if storm_df.empty:
+        logging.error(f"No data found for storm: {storm_id}")
        return None
    storm_df = storm_df.sort_values('ISO_TIME')
    lats = storm_df['LAT'].astype(float).values
@@ -460,42 +596,41 @@ def generate_track_video_from_csv(year, storm_id, standard):

    min_lat, max_lat = np.min(lats), np.max(lats)
    min_lon, max_lon = np.min(lons), np.max(lons)
+    lat_padding = max((max_lat - min_lat) * 0.3, 5)
+    lon_padding = max((max_lon - min_lon) * 0.3, 5)

+    fig = plt.figure(figsize=(12, 6), dpi=100)
+    ax = plt.axes([0.05, 0.05, 0.60, 0.85],
+                  projection=ccrs.PlateCarree(central_longitude=180))
+    ax.stock_img()
    ax.set_extent([min_lon - lon_padding, max_lon + lon_padding, min_lat - lat_padding, max_lat + lat_padding],
                  crs=ccrs.PlateCarree())
+    ax.coastlines(resolution='50m', color='black', linewidth=1)
+    gl = ax.gridlines(draw_labels=True, color='gray', alpha=0.4, linestyle='--')
+    gl.top_labels = gl.right_labels = False
+    ax.set_title(f"{year} {storm_name} - {season}", fontsize=14)

+    line, = ax.plot([], [], transform=ccrs.PlateCarree(), color='blue', linewidth=2)
+    point, = ax.plot([], [], 'o', markersize=8, transform=ccrs.PlateCarree())
+    date_text = ax.text(0.02, 0.02, '', transform=ax.transAxes, fontsize=10,
+                        bbox=dict(facecolor='white', alpha=0.8))
+    storm_info_text = fig.text(0.70, 0.60, '', fontsize=10,
+                               bbox=dict(facecolor='white', alpha=0.8, boxstyle='round,pad=0.5'))

+    from matplotlib.lines import Line2D
+    standard_dict = atlantic_standard if standard == 'atlantic' else taiwan_standard
+    legend_elements = [Line2D([0], [0], marker='o', color='w', label=cat,
+                              markerfacecolor=details['hex'], markersize=8)
+                       for cat, details in standard_dict.items()]
+    ax.legend(handles=legend_elements, title="Storm Categories",
+              loc='upper right', fontsize=9)

    def init():
        line.set_data([], [])
        point.set_data([], [])
        date_text.set_text('')
+        storm_info_text.set_text('')
+        return line, point, date_text, storm_info_text

    def update(frame):
        line.set_data(lons[:frame+1], lats[:frame+1])
@@ -505,12 +640,12 @@ def generate_track_video_from_csv(year, storm_id, standard):
        point.set_color(color)
        dt_str = pd.to_datetime(times[frame]).strftime('%Y-%m-%d %H:%M')
        date_text.set_text(dt_str)
+        info_str = (f"Name: {storm_name}\n"
+                    f"Date: {dt_str}\n"
+                    f"Wind: {wind_speed:.1f} kt\n"
+                    f"Category: {category}")
+        storm_info_text.set_text(info_str)
+        return line, point, date_text, storm_info_text

    ani = animation.FuncAnimation(fig, update, init_func=init, frames=len(times),
                                  interval=200, blit=True, repeat=True)
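
With blit=True, init() and update() must return every artist that changes per
frame, which is why storm_info_text was added to both return tuples. The elided
tail of this function has to turn ani into the file path that gr.Video plays; a
plausible ending (assumed, the actual lines are not shown in this diff):

    # out_path = os.path.join(tempfile.gettempdir(), f"{storm_id}.mp4")
    # ani.save(out_path, writer='ffmpeg', fps=5)
    # plt.close(fig)
    # return out_path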
@@ -526,7 +661,7 @@ def simplified_track_video(year, basin, typhoon, standard):
    storm_id = typhoon.split('(')[-1].strip(')')
    return generate_track_video_from_csv(year, storm_id, standard)

+# ------------- Typhoon Options Update Functions -------------
basin_to_prefix = {
    "All Basins": "all",
    "NA - North Atlantic": "NA",
@@ -545,18 +680,18 @@ def update_typhoon_options(year, basin):
                continue
            summaries.append(season_data.summary())
        if len(summaries) == 0:
+            logging.error("No storms found for given year and basin.")
            return gr.update(choices=[], value=None)
        combined_summary = pd.concat(summaries, ignore_index=True)
    else:
        prefix = basin_to_prefix.get(basin)
        ds = ibtracs.get(prefix)
        if ds is None:
+            logging.error(f"Dataset not found for basin {basin}")
            return gr.update(choices=[], value=None)
        season_data = ds.get_season(int(year))
        if season_data.summary().empty:
+            logging.error("No storms found for given year and basin.")
            return gr.update(choices=[], value=None)
        combined_summary = season_data.summary()
    options = []
@@ -569,17 +704,16 @@ def update_typhoon_options(year, basin):
            continue
        return gr.update(choices=options, value=options[0] if options else None)
    except Exception as e:
+        logging.error(f"Error in update_typhoon_options: {e}")
        return gr.update(choices=[], value=None)

def update_typhoon_options_anim(year, basin):
    try:
        data = typhoon_data.copy()
+        data['Year'] = data['ISO_TIME'].dt.year
        season_data = data[data['Year'] == int(year)]
        if season_data.empty:
+            logging.error(f"No storms found for year {year} in animation update.")
            return gr.update(choices=[], value=None)
        summary = season_data.groupby('SID').first().reset_index()
        options = []
@@ -588,188 +722,127 @@ def update_typhoon_options_anim(year, basin):
            options.append(f"{name} ({row['SID']})")
        return gr.update(choices=options, value=options[0] if options else None)
    except Exception as e:
+        logging.error(f"Error in update_typhoon_options_anim: {e}")
        return gr.update(choices=[], value=None)

+# ------------- Gradio Interface -------------
with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
    gr.Markdown("# Typhoon Analysis Dashboard")

    with gr.Tab("Overview"):
        gr.Markdown("""
        ## Welcome to the Typhoon Analysis Dashboard
+
        This dashboard allows you to analyze typhoon data in relation to ENSO phases.
+
        ### Features:
+        - **Track Visualization**: View typhoon tracks by time period and ENSO phase.
+        - **Wind Analysis**: Examine wind speed vs ONI relationships.
+        - **Pressure Analysis**: Analyze pressure vs ONI relationships.
+        - **Longitude Analysis**: Study typhoon generation longitude vs ONI.
+        - **Path Animation**: View animated storm tracks on a free stock world map (centered at 180°) with a dynamic sidebar and persistent legend.
+        - **TSNE Cluster**: Perform t-SNE clustering on WP storm routes using raw merged typhoon+ONI data with detailed error management.
        """)

    with gr.Tab("Track Visualization"):
        with gr.Row():
            start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
+            start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
            end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
+            end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
+            enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
            typhoon_search = gr.Textbox(label="Typhoon Search")
        analyze_btn = gr.Button("Generate Tracks")
        tracks_plot = gr.Plot(label="Typhoon Tracks", elem_id="tracks_plot")
        typhoon_count = gr.Textbox(label="Number of Typhoons Displayed")
+        analyze_btn.click(fn=get_full_tracks,
+                          inputs=[start_year, start_month, end_year, end_month, enso_phase, typhoon_search],
+                          outputs=[tracks_plot, typhoon_count])

    with gr.Tab("Wind Analysis"):
        with gr.Row():
            wind_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
+            wind_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
            wind_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
+            wind_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
+            wind_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
            wind_typhoon_search = gr.Textbox(label="Typhoon Search")
        wind_analyze_btn = gr.Button("Generate Wind Analysis")
        wind_scatter = gr.Plot(label="Wind Speed vs ONI")
        wind_regression_results = gr.Textbox(label="Wind Regression Results")
+        wind_analyze_btn.click(fn=get_wind_analysis,
+                               inputs=[wind_start_year, wind_start_month, wind_end_year, wind_end_month, wind_enso_phase, wind_typhoon_search],
+                               outputs=[wind_scatter, wind_regression_results])

    with gr.Tab("Pressure Analysis"):
        with gr.Row():
            pressure_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
+            pressure_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
            pressure_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
+            pressure_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
+            pressure_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
            pressure_typhoon_search = gr.Textbox(label="Typhoon Search")
        pressure_analyze_btn = gr.Button("Generate Pressure Analysis")
        pressure_scatter = gr.Plot(label="Pressure vs ONI")
        pressure_regression_results = gr.Textbox(label="Pressure Regression Results")
+        pressure_analyze_btn.click(fn=get_pressure_analysis,
+                                   inputs=[pressure_start_year, pressure_start_month, pressure_end_year, pressure_end_month, pressure_enso_phase, pressure_typhoon_search],
+                                   outputs=[pressure_scatter, pressure_regression_results])

    with gr.Tab("Longitude Analysis"):
        with gr.Row():
            lon_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
+            lon_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
+            lon_end_year = gr.Number(label="End Year", value=2000, minimum=1900, maximum=2024, step=1)
+            lon_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=6)
+            lon_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
            lon_typhoon_search = gr.Textbox(label="Typhoon Search (Optional)")
        lon_analyze_btn = gr.Button("Generate Longitude Analysis")
        regression_plot = gr.Plot(label="Longitude vs ONI")
        slopes_text = gr.Textbox(label="Regression Slopes")
        lon_regression_results = gr.Textbox(label="Longitude Regression Results")
+        lon_analyze_btn.click(fn=get_longitude_analysis,
+                              inputs=[lon_start_year, lon_start_month, lon_end_year, lon_end_month, lon_enso_phase, lon_typhoon_search],
+                              outputs=[regression_plot, slopes_text, lon_regression_results])

    with gr.Tab("Tropical Cyclone Path Animation"):
        with gr.Row():
+            year_dropdown = gr.Dropdown(label="Year", choices=[str(y) for y in range(1950, 2025)], value="2000")
+            basin_dropdown = gr.Dropdown(label="Basin", choices=["NA - North Atlantic", "EP - Eastern North Pacific", "WP - Western North Pacific", "All Basins"], value="NA - North Atlantic")
        with gr.Row():
            typhoon_dropdown = gr.Dropdown(label="Tropical Cyclone")
+            standard_dropdown = gr.Dropdown(label="Classification Standard", choices=['atlantic', 'taiwan'], value='atlantic')
        animate_btn = gr.Button("Generate Animation")
        path_video = gr.Video(label="Tropical Cyclone Path Animation", format="mp4", interactive=False, elem_id="path_video")
        animation_info = gr.Markdown("""
        ### Animation Instructions
+        1. Select a year and basin (data is from your processed CSV).
        2. Choose a tropical cyclone from the populated list.
        3. Select a classification standard (Atlantic or Taiwan).
        4. Click "Generate Animation".
+        5. The animation displays the storm track on a free stock world map (centered at 180°) with a dynamic sidebar and a persistent legend.
        """)
        year_dropdown.change(fn=update_typhoon_options_anim, inputs=[year_dropdown, basin_dropdown], outputs=typhoon_dropdown)
        basin_dropdown.change(fn=update_typhoon_options_anim, inputs=[year_dropdown, basin_dropdown], outputs=typhoon_dropdown)
+        animate_btn.click(fn=simplified_track_video,
+                          inputs=[year_dropdown, basin_dropdown, typhoon_dropdown, standard_dropdown],
+                          outputs=path_video)

    with gr.Tab("TSNE Cluster"):
        with gr.Row():
            tsne_start_year = gr.Number(label="Start Year", value=2000, minimum=1900, maximum=2024, step=1)
+            tsne_start_month = gr.Dropdown(label="Start Month", choices=list(range(1, 13)), value=1)
            tsne_end_year = gr.Number(label="End Year", value=2024, minimum=1900, maximum=2024, step=1)
+            tsne_end_month = gr.Dropdown(label="End Month", choices=list(range(1, 13)), value=12)
+            tsne_enso_phase = gr.Dropdown(label="ENSO Phase", choices=['all', 'El Nino', 'La Nina', 'Neutral'], value='all')
+            tsne_season = gr.Dropdown(label="Season", choices=['all', 'summer', 'winter'], value='all')
        tsne_analyze_btn = gr.Button("Analyze")
        tsne_plot = gr.Plot(label="t-SNE Clusters")
        routes_plot = gr.Plot(label="Typhoon Routes with Mean Routes")
        stats_plot = gr.Plot(label="Cluster Statistics")
        cluster_info = gr.Textbox(label="Cluster Information", lines=10)
+        tsne_analyze_btn.click(fn=update_route_clusters,
+                               inputs=[tsne_start_year, tsne_start_month, tsne_end_year, tsne_end_month, tsne_enso_phase, tsne_season],
+                               outputs=[tsne_plot, routes_plot, stats_plot, cluster_info])

demo.launch(share=True)
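
One loose end worth flagging: the TSNE tab still passes tsne_season into
update_route_clusters, but the rewritten function never reads its season
argument (the old version's summer/winter month filter was dropped), so the
Season dropdown currently has no effect.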