Update app.py
app.py
CHANGED
@@ -9,7 +9,6 @@ import cartopy.feature as cfeature
 import plotly.graph_objects as go
 import plotly.express as px
 from plotly.subplots import make_subplots
-import tropycal.tracks as tracks
 import pickle
 import requests
 import os
@@ -20,33 +19,35 @@ import shutil
 import tempfile
 import csv
 from collections import defaultdict
-import filecmp
 from sklearn.manifold import TSNE
 from sklearn.cluster import DBSCAN
 from scipy.interpolate import interp1d
 
-#
+# Import tropycal for IBTrACS processing (for typhoon option updates)
+import tropycal.tracks as tracks
+
+# ------------------ Argument Parsing ------------------
 parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
 parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
 args = parser.parse_args()
 DATA_PATH = args.data_path
 
+# ------------------ File Paths ------------------
 ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
 TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
 
-#
+# ------------------ IBTrACS Files (for typhoon options) ------------------
 BASIN_FILES = {
     'EP': 'ibtracs.EP.list.v04r01.csv',
     'NA': 'ibtracs.NA.list.v04r01.csv',
     'WP': 'ibtracs.WP.list.v04r01.csv'
 }
 IBTRACS_BASE_URL = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/'
-LOCAL_MERGED_PATH = os.path.join(DATA_PATH, 'ibtracs.merged.v04r01.csv')
 
 CACHE_FILE = 'ibtracs_cache.pkl'
 CACHE_EXPIRY_DAYS = 0  # Force refresh for testing
 
-# Color
+# ------------------ Color Maps and Standards ------------------
 color_map = {
     'C5 Super Typhoon': 'rgb(255, 0, 0)',
     'C4 Very Strong Typhoon': 'rgb(255, 165, 0)',
@@ -56,8 +57,6 @@ color_map = {
     'Tropical Storm': 'rgb(0, 0, 255)',
     'Tropical Depression': 'rgb(128, 128, 128)'
 }
-
-# Classification standards with distinct colors for Matplotlib
 atlantic_standard = {
     'C5 Super Typhoon': {'wind_speed': 137, 'color': 'Red', 'hex': '#FF0000'},
     'C4 Very Strong Typhoon': {'wind_speed': 113, 'color': 'Orange', 'hex': '#FFA500'},
@@ -67,7 +66,6 @@ atlantic_standard = {
     'Tropical Storm': {'wind_speed': 34, 'color': 'Blue', 'hex': '#0000FF'},
     'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
 }
-
 taiwan_standard = {
     'Strong Typhoon': {'wind_speed': 51.0, 'color': 'Red', 'hex': '#FF0000'},
     'Medium Typhoon': {'wind_speed': 33.7, 'color': 'Orange', 'hex': '#FFA500'},
@@ -75,14 +73,12 @@ taiwan_standard = {
     'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
 }
 
-# Season
+# ------------------ Season and Regions ------------------
 season_months = {
     'all': list(range(1, 13)),
     'summer': [6, 7, 8],
     'winter': [12, 1, 2]
 }
-
-# Regions for duration calculations
 regions = {
     "Taiwan Land": {"lat_min": 21.8, "lat_max": 25.3, "lon_min": 119.5, "lon_max": 122.1},
     "Taiwan Sea": {"lat_min": 19, "lat_max": 28, "lon_min": 117, "lon_max": 125},
@@ -92,7 +88,7 @@ regions = {
     "Philippines": {"lat_min": 5, "lat_max": 21, "lon_min": 115, "lon_max": 130}
 }
 
-#
+# ------------------ ONI and Typhoon Data Functions ------------------
 def download_oni_file(url, filename):
     response = requests.get(url)
     response.raise_for_status()
@@ -127,103 +123,12 @@ def update_oni_data():
     input_file = os.path.join(DATA_PATH, "oni.ascii.txt")
     output_file = ONI_DATA_PATH
     if download_oni_file(url, temp_file):
-        if not os.path.exists(input_file) or not
+        if not os.path.exists(input_file) or not os.path.exists(output_file):
             os.replace(temp_file, input_file)
             convert_oni_ascii_to_csv(input_file, output_file)
         else:
             os.remove(temp_file)
 
-def load_ibtracs_data():
-    if os.path.exists(CACHE_FILE) and (datetime.now() - datetime.fromtimestamp(os.path.getmtime(CACHE_FILE))).days < CACHE_EXPIRY_DAYS:
-        with open(CACHE_FILE, 'rb') as f:
-            return pickle.load(f)
-
-    try:
-        # Check if merged file already exists
-        if os.path.exists(LOCAL_MERGED_PATH):
-            print("Loading merged basins file...")
-            ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=LOCAL_MERGED_PATH)
-        else:
-            print("Downloading and merging basin files...")
-
-            # Create temporary file for merged data
-            header = None
-            with open(LOCAL_MERGED_PATH, 'w', newline='') as merged_file:
-                writer = None
-
-                # Download and process each basin file
-                for basin, filename in BASIN_FILES.items():
-                    basin_url = IBTRACS_BASE_URL + filename
-                    local_path = os.path.join(DATA_PATH, filename)
-
-                    # Download the basin file if it doesn't exist
-                    if not os.path.exists(local_path):
-                        print(f"Downloading {basin} basin file...")
-                        response = requests.get(basin_url)
-                        response.raise_for_status()
-                        with open(local_path, 'wb') as f:
-                            f.write(response.content)
-                        print(f"Downloaded {basin} basin file.")
-
-                    # Process and merge the basin file
-                    with open(local_path, 'r', newline='') as basin_file:
-                        reader = csv.reader(basin_file)
-
-                        # Save header from the first file
-                        if header is None:
-                            header = next(reader)
-                            writer = csv.writer(merged_file)
-                            writer.writerow(header)
-                            # Skip the second header line
-                            next(reader)
-                        else:
-                            # Skip header lines in subsequent files
-                            next(reader)
-                            next(reader)
-
-                        # Write all data rows
-                        writer.writerows(reader)
-
-            print(f"Created merged basin file at {LOCAL_MERGED_PATH}")
-            ibtracs = tracks.TrackDataset(source='ibtracs', ibtracs_url=LOCAL_MERGED_PATH)
-
-        with open(CACHE_FILE, 'wb') as f:
-            pickle.dump(ibtracs, f)
-
-        return ibtracs
-
-    except Exception as e:
-        print(f"Error loading IBTrACS data: {e}")
-        print("Attempting to load default dataset...")
-        ibtracs = tracks.TrackDataset(basin='all')
-        with open(CACHE_FILE, 'wb') as f:
-            pickle.dump(ibtracs, f)
-        return ibtracs
-
-def convert_typhoondata(input_file, output_file):
-    with open(input_file, 'r') as infile:
-        next(infile); next(infile)
-        reader = csv.reader(infile)
-        sid_data = defaultdict(list)
-        for row in reader:
-            if row:
-                sid = row[0]
-                sid_data[sid].append((row, row[6]))
-    with open(output_file, 'w', newline='') as outfile:
-        fieldnames = ['SID', 'ISO_TIME', 'LAT', 'LON', 'SEASON', 'NAME', 'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES', 'START_DATE', 'END_DATE']
-        writer = csv.DictWriter(outfile, fieldnames=fieldnames)
-        writer.writeheader()
-        for sid, data in sid_data.items():
-            start_date = min(data, key=lambda x: x[1])[1]
-            end_date = max(data, key=lambda x: x[1])[1]
-            for row, iso_time in data:
-                writer.writerow({
-                    'SID': row[0], 'ISO_TIME': iso_time, 'LAT': row[8], 'LON': row[9], 'SEASON': row[1], 'NAME': row[5],
-                    'WMO_WIND': row[10].strip() or ' ', 'WMO_PRES': row[11].strip() or ' ',
-                    'USA_WIND': row[23].strip() or ' ', 'USA_PRES': row[24].strip() or ' ',
-                    'START_DATE': start_date, 'END_DATE': end_date
-                })
-
 def load_data(oni_path, typhoon_path):
     oni_data = pd.read_csv(oni_path)
     typhoon_data = pd.read_csv(typhoon_path, low_memory=False)
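An aside on the removed convert_typhoondata: START_DATE and END_DATE are taken with min()/max() over the raw ISO_TIME strings (row[6], captured as x[1]) rather than parsed datetimes. That is safe because fixed-width ISO 8601 timestamps sort lexicographically in chronological order, as this minimal sketch shows:

observations = ['2000-09-03 12:00:00', '2000-09-01 06:00:00', '2000-09-02 18:00:00']
# ISO 8601 strings compare correctly as plain strings, so no parsing is needed.
start_date = min(observations)
end_date = max(observations)
print(start_date, end_date)  # 2000-09-01 06:00:00 2000-09-03 12:00:00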
@@ -245,9 +150,7 @@ def process_typhoon_data(typhoon_data):
     typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
     typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
     typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
-
-    unique_basins = typhoon_data['SID'].str[:2].unique()
-    print(f"Unique basins in typhoon_data: {unique_basins}")
+    print(f"Unique basins in typhoon_data: {typhoon_data['SID'].str[:2].unique()}")
     typhoon_max = typhoon_data.groupby('SID').agg({
         'USA_WIND': 'max', 'USA_PRES': 'min', 'ISO_TIME': 'first', 'SEASON': 'first', 'NAME': 'first',
         'LAT': 'first', 'LON': 'first'
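The errors='coerce' conversions matter here: IBTrACS-derived CSVs contain blanks and placeholder strings in the wind and pressure columns, and coercion turns those into NaN instead of raising, so the groupby aggregations simply skip them. A small sketch:

import pandas as pd

s = pd.Series(['85', ' ', '100', 'NR'])
winds = pd.to_numeric(s, errors='coerce')  # [85.0, NaN, 100.0, NaN]
print(winds.max())                         # 100.0 -- NaN entries are ignored by max()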
@@ -261,18 +164,17 @@ def merge_data(oni_long, typhoon_max):
     return pd.merge(typhoon_max, oni_long, on=['Year', 'Month'])
 
 def categorize_typhoon(wind_speed):
-
-    if wind_speed_kt >= 137:
+    if wind_speed >= 137:
         return 'C5 Super Typhoon'
-    elif
+    elif wind_speed >= 113:
         return 'C4 Very Strong Typhoon'
-    elif
+    elif wind_speed >= 96:
         return 'C3 Strong Typhoon'
-    elif
+    elif wind_speed >= 83:
         return 'C2 Typhoon'
-    elif
+    elif wind_speed >= 64:
         return 'C1 Typhoon'
-    elif
+    elif wind_speed >= 34:
         return 'Tropical Storm'
     else:
         return 'Tropical Depression'
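The fixed function renames the undefined wind_speed_kt to the actual parameter; the thresholds on the "+" side (in knots) mirror the Saffir-Simpson category boundaries also used in atlantic_standard. A quick standalone sanity check of the fixed version:

def categorize_typhoon(wind_speed):
    # Thresholds in knots, copied from the "+" side of the diff above.
    if wind_speed >= 137:
        return 'C5 Super Typhoon'
    elif wind_speed >= 113:
        return 'C4 Very Strong Typhoon'
    elif wind_speed >= 96:
        return 'C3 Strong Typhoon'
    elif wind_speed >= 83:
        return 'C2 Typhoon'
    elif wind_speed >= 64:
        return 'C1 Typhoon'
    elif wind_speed >= 34:
        return 'Tropical Storm'
    else:
        return 'Tropical Depression'

assert categorize_typhoon(140) == 'C5 Super Typhoon'    # at/above 137 kt
assert categorize_typhoon(64) == 'C1 Typhoon'           # hurricane-force boundary
assert categorize_typhoon(33) == 'Tropical Depression'  # below 34 kt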
@@ -287,21 +189,44 @@ def classify_enso_phases(oni_value):
     else:
         return 'Neutral'
 
-#
-
+# ------------------ IBTrACS Data Loading (for typhoon options) ------------------
+def load_ibtracs_data():
+    ibtracs_data = {}
+    for basin, filename in BASIN_FILES.items():
+        local_path = os.path.join(DATA_PATH, filename)
+        if not os.path.exists(local_path):
+            print(f"Downloading {basin} basin file...")
+            response = requests.get(IBTRACS_BASE_URL + filename)
+            response.raise_for_status()
+            with open(local_path, 'wb') as f:
+                f.write(response.content)
+            print(f"Downloaded {basin} basin file.")
+        try:
+            print(f"--> Starting to read in IBTrACS data for basin {basin}")
+            ds = tracks.TrackDataset(source='ibtracs', ibtracs_url=local_path)
+            print(f"--> Completed reading in IBTrACS data for basin {basin}")
+            ibtracs_data[basin] = ds
+        except ValueError as e:
+            print(f"Warning: Skipping basin {basin} due to error: {e}")
+            ibtracs_data[basin] = None
+    return ibtracs_data
+
 ibtracs = load_ibtracs_data()
-
+
+# ------------------ Load and Process Data ------------------
+update_oni_data()
 oni_data, typhoon_data = load_data(ONI_DATA_PATH, TYPHOON_DATA_PATH)
 oni_long = process_oni_data(oni_data)
 typhoon_max = process_typhoon_data(typhoon_data)
 merged_data = merge_data(oni_long, typhoon_max)
 
-#
+# ------------------ Visualization Functions ------------------
 def generate_typhoon_tracks(filtered_data, typhoon_search):
     fig = go.Figure()
     for sid in filtered_data['SID'].unique():
         storm_data = filtered_data[filtered_data['SID'] == sid]
-
+        phase = storm_data['ENSO_Phase'].iloc[0]
+        color = {'El Nino': 'red', 'La Nina': 'blue', 'Neutral': 'green'}.get(phase, 'black')
         fig.add_trace(go.Scattergeo(
             lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
             name=storm_data['NAME'].iloc[0], line=dict(width=2, color=color)
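Only the final branch of classify_enso_phases is visible in this hunk. Its full body is not shown, but the conventional ONI cutoffs of plus/minus 0.5 are implied by the track-coloring rule later in the file (storm_oni >= 0.5 red, <= -0.5 blue), so a plausible sketch of the classifier is:

def classify_enso_phases(oni_value):
    # Assumed thresholds, inferred from the coloring logic elsewhere in this file.
    if oni_value >= 0.5:
        return 'El Nino'
    elif oni_value <= -0.5:
        return 'La Nina'
    else:
        return 'Neutral'

print(classify_enso_phases(1.2))   # El Nino
print(classify_enso_phases(-0.7))  # La Nina
print(classify_enso_phases(0.1))   # Neutral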
@@ -369,29 +294,20 @@ def generate_regression_analysis(filtered_data):
 def generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
     start_date = datetime(start_year, start_month, 1)
     end_date = datetime(end_year, end_month, 28)
-    filtered_data = merged_data[
-        (merged_data['ISO_TIME'] >= start_date) &
-        (merged_data['ISO_TIME'] <= end_date)
-    ]
+    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
     filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
     if enso_phase != 'all':
         filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
-
     tracks_fig = generate_typhoon_tracks(filtered_data, typhoon_search)
     wind_scatter = generate_wind_oni_scatter(filtered_data, typhoon_search)
     pressure_scatter = generate_pressure_oni_scatter(filtered_data, typhoon_search)
     regression_fig, slopes_text = generate_regression_analysis(filtered_data)
-
     return tracks_fig, wind_scatter, pressure_scatter, regression_fig, slopes_text
 
-# Get full tracks function for Track Visualization tab
 def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
     start_date = datetime(start_year, start_month, 1)
     end_date = datetime(end_year, end_month, 28)
-    filtered_data = merged_data[
-        (merged_data['ISO_TIME'] >= start_date) &
-        (merged_data['ISO_TIME'] <= end_date)
-    ]
+    filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
    filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
     if enso_phase != 'all':
         filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
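The substantive part of this change is the trailing .copy(): without it, the next line assigns a new ENSO_Phase column into what may be a view of merged_data, which pandas flags with SettingWithCopyWarning. A toy illustration of why the copy is needed:

import pandas as pd

df = pd.DataFrame({'ONI': [0.8, -0.6, 0.1], 'NAME': ['A', 'B', 'C']})

# Without .copy(), assigning to a filtered slice may write into a view of df
# and trigger pandas' SettingWithCopyWarning; .copy() makes the intent explicit.
subset = df[df['ONI'] >= 0.0].copy()
subset['ENSO_Phase'] = subset['ONI'].apply(lambda v: 'El Nino' if v >= 0.5 else 'Neutral')
print(subset)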
@@ -400,7 +316,7 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, ty
     fig = go.Figure()
     for sid in unique_storms:
         storm_data = typhoon_data[typhoon_data['SID'] == sid]
-        name = storm_data['NAME'].iloc[0] if
+        name = storm_data['NAME'].iloc[0] if pd.notnull(storm_data['NAME'].iloc[0]) else "Unnamed"
         storm_oni = filtered_data[filtered_data['SID'] == sid]['ONI'].iloc[0]
         color = 'red' if storm_oni >= 0.5 else ('blue' if storm_oni <= -0.5 else 'green')
         fig.add_trace(go.Scattergeo(
@@ -445,7 +361,6 @@ def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, ty
     )
     return fig, f"Total typhoons displayed: {count}"
 
-# Analysis functions for Wind, Pressure, and Longitude tabs
 def get_wind_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
     results = generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search)
     regression = perform_wind_regression(start_year, start_month, end_year, end_month)
@@ -461,7 +376,6 @@ def get_longitude_analysis(start_year, start_month, end_year, end_month, enso_ph
     regression = perform_longitude_regression(start_year, start_month, end_year, end_month)
     return results[3], results[4], regression
 
-# Video animation function with fixed sidebar and wind radius visualization
 def categorize_typhoon_by_standard(wind_speed, standard):
     if standard == 'taiwan':
         wind_speed_ms = wind_speed * 0.514444
@@ -487,626 +401,236 @@ def categorize_typhoon_by_standard(wind_speed, standard):
         return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
     return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']
 
-
-
+# ------------------ Animation Functions Using Processed CSV ------------------
+def generate_track_video_from_csv(year, storm_id, standard):
+    # Filter processed CSV data for the storm ID
+    storm_df = typhoon_data[typhoon_data['SID'] == storm_id].copy()
+    if storm_df.empty:
+        print("No data found for storm:", storm_id)
         return None
-
-
-
-
+    storm_df = storm_df.sort_values('ISO_TIME')
+    lats = storm_df['LAT'].astype(float).values
+    lons = storm_df['LON'].astype(float).values
+    times = pd.to_datetime(storm_df['ISO_TIME']).values
+    if 'USA_WIND' in storm_df.columns:
+        winds = pd.to_numeric(storm_df['USA_WIND'], errors='coerce').values
+    else:
+        winds = np.full(len(lats), np.nan)
+    storm_name = storm_df['NAME'].iloc[0]
+    season = storm_df['SEASON'].iloc[0]
+
+    # Set up map boundaries
+    min_lat, max_lat = np.min(lats), np.max(lats)
+    min_lon, max_lon = np.min(lons), np.max(lons)
     lat_padding = max((max_lat - min_lat) * 0.3, 5)
     lon_padding = max((max_lon - min_lon) * 0.3, 5)
-
-    #
-    fig = plt.figure(figsize=(
-    ax = plt.axes([0.05, 0.05, 0.
-
-
-        basin_name = "Western North Pacific"
-    elif basin == "EP":
-        basin_name = "Eastern North Pacific"
-    elif basin == "NA" or basin == "AL":
-        basin_name = "North Atlantic"
-    elif basin == "NI" or basin == "IO":
-        basin_name = "North Indian"
-    elif basin == "SI":
-        basin_name = "South Indian"
-    elif basin == "SP":
-        basin_name = "South Pacific"
-    elif basin == "SA" or basin == "SL":
-        basin_name = "South Atlantic"
-    elif basin == "All":
-        basin_name = "Global Oceans"
-
-    # Add world map features
+
+    # Create a larger figure with custom central longitude for better regional focus
+    fig = plt.figure(figsize=(12, 9), dpi=100)
+    ax = plt.axes([0.05, 0.05, 0.60, 0.90],
+                  projection=ccrs.PlateCarree(central_longitude=-25))
+    ax.set_extent([min_lon - lon_padding, max_lon + lon_padding, min_lat - lat_padding, max_lat + lat_padding],
+                  crs=ccrs.PlateCarree())
+
+    # Add map features
     ax.add_feature(cfeature.LAND, facecolor='lightgray')
     ax.add_feature(cfeature.OCEAN, facecolor='lightblue')
     ax.add_feature(cfeature.COASTLINE, edgecolor='black')
     ax.add_feature(cfeature.BORDERS, linestyle=':', edgecolor='gray')
     ax.gridlines(draw_labels=True, linestyle='--', color='gray', alpha=0.5)
-
-
-    # Initialize the line and point
+    ax.set_title(f"{year} {storm_name} - {season}", fontsize=16)
+
+    # Plot track and state marker
     line, = ax.plot([], [], 'b-', linewidth=2, transform=ccrs.PlateCarree())
-    point, = ax.plot([], [], 'o', markersize=
-
-
-
-    # Add color legend
-    standard_dict = atlantic_standard if standard == 'atlantic' else taiwan_standard
-    legend_elements = [plt.Line2D([0], [0], marker='o', color='w', label=f"{cat}",
-                                  markerfacecolor=details['hex'], markersize=10)
-                       for cat, details in standard_dict.items()]
-    fig.legend(handles=legend_elements, title="Color Legend", loc='center right',
-               bbox_to_anchor=(0.95, 0.5), fontsize=10)
-
-    # Add wind radius legend
-    radius_legend = [
-        plt.Line2D([0], [0], color='blue', label='34kt Gale Force'),
-        plt.Line2D([0], [0], color='orange', label='50kt Storm Force'),
-        plt.Line2D([0], [0], color='red', label='64kt Hurricane Force')
-    ]
-    fig.legend(handles=radius_legend, title="Wind Radii", loc='lower right',
-               bbox_to_anchor=(0.95, 0.15), fontsize=9)
-
+    point, = ax.plot([], [], 'o', markersize=10, transform=ccrs.PlateCarree())
+
+    # Dynamic text elements
+    date_text = ax.text(0.02, 0.02, '', transform=ax.transAxes, fontsize=12, bbox=dict(facecolor='white', alpha=0.8))
+    state_text = fig.text(0.70, 0.60, '', fontsize=14, verticalalignment='top',
+                          bbox=dict(facecolor='white', alpha=0.8, boxstyle='round,pad=0.5'))
+
+    # Persistent legend for color mapping (placed on right)
+    legend_elements = [plt.Line2D([0], [0], marker='o', color='w', label=f"{cat}",
+                                  markerfacecolor=details['hex'], markersize=10)
+                       for cat, details in (atlantic_standard if standard=='atlantic' else taiwan_standard).items()]
+    ax.legend(handles=legend_elements, title="Storm Categories", loc='upper right', fontsize=10)
+
def init():
|
570 |
line.set_data([], [])
|
571 |
point.set_data([], [])
|
572 |
date_text.set_text('')
|
573 |
-
|
574 |
-
|
575 |
-
patch.set_center((0, 0))
|
576 |
-
patch.set_radius(0)
|
577 |
-
patch.set_visible(False)
|
578 |
-
return [line, point, date_text, details_text] + radius_patches
|
579 |
|
580 |
def update(frame):
|
581 |
-
line
|
582 |
-
|
583 |
-
|
|
|
|
|
|
|
584 |
point.set_color(color)
|
585 |
-
|
586 |
-
|
587 |
-
# Update
|
588 |
-
|
589 |
-
|
590 |
-
|
591 |
-
|
592 |
-
|
593 |
-
|
594 |
-
|
595 |
-
|
596 |
-
|
597 |
-
# Check USA agency data
|
598 |
-
for quadrant in ['ne', 'se', 'sw', 'nw']:
|
599 |
-
attr = f'usa_r{wind_kt}_{quadrant}'
|
600 |
-
if hasattr(storm, attr) and frame < len(getattr(storm, attr)) and not np.isnan(getattr(storm, attr)[frame]):
|
601 |
-
radius_values.append(getattr(storm, attr)[frame])
|
602 |
-
|
603 |
-
# If no USA data, check BOM data
|
604 |
-
if not radius_values:
|
605 |
-
for quadrant in ['ne', 'se', 'sw', 'nw']:
|
606 |
-
attr = f'bom_r{wind_kt}_{quadrant}'
|
607 |
-
if hasattr(storm, attr) and frame < len(getattr(storm, attr)) and not np.isnan(getattr(storm, attr)[frame]):
|
608 |
-
radius_values.append(getattr(storm, attr)[frame])
|
609 |
-
|
610 |
-
# If still no data, try Reunion data
|
611 |
-
if not radius_values:
|
612 |
-
for quadrant in ['ne', 'se', 'sw', 'nw']:
|
613 |
-
attr = f'reunion_r{wind_kt}_{quadrant}'
|
614 |
-
if hasattr(storm, attr) and frame < len(getattr(storm, attr)) and not np.isnan(getattr(storm, attr)[frame]):
|
615 |
-
radius_values.append(getattr(storm, attr)[frame])
|
616 |
-
|
617 |
-
if radius_values:
|
618 |
-
# Calculate average radius (nautical miles)
|
619 |
-
avg_radius = np.mean(radius_values)
|
620 |
-
|
621 |
-
# Convert from nautical miles to approximate degrees (1 nm ≈ 1/60 degree)
|
622 |
-
radius_deg = avg_radius / 60.0
|
623 |
-
|
624 |
-
radius_patches[i].set_center((storm.lon[frame], storm.lat[frame]))
|
625 |
-
radius_patches[i].set_radius(radius_deg)
|
626 |
-
radius_patches[i].set_edgecolor(circle_color)
|
627 |
-
radius_patches[i].set_visible(True)
|
628 |
-
|
629 |
-
radius_info.append(f"{wind_kt}kt radius: {avg_radius:.1f} nm")
|
630 |
-
else:
|
631 |
-
radius_patches[i].set_visible(False)
|
632 |
-
radius_info.append(f"{wind_kt}kt radius: 0 nm")
|
633 |
-
|
634 |
-
# Add radius information to details
|
635 |
-
radius_text = "\n".join(radius_info)
|
636 |
-
|
637 |
-
# Get pressure value if available, otherwise show 'N/A'
|
638 |
-
pressure_value = storm.mslp[frame] if hasattr(storm, 'mslp') and frame < len(storm.mslp) and not np.isnan(storm.mslp[frame]) else 'N/A'
|
639 |
-
pressure_text = f"Pressure: {pressure_value if pressure_value != 'N/A' else 'N/A'} mb"
|
640 |
-
|
641 |
-
details = f"Name: {storm.name}\n" \
|
642 |
-
f"Date: {storm.time[frame].strftime('%Y-%m-%d %H:%M')}\n" \
|
643 |
-
f"Wind Speed: {storm.vmax[frame]:.1f} kt\n" \
|
644 |
-
f"{pressure_text}\n" \
|
645 |
-
f"Category: {category}\n" \
|
646 |
-
f"\nWind Radii:\n{radius_text}"
|
647 |
-
|
648 |
-
details_text.set_text(details)
|
649 |
-
return [line, point, date_text, details_text] + radius_patches
|
650 |
-
|
651 |
-
ani = animation.FuncAnimation(fig, update, init_func=init, frames=len(storm.time),
|
652 |
interval=200, blit=True, repeat=True)
|
653 |
-
|
654 |
-
# Save as video
|
655 |
temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.mp4')
|
656 |
writer = animation.FFMpegWriter(fps=5, bitrate=1800)
|
657 |
ani.save(temp_file.name, writer=writer)
|
658 |
plt.close(fig)
|
659 |
-
|
660 |
return temp_file.name
|
661 |
|
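One operational caveat on the retained save path: animation.FFMpegWriter requires an ffmpeg binary on the host, and ani.save fails without it. A hedged sketch of a fallback (PillowWriter produces a GIF rather than MP4, so the gr.Video output configured below would also need adjusting; save_animation and the paths are hypothetical names for illustration):

import matplotlib.animation as animation

def save_animation(ani, path_mp4, path_gif):
    # Prefer MP4 via ffmpeg; fall back to an animated GIF if ffmpeg is missing.
    if animation.FFMpegWriter.isAvailable():
        ani.save(path_mp4, writer=animation.FFMpegWriter(fps=5, bitrate=1800))
        return path_mp4
    ani.save(path_gif, writer=animation.PillowWriter(fps=5))
    return path_gif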
 def simplified_track_video(year, basin, typhoon, standard):
     if not typhoon:
         return None
-
-    typhoon_id = typhoon.split('(')[-1].strip(')')
-
-    # Extract basin code from the basin selection
-    basin_code = "All"
-    if basin != "All Basins":
-        basin_code = basin.split(' - ')[0]
-
-    # Generate the animation
-    return generate_track_video(year, basin_code, typhoon, standard)
+    storm_id = typhoon.split('(')[-1].strip(')')
+    return generate_track_video_from_csv(year, storm_id, standard)
 
678 |
-
|
679 |
-
|
680 |
-
|
681 |
-
|
682 |
-
|
683 |
-
|
684 |
-
y = data['severe_typhoon']
|
685 |
-
model = sm.Logit(y, X).fit()
|
686 |
-
beta_1, exp_beta_1, p_value = model.params['ONI'], np.exp(model.params['ONI']), model.pvalues['ONI']
|
687 |
-
return f"Wind Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"
|
688 |
-
|
689 |
-
def perform_pressure_regression(start_year, start_month, end_year, end_month):
|
690 |
-
start_date = datetime(start_year, start_month, 1)
|
691 |
-
end_date = datetime(end_year, end_month, 28)
|
692 |
-
data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['USA_PRES', 'ONI'])
|
693 |
-
data['intense_typhoon'] = (data['USA_PRES'] <= 950).astype(int)
|
694 |
-
X = sm.add_constant(data['ONI'])
|
695 |
-
y = data['intense_typhoon']
|
696 |
-
model = sm.Logit(y, X).fit()
|
697 |
-
beta_1, exp_beta_1, p_value = model.params['ONI'], np.exp(model.params['ONI']), model.pvalues['ONI']
|
698 |
-
return f"Pressure Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"
|
699 |
-
|
700 |
-
def perform_longitude_regression(start_year, start_month, end_year, end_month):
|
701 |
-
start_date = datetime(start_year, start_month, 1)
|
702 |
-
end_date = datetime(end_year, end_month, 28)
|
703 |
-
data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].dropna(subset=['LON', 'ONI'])
|
704 |
-
data['western_typhoon'] = (data['LON'] <= 140).astype(int)
|
705 |
-
X = sm.add_constant(data['ONI'])
|
706 |
-
y = data['western_typhoon']
|
707 |
-
model = sm.Logit(y, X).fit()
|
708 |
-
beta_1, exp_beta_1, p_value = model.params['ONI'], np.exp(model.params['ONI']), model.pvalues['ONI']
|
709 |
-
return f"Longitude Regression: β1={beta_1:.4f}, Odds Ratio={exp_beta_1:.4f}, P-value={p_value:.4f}"
|
710 |
-
|
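The three removed regression helpers share one pattern: binarize an outcome, regress it on ONI with a statsmodels logistic model, and report the slope and exp(slope) as an odds ratio. The start of the wind variant was lost in this view, so here is a self-contained sketch of the pattern on synthetic data (the severe_typhoon threshold used by the removed code is not visible; the outcome below is purely illustrative):

import numpy as np
import pandas as pd
import statsmodels.api as sm

rng = np.random.default_rng(42)
data = pd.DataFrame({'ONI': rng.normal(0, 1, 500)})
# Synthetic binary outcome loosely tied to ONI, standing in for the real threshold.
data['severe_typhoon'] = (data['ONI'] + rng.normal(0, 1, 500) > 0.5).astype(int)

X = sm.add_constant(data['ONI'])
model = sm.Logit(data['severe_typhoon'], X).fit(disp=0)
beta_1 = model.params['ONI']
print(f"beta1={beta_1:.4f}, odds ratio={np.exp(beta_1):.4f}, p={model.pvalues['ONI']:.4f}")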
-# t-SNE clustering functions
-def filter_west_pacific_coordinates(lons, lats):
-    mask = (lons >= 100) & (lons <= 180) & (lats >= 0) & (lats <= 40)
-    return lons[mask], lats[mask]
-
-def filter_storm_by_season(storm, season):
-    start_month = storm.time[0].month
-    if season == 'all':
-        return True
-    elif season == 'summer':
-        return 4 <= start_month <= 8
-    elif season == 'winter':
-        return 9 <= start_month <= 12
-    return False
-
-def point_region(lat, lon):
-    twl = regions["Taiwan Land"]
-    if twl["lat_min"] <= lat <= twl["lat_max"] and twl["lon_min"] <= lon <= twl["lon_max"]:
-        return "Taiwan Land"
-    tws = regions["Taiwan Sea"]
-    if tws["lat_min"] <= lat <= tws["lat_max"] and tws["lon_min"] <= lon <= tws["lon_max"]:
-        if not (twl["lat_min"] <= lat <= twl["lat_max"] and twl["lon_min"] <= lon <= twl["lon_max"]):
-            return "Taiwan Sea"
-    for rg in ["Japan", "China", "Hong Kong", "Philippines"]:
-        box = regions[rg]
-        if box["lat_min"] <= lat <= box["lat_max"] and box["lon_min"] <= lon <= box["lon_max"]:
-            return rg
-    return None
-
-def calculate_region_durations(lons, lats, times):
-    region_times = defaultdict(float)
-    point_regions_list = [point_region(lats[i], lons[i]) for i in range(len(lons))]
-    for i in range(len(lons) - 1):
-        dt = (times[i + 1] - times[i]).total_seconds() / 3600.0
-        r1 = point_regions_list[i]
-        r2 = point_regions_list[i + 1]
-        if r1 and r2:
-            if r1 == r2:
-                region_times[r1] += dt
-            else:
-                region_times[r1] += dt / 2
-                region_times[r2] += dt / 2
-        elif r1 and not r2:
-            region_times[r1] += dt / 2
-        elif r2 and not r1:
-            region_times[r2] += dt / 2
-    return dict(region_times)
 
-
def
|
760 |
-
|
761 |
-
|
762 |
-
|
763 |
-
|
764 |
-
|
765 |
-
|
766 |
-
|
767 |
-
|
768 |
-
|
769 |
-
|
770 |
-
|
771 |
-
|
772 |
-
|
773 |
-
|
774 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
775 |
|
776 |
-
def
|
777 |
-
|
778 |
-
|
779 |
-
|
780 |
-
|
781 |
-
|
782 |
-
|
783 |
-
|
784 |
-
|
785 |
-
|
786 |
-
|
787 |
-
|
788 |
-
|
789 |
-
|
790 |
-
|
791 |
-
|
792 |
-
|
793 |
-
|
794 |
-
if best_labels is None:
|
795 |
-
for eps in eps_values[::-1]:
|
796 |
-
dbscan = DBSCAN(eps=eps, min_samples=3)
|
797 |
-
labels = dbscan.fit_predict(tsne_results)
|
798 |
-
unique_labels = set(labels)
|
799 |
-
if -1 in unique_labels:
|
800 |
-
unique_labels.remove(-1)
|
801 |
-
n_clusters = len(unique_labels)
|
802 |
-
if n_clusters == max_clusters:
|
803 |
-
best_labels = labels
|
804 |
-
best_n_clusters = n_clusters
|
805 |
-
best_n_noise = np.sum(labels == -1)
|
806 |
-
best_eps = eps
|
807 |
-
break
|
808 |
-
return best_labels, best_n_clusters, best_n_noise, best_eps
|
809 |
|
|
|
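The definition lines of the two removed helpers above were cut off in this view; only the fallback branch of the eps search survives. Its logic scans DBSCAN eps values from largest to smallest and keeps the first one that yields the target cluster count. A compact sketch of that scan (pick_eps, eps_values, and max_clusters are assumed names and defaults, not visible in the diff):

import numpy as np
from sklearn.cluster import DBSCAN

def pick_eps(tsne_results, eps_values=np.linspace(1.0, 10.0, 19), max_clusters=5):
    # Return labels for the largest eps producing exactly max_clusters clusters.
    for eps in eps_values[::-1]:
        labels = DBSCAN(eps=eps, min_samples=3).fit_predict(tsne_results)
        n_clusters = len(set(labels) - {-1})  # -1 marks DBSCAN noise points
        if n_clusters == max_clusters:
            return labels, n_clusters, int(np.sum(labels == -1)), eps
    return None, 0, 0, None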
 def update_route_clusters(start_year, start_month, end_year, end_month, enso_value, season):
-
-
 
     all_storms_data = []
-    for
-
-
-        if enso_value == 'all' or enso_phase_storm == enso_value.capitalize():
-            all_storms_data.append((lons, lats, np.array(storm.vmax), np.array(storm.mslp), np.array(storm.time), storm.name, enso_phase_storm))
-
     if not all_storms_data:
-        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No
-
-
     max_length = max(len(st[0]) for st in all_storms_data)
     route_vectors = []
     filtered_storms = []
-
-    storms_mslp_list = []
-    for idx, (lons, lats, vmax, mslp, times, name, enso_phase) in enumerate(all_storms_data):
         t = np.linspace(0, 1, len(lons))
         t_new = np.linspace(0, 1, max_length)
         try:
             lon_i = interp1d(t, lons, kind='linear', fill_value='extrapolate')(t_new)
             lat_i = interp1d(t, lats, kind='linear', fill_value='extrapolate')(t_new)
-
-            if not np.all(np.isnan(mslp)):
-                mslp_i = interp1d(t, mslp, kind='linear', fill_value='extrapolate')(t_new)
-            else:
-                mslp_i = np.full(max_length, np.nan)
-        except Exception as e:
             continue
         route_vector = np.column_stack((lon_i, lat_i)).flatten()
         if np.isnan(route_vector).any():
             continue
         route_vectors.append(route_vector)
-        filtered_storms.append((
-        storms_vmax_list.append(vmax_i)
-        storms_mslp_list.append(mslp_i)
-
     route_vectors = np.array(route_vectors)
     if len(route_vectors) == 0:
         return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms after interpolation."
-
-    # Perform t-SNE
     tsne = TSNE(n_components=2, random_state=42, verbose=1)
     tsne_results = tsne.fit_transform(route_vectors)
-
-    best_labels, best_n_clusters, best_n_noise, best_eps = dynamic_dbscan(tsne_results)
-
-    # Calculate region durations and mean routes
     unique_labels = sorted(set(best_labels) - {-1})
-    label_to_idx = {label: i for i, label in enumerate(unique_labels)}
-    cluster_region_durations = [defaultdict(float) for _ in range(len(unique_labels))]
-    cluster_mean_routes = []
-    cluster_mean_vmax = []
-    cluster_mean_mslp = []
-
-    for i, (lons, lats, vmax, mslp, times) in enumerate(filtered_storms):
-        c = best_labels[i]
-        if c == -1:
-            continue
-        durations = calculate_region_durations(lons, lats, times)
-        idx = label_to_idx[c]
-        for r, val in durations.items():
-            cluster_region_durations[idx][r] += val
-
-    for c in unique_labels:
-        indices = np.where(best_labels == c)[0]
-        if len(indices) == 0:
-            cluster_mean_routes.append(([], []))
-            cluster_mean_vmax.append([])
-            cluster_mean_mslp.append([])
-            continue
-        cluster_lons = []
-        cluster_lats = []
-        cluster_v = []
-        cluster_p = []
-        for idx in indices:
-            lons, lats, vmax_, mslp_, times = filtered_storms[idx]
-            t = np.linspace(0, 1, len(lons))
-            t_new = np.linspace(0, 1, max_length)
-            lon_i = interp1d(t, lons, kind='linear', fill_value='extrapolate')(t_new)
-            lat_i = interp1d(t, lats, kind='linear', fill_value='extrapolate')(t_new)
-            cluster_lons.append(lon_i)
-            cluster_lats.append(lat_i)
-            cluster_v.append(storms_vmax_list[idx])
-            if not np.all(np.isnan(storms_mslp_list[idx])):
-                cluster_p.append(storms_mslp_list[idx])
-        if cluster_lons and cluster_lats:
-            mean_lon = np.mean(cluster_lons, axis=0)
-            mean_lat = np.mean(cluster_lats, axis=0)
-            mean_v = np.mean(cluster_v, axis=0)
-            if cluster_p:
-                mean_p = np.nanmean(cluster_p, axis=0)
-            else:
-                mean_p = np.full(max_length, np.nan)
-            cluster_mean_routes.append((mean_lon, mean_lat))
-            cluster_mean_vmax.append(mean_v)
-            cluster_mean_mslp.append(mean_p)
-        else:
-            cluster_mean_routes.append(([], []))
-            cluster_mean_vmax.append([])
-            cluster_mean_mslp.append([])
-
-    # t-SNE Scatter Plot
     fig_tsne = go.Figure()
-
-    for i, c in enumerate(unique_labels):
-        indices = np.where(best_labels == c)[0]
-        end_reg = endpoint_region_label(c, best_labels, filtered_storms)
-        name = f"Cluster {i+1}" + (f" (towards {end_reg})" if end_reg else "")
         fig_tsne.add_trace(go.Scatter(
             x=tsne_results[indices, 0],
             y=tsne_results[indices, 1],
             mode='markers',
-            marker=dict(
-            name=
-        ))
-    noise_indices = np.where(best_labels == -1)[0]
-    if len(noise_indices) > 0:
-        fig_tsne.add_trace(go.Scatter(
-            x=tsne_results[noise_indices, 0],
-            y=tsne_results[noise_indices, 1],
-            mode='markers',
-            marker=dict(size=5, color='grey'),
-            name='Noise'
         ))
-    fig_tsne.update_layout(
-        title="TSNE of Typhoon Routes",
-        xaxis_title="TSNE Dim 1",
-        yaxis_title="TSNE Dim 2",
-        legend_title="Clusters"
-    )
-
-    # Typhoon Routes Plot with Mean Routes
     fig_routes = go.Figure()
-
-            continue
-        color_idx = label_to_idx[c]
-        fig_routes.add_trace(
-            go.Scattergeo(
-                lon=lons,
-                lat=lats,
-                mode='lines',
-                opacity=0.3,
-                line=dict(width=1, color=cluster_colors[color_idx % len(cluster_colors)]),
-                showlegend=False
-            )
-        )
-    for i, c in enumerate(unique_labels):
-        mean_lon, mean_lat = cluster_mean_routes[i]
-        if len(mean_lon) == 0:
-            continue
-        end_reg = endpoint_region_label(c, best_labels, filtered_storms)
-        name = f"Cluster {i+1}" + (f" (towards {end_reg})" if end_reg else "")
-        fig_routes.add_trace(
-            go.Scattergeo(
-                lon=mean_lon,
-                lat=mean_lat,
-                mode='lines',
-                line=dict(width=4, color=cluster_colors[i % len(cluster_colors)]),
-                name=name
-            )
-        )
-        fig_routes.add_trace(
-            go.Scattergeo(
-                lon=[mean_lon[0]],
-                lat=[mean_lat[0]],
-                mode='markers',
-                marker=dict(size=10, color='green', symbol='triangle-up'),
-                name=f"Cluster {i+1} Start"
-            )
-        )
-        fig_routes.add_trace(
-            go.Scattergeo(
-                lon=[mean_lon[-1]],
-                lat=[mean_lat[-1]],
-                mode='markers',
-                marker=dict(size=10, color='red', symbol='x'),
-                name=f"Cluster {i+1} End"
-            )
-        )
-    enso_phase_text = {'all': 'All Years', 'El Nino': 'El Niño', 'La Nina': 'La Niña', 'Neutral': 'Neutral Years'}
-    fig_routes.update_layout(
-        title=f"West Pacific Typhoon Routes ({start_year}-{end_year}, {season.capitalize()}, {enso_phase_text.get(enso_value, 'All Years')})",
-        geo=dict(scope='asia', projection_type='mercator', showland=True, landcolor='lightgray')
-    )
-
-    # Cluster Statistics Plot
-    fig_stats = make_subplots(rows=2, cols=1, shared_xaxes=True, subplot_titles=("Average Wind Speed", "Average Pressure"))
-    for i, c in enumerate(unique_labels):
-        if len(cluster_mean_vmax[i]) > 0:
-            end_reg = endpoint_region_label(c, best_labels, filtered_storms)
-            name = f"Cluster {i+1}" + (f" ({end_reg})" if end_reg else "")
-            fig_stats.add_trace(
-                go.Scatter(y=cluster_mean_vmax[i], mode='lines', line=dict(width=2, color=cluster_colors[i % len(cluster_colors)]), name=name),
-                row=1, col=1
-            )
-            if not np.all(np.isnan(cluster_mean_mslp[i])):
-                fig_stats.add_trace(
-                    go.Scatter(y=cluster_mean_mslp[i], mode='lines', line=dict(width=2, color=cluster_colors[i % len(cluster_colors)]), name=name),
-                    row=2, col=1
-                )
-    fig_stats.update_layout(
-        title="Cluster Average Wind & Pressure Profiles",
-        xaxis_title="Route Normalized Index",
-        yaxis_title="Wind Speed (knots)",
-        xaxis2_title="Route Normalized Index",
-        yaxis2_title="Pressure (hPa)",
-        showlegend=True,
-        legend_tracegroupgap=300
-    )
-
-    # Cluster Information
-    cluster_info_lines = [f"Selected DBSCAN eps: {best_eps:.2f}", f"Number of noise points: {best_n_noise}"]
-    for i, c in enumerate(unique_labels):
-        indices = np.where(best_labels == c)[0]
-        count = len(indices)
-        if count == 0:
-            continue
-        avg_durations = {r: (cluster_region_durations[i][r] / count) for r in cluster_region_durations[i]}
-        end_reg = endpoint_region_label(c, best_labels, filtered_storms)
-        name = f"Cluster {i+1}" + (f" (towards {end_reg})" if end_reg else "")
-        cluster_info_lines.append(f"\n{name}")
-        if avg_durations:
-            for reg, hrs in avg_durations.items():
-                cluster_info_lines.append(f"{reg}: {hrs:.2f} hours")
-        else:
-            cluster_info_lines.append("No significant region durations.")
-        if end_reg in ["Taiwan Land", "Taiwan Sea"] and len(cluster_mean_vmax[i]) > 0:
-            final_wind = cluster_mean_vmax[i][-1]
-            if final_wind >= 34:
-                cluster_info_lines.append(
-                    "CWA would issue a land warning ~18 hours before arrival." if end_reg == "Taiwan Land"
-                    else "CWA would issue a sea warning ~24 hours before arrival."
-                )
-    if len(noise_indices) > 0:
-        cluster_info_lines.append(f"\nNoise Cluster\nNumber of storms classified as noise: {len(noise_indices)}")
-
-    cluster_info_text = "\n".join(cluster_info_lines)
-    return fig_tsne, fig_routes, fig_stats, cluster_info_text
-
-# Define the basin to prefix mapping
-basin_to_prefix = {
-    "All Basins": None,
-    "NA - North Atlantic": "AL",
-    "EP - Eastern North Pacific": "EP",
-    "WP - Western North Pacific": "WP",
-    "NI - North Indian": ["IO", "BB", "AS"],  # Multiple prefixes for North Indian
-    "SI - South Indian": "SI",
-    "SP - Southern Pacific": "SP",
-    "SA - South Atlantic": "SL"
-}
-
-# Update typhoon options function for animation tab
-def update_typhoon_options(year, basin):
-    try:
-        season = ibtracs.get_season(int(year))
-        storm_summary = season.summary()
-
-        # Get the prefix for filtering
-        prefix = basin_to_prefix.get(basin)
-
-        # Get all storms for the year
-        options = []
-        for i in range(len(storm_summary)):
-            try:
-                name = storm_summary['name'][i] if not pd.isna(storm_summary['name'][i]) else "Unnamed"
-                storm_id = storm_summary['id'][i]
-
-                # Filter by basin if a specific basin is selected
-                if prefix is None or (isinstance(prefix, list) and any(storm_id.startswith(p) for p in prefix)) or (not isinstance(prefix, list) and storm_id.startswith(prefix)):
-                    options.append(f"{name} ({storm_id})")
-            except Exception:
-                continue
-
-        return gr.update(choices=options, value=options[0] if options else None)
-    except Exception as e:
-        print(f"Error updating typhoon options: {e}")
-        return gr.update(choices=[], value=None)
+# ------------------ Typhoon Options Update Functions ------------------
+basin_to_prefix = {
+    "All Basins": "all",
+    "NA - North Atlantic": "NA",
+    "EP - Eastern North Pacific": "EP",
+    "WP - Western North Pacific": "WP"
+}
+
+def update_typhoon_options(year, basin):
+    try:
+        if basin == "All Basins":
+            summaries = []
+            for data in ibtracs.values():
+                if data is not None:
+                    season_data = data.get_season(int(year))
+                    if season_data.summary().empty:
+                        continue
+                    summaries.append(season_data.summary())
+            if len(summaries) == 0:
+                print("Error updating typhoon options: No storms identified for the given year and basin.")
+                return gr.update(choices=[], value=None)
+            combined_summary = pd.concat(summaries, ignore_index=True)
+        else:
+            prefix = basin_to_prefix.get(basin)
+            ds = ibtracs.get(prefix)
+            if ds is None:
+                print("Error updating typhoon options: Dataset not found for the given basin.")
 
-# Gradio Interface
 with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
     gr.Markdown("# Typhoon Analysis Dashboard")
 
@@ -1117,12 +641,12 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
     This dashboard allows you to analyze typhoon data in relation to ENSO phases.
 
     ### Features:
-    - **Track Visualization**: View typhoon tracks by time period and ENSO phase
     - **Wind Analysis**: Examine wind speed vs ONI relationships
     - **Pressure Analysis**: Analyze pressure vs ONI relationships
     - **Longitude Analysis**: Study typhoon generation longitude vs ONI
-    - **Path Animation**: Watch animated tropical cyclone paths with
-    - **TSNE Cluster**: Perform t-SNE clustering on
 
     Select a tab above to begin your analysis.
     """)
@@ -1138,12 +662,7 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
             analyze_btn = gr.Button("Generate Tracks")
         tracks_plot = gr.Plot(label="Typhoon Tracks", elem_id="tracks_plot")
         typhoon_count = gr.Textbox(label="Number of Typhoons Displayed")
-
-        analyze_btn.click(
-            fn=get_full_tracks,
-            inputs=[start_year, start_month, end_year, end_month, enso_phase, typhoon_search],
-            outputs=[tracks_plot, typhoon_count]
-        )
 
     with gr.Tab("Wind Analysis"):
         with gr.Row():
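The .click(...) wiring removed in this and the neighboring tabs follows Gradio's standard Blocks pattern: a button's click event maps input components to the callback's arguments and its return values to output components. A minimal, self-contained sketch of that pattern (all names here are illustrative):

import gradio as gr

def greet(name):
    return f"Hello, {name}!"

with gr.Blocks() as demo_sketch:
    name_box = gr.Textbox(label="Name")
    out_box = gr.Textbox(label="Greeting")
    btn = gr.Button("Greet")
    # inputs feed the callback's parameters; outputs receive its return values.
    btn.click(fn=greet, inputs=[name_box], outputs=[out_box])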
@@ -1156,12 +675,7 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
             wind_analyze_btn = gr.Button("Generate Wind Analysis")
         wind_scatter = gr.Plot(label="Wind Speed vs ONI")
         wind_regression_results = gr.Textbox(label="Wind Regression Results")
-
-        wind_analyze_btn.click(
-            fn=get_wind_analysis,
-            inputs=[wind_start_year, wind_start_month, wind_end_year, wind_end_month, wind_enso_phase, wind_typhoon_search],
-            outputs=[wind_scatter, wind_regression_results]
-        )
 
     with gr.Tab("Pressure Analysis"):
         with gr.Row():
@@ -1174,12 +688,7 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
             pressure_analyze_btn = gr.Button("Generate Pressure Analysis")
         pressure_scatter = gr.Plot(label="Pressure vs ONI")
         pressure_regression_results = gr.Textbox(label="Pressure Regression Results")
-
-        pressure_analyze_btn.click(
-            fn=get_pressure_analysis,
-            inputs=[pressure_start_year, pressure_start_month, pressure_end_year, pressure_end_month, pressure_enso_phase, pressure_typhoon_search],
-            outputs=[pressure_scatter, pressure_regression_results]
-        )
 
     with gr.Tab("Longitude Analysis"):
         with gr.Row():
@@ -1193,65 +702,28 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
         regression_plot = gr.Plot(label="Longitude vs ONI")
         slopes_text = gr.Textbox(label="Regression Slopes")
         lon_regression_results = gr.Textbox(label="Longitude Regression Results")
-
-        lon_analyze_btn.click(
-            fn=get_longitude_analysis,
-            inputs=[lon_start_year, lon_start_month, lon_end_year, lon_end_month, lon_enso_phase, lon_typhoon_search],
-            outputs=[regression_plot, slopes_text, lon_regression_results]
-        )
 
     with gr.Tab("Tropical Cyclone Path Animation"):
         with gr.Row():
             year_dropdown = gr.Dropdown(label="Year", choices=[str(y) for y in range(1950, 2025)], value="2000")
-            basin_dropdown = gr.Dropdown(
-                label="Basin",
-                choices=[
-                    "All Basins",
-                    "NA - North Atlantic",
-                    "EP - Eastern North Pacific",
-                    "WP - Western North Pacific",
-                    "NI - North Indian",
-                    "SI - South Indian",
-                    "SP - Southern Pacific",
-                    "SA - South Atlantic"
-                ],
-                value="WP - Western North Pacific"
-            )
-
         with gr.Row():
             typhoon_dropdown = gr.Dropdown(label="Tropical Cyclone")
             standard_dropdown = gr.Dropdown(label="Classification Standard", choices=['atlantic', 'taiwan'], value='atlantic')
-
         animate_btn = gr.Button("Generate Animation")
-
-        # Use format="mp4" to indicate we expect MP4 video, and no source parameter to avoid upload/webcam UI
         path_video = gr.Video(label="Tropical Cyclone Path Animation", format="mp4", interactive=False, elem_id="path_video")
-
         animation_info = gr.Markdown("""
         ### Animation Instructions
-        1. Select a year and basin from the dropdowns
-        2. Choose a tropical cyclone from the populated list
-        3. Select a classification standard (Atlantic or Taiwan)
-        4. Click "Generate Animation"
-        5. The animation
-        - Tropical cyclone track growing over time with colored markers based on intensity
-        - Wind radius circles (if data available) for 34kt (blue), 50kt (orange), and 64kt (red)
-        - Date/time on the bottom left
-        - Details sidebar showing name, date, wind speed, pressure, category, and wind radii
-        - Color legend for storm categories and wind radii
-
-        Note: Wind radius data may not be available for all storms or all observation times.
-        Different agencies use different wind speed averaging periods: USA (1-min), JTWC (1-min), JMA (10-min), IMD (3-min).
         """)
-
-        animate_btn.click(
-            fn=simplified_track_video,
-            inputs=[year_dropdown, basin_dropdown, typhoon_dropdown, standard_dropdown],
-            outputs=path_video
-        )
 
     with gr.Tab("TSNE Cluster"):
         with gr.Row():
@@ -1266,11 +738,6 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
         routes_plot = gr.Plot(label="Typhoon Routes with Mean Routes")
         stats_plot = gr.Plot(label="Cluster Statistics")
         cluster_info = gr.Textbox(label="Cluster Information", lines=10)
-
-        tsne_analyze_btn.click(
-            fn=update_route_clusters,
-            inputs=[tsne_start_year, tsne_start_month, tsne_end_year, tsne_end_month, tsne_enso_phase, tsne_season],
-            outputs=[tsne_plot, routes_plot, stats_plot, cluster_info]
-        )
 
-demo.launch(share=True)
9 |
import plotly.graph_objects as go
|
10 |
import plotly.express as px
|
11 |
from plotly.subplots import make_subplots
|
|
|
12 |
import pickle
|
13 |
import requests
|
14 |
import os
|
|
|
19 |
import tempfile
|
20 |
import csv
|
21 |
from collections import defaultdict
|
|
|
22 |
from sklearn.manifold import TSNE
|
23 |
from sklearn.cluster import DBSCAN
|
24 |
from scipy.interpolate import interp1d
|
25 |
|
26 |
+
# Import tropycal for IBTrACS processing (for typhoon option updates)
|
27 |
+
import tropycal.tracks as tracks
|
28 |
+
|
29 |
+
# ------------------ Argument Parsing ------------------
|
30 |
parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
|
31 |
parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
|
32 |
args = parser.parse_args()
|
33 |
DATA_PATH = args.data_path
|
34 |
|
35 |
+
# ------------------ File Paths ------------------
|
36 |
ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
|
37 |
TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
|
38 |
|
39 |
+
# ------------------ IBTrACS Files (for typhoon options) ------------------
|
40 |
BASIN_FILES = {
|
41 |
'EP': 'ibtracs.EP.list.v04r01.csv',
|
42 |
'NA': 'ibtracs.NA.list.v04r01.csv',
|
43 |
'WP': 'ibtracs.WP.list.v04r01.csv'
|
44 |
}
|
45 |
IBTRACS_BASE_URL = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/'
|
|
|
46 |
|
47 |
CACHE_FILE = 'ibtracs_cache.pkl'
|
48 |
CACHE_EXPIRY_DAYS = 0 # Force refresh for testing
|
49 |
|
50 |
+
# ------------------ Color Maps and Standards ------------------
|
51 |
color_map = {
|
52 |
'C5 Super Typhoon': 'rgb(255, 0, 0)',
|
53 |
'C4 Very Strong Typhoon': 'rgb(255, 165, 0)',
|
|
|
57 |
'Tropical Storm': 'rgb(0, 0, 255)',
|
58 |
'Tropical Depression': 'rgb(128, 128, 128)'
|
59 |
}
|
|
|
|
|
60 |
atlantic_standard = {
|
61 |
'C5 Super Typhoon': {'wind_speed': 137, 'color': 'Red', 'hex': '#FF0000'},
|
62 |
'C4 Very Strong Typhoon': {'wind_speed': 113, 'color': 'Orange', 'hex': '#FFA500'},
|
|
|
66 |
'Tropical Storm': {'wind_speed': 34, 'color': 'Blue', 'hex': '#0000FF'},
|
67 |
'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
|
68 |
}
|
|
|
69 |
taiwan_standard = {
|
70 |
'Strong Typhoon': {'wind_speed': 51.0, 'color': 'Red', 'hex': '#FF0000'},
|
71 |
'Medium Typhoon': {'wind_speed': 33.7, 'color': 'Orange', 'hex': '#FFA500'},
|
|
|
73 |
'Tropical Depression': {'wind_speed': 0, 'color': 'Gray', 'hex': '#808080'}
|
74 |
}
|
75 |
|
76 |
+
# ------------------ Season and Regions ------------------
|
77 |
season_months = {
|
78 |
'all': list(range(1, 13)),
|
79 |
'summer': [6, 7, 8],
|
80 |
'winter': [12, 1, 2]
|
81 |
}
|
|
|
|
|
82 |
regions = {
|
83 |
"Taiwan Land": {"lat_min": 21.8, "lat_max": 25.3, "lon_min": 119.5, "lon_max": 122.1},
|
84 |
"Taiwan Sea": {"lat_min": 19, "lat_max": 28, "lon_min": 117, "lon_max": 125},
|
|
|
88 |
"Philippines": {"lat_min": 5, "lat_max": 21, "lon_min": 115, "lon_max": 130}
|
89 |
}
|
90 |
|
91 |
+
# ------------------ ONI and Typhoon Data Functions ------------------
|
92 |
def download_oni_file(url, filename):
|
93 |
response = requests.get(url)
|
94 |
response.raise_for_status()
|
|
|
123 |
input_file = os.path.join(DATA_PATH, "oni.ascii.txt")
|
124 |
output_file = ONI_DATA_PATH
|
125 |
if download_oni_file(url, temp_file):
|
126 |
+
if not os.path.exists(input_file) or not os.path.exists(output_file):
|
127 |
os.replace(temp_file, input_file)
|
128 |
convert_oni_ascii_to_csv(input_file, output_file)
|
129 |
else:
|
130 |
os.remove(temp_file)
|
131 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
132 |
def load_data(oni_path, typhoon_path):
|
133 |
oni_data = pd.read_csv(oni_path)
|
134 |
typhoon_data = pd.read_csv(typhoon_path, low_memory=False)
|
|
|
150 |
typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
|
151 |
typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
|
152 |
typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
|
153 |
+
print(f"Unique basins in typhoon_data: {typhoon_data['SID'].str[:2].unique()}")
|
|
|
|
|
154 |
typhoon_max = typhoon_data.groupby('SID').agg({
|
155 |
'USA_WIND': 'max', 'USA_PRES': 'min', 'ISO_TIME': 'first', 'SEASON': 'first', 'NAME': 'first',
|
156 |
'LAT': 'first', 'LON': 'first'
|
|
|
164 |
return pd.merge(typhoon_max, oni_long, on=['Year', 'Month'])
|
165 |
|
166 |
def categorize_typhoon(wind_speed):
|
167 |
+
if wind_speed >= 137:
|
|
|
168 |
return 'C5 Super Typhoon'
|
169 |
+
elif wind_speed >= 113:
|
170 |
return 'C4 Very Strong Typhoon'
|
171 |
+
elif wind_speed >= 96:
|
172 |
return 'C3 Strong Typhoon'
|
173 |
+
elif wind_speed >= 83:
|
174 |
return 'C2 Typhoon'
|
175 |
+
elif wind_speed >= 64:
|
176 |
return 'C1 Typhoon'
|
177 |
+
elif wind_speed >= 34:
|
178 |
return 'Tropical Storm'
|
179 |
else:
|
180 |
return 'Tropical Depression'
|
|
|
189 |
else:
|
190 |
return 'Neutral'
|
191 |
|
192 |
+
# ------------------ IBTrACS Data Loading (for typhoon options) ------------------
|
193 |
+
def load_ibtracs_data():
|
194 |
+
ibtracs_data = {}
|
195 |
+
for basin, filename in BASIN_FILES.items():
|
196 |
+
local_path = os.path.join(DATA_PATH, filename)
|
197 |
+
if not os.path.exists(local_path):
|
198 |
+
print(f"Downloading {basin} basin file...")
|
199 |
+
response = requests.get(IBTRACS_BASE_URL + filename)
|
200 |
+
response.raise_for_status()
|
201 |
+
with open(local_path, 'wb') as f:
|
202 |
+
f.write(response.content)
|
203 |
+
print(f"Downloaded {basin} basin file.")
|
204 |
+
try:
|
205 |
+
print(f"--> Starting to read in IBTrACS data for basin {basin}")
|
206 |
+
ds = tracks.TrackDataset(source='ibtracs', ibtracs_url=local_path)
|
207 |
+
print(f"--> Completed reading in IBTrACS data for basin {basin}")
|
208 |
+
ibtracs_data[basin] = ds
|
209 |
+
except ValueError as e:
|
210 |
+
print(f"Warning: Skipping basin {basin} due to error: {e}")
|
211 |
+
ibtracs_data[basin] = None
|
212 |
+
return ibtracs_data
|
213 |
+
|
214 |
ibtracs = load_ibtracs_data()
|
215 |
+
|
216 |
+
# ------------------ Load and Process Data ------------------
|
217 |
+
update_oni_data()
|
218 |
oni_data, typhoon_data = load_data(ONI_DATA_PATH, TYPHOON_DATA_PATH)
|
219 |
oni_long = process_oni_data(oni_data)
|
220 |
typhoon_max = process_typhoon_data(typhoon_data)
|
221 |
merged_data = merge_data(oni_long, typhoon_max)
|
222 |
|
223 |
+
# ------------------ Visualization Functions ------------------
|
224 |
def generate_typhoon_tracks(filtered_data, typhoon_search):
|
225 |
fig = go.Figure()
|
226 |
for sid in filtered_data['SID'].unique():
|
227 |
storm_data = filtered_data[filtered_data['SID'] == sid]
|
228 |
+
phase = storm_data['ENSO_Phase'].iloc[0]
|
229 |
+
color = {'El Nino': 'red', 'La Nina': 'blue', 'Neutral': 'green'}.get(phase, 'black')
|
230 |
fig.add_trace(go.Scattergeo(
|
231 |
lon=storm_data['LON'], lat=storm_data['LAT'], mode='lines',
|
232 |
name=storm_data['NAME'].iloc[0], line=dict(width=2, color=color)
|
|
|
294 |
def generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
295 |
start_date = datetime(start_year, start_month, 1)
|
296 |
end_date = datetime(end_year, end_month, 28)
|
297 |
+
filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
|
|
|
|
|
|
|
298 |
filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
|
299 |
if enso_phase != 'all':
|
300 |
filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
|
|
|
301 |
tracks_fig = generate_typhoon_tracks(filtered_data, typhoon_search)
|
302 |
wind_scatter = generate_wind_oni_scatter(filtered_data, typhoon_search)
|
303 |
pressure_scatter = generate_pressure_oni_scatter(filtered_data, typhoon_search)
|
304 |
regression_fig, slopes_text = generate_regression_analysis(filtered_data)
|
|
|
305 |
return tracks_fig, wind_scatter, pressure_scatter, regression_fig, slopes_text
|
306 |
|
|
|
307 |
def get_full_tracks(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
308 |
start_date = datetime(start_year, start_month, 1)
|
309 |
end_date = datetime(end_year, end_month, 28)
|
310 |
+
filtered_data = merged_data[(merged_data['ISO_TIME'] >= start_date) & (merged_data['ISO_TIME'] <= end_date)].copy()
|
|
|
|
|
|
|
311 |
filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
|
312 |
if enso_phase != 'all':
|
313 |
filtered_data = filtered_data[filtered_data['ENSO_Phase'] == enso_phase.capitalize()]
|
|
|
316 |
fig = go.Figure()
|
317 |
for sid in unique_storms:
|
318 |
storm_data = typhoon_data[typhoon_data['SID'] == sid]
|
319 |
+
name = storm_data['NAME'].iloc[0] if pd.notnull(storm_data['NAME'].iloc[0]) else "Unnamed"
|
320 |
storm_oni = filtered_data[filtered_data['SID'] == sid]['ONI'].iloc[0]
|
321 |
color = 'red' if storm_oni >= 0.5 else ('blue' if storm_oni <= -0.5 else 'green')
|
322 |
fig.add_trace(go.Scattergeo(
|
|
|
361 |
)
|
362 |
return fig, f"Total typhoons displayed: {count}"
|
363 |
|
|
|
364 |
def get_wind_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search):
|
365 |
results = generate_main_analysis(start_year, start_month, end_year, end_month, enso_phase, typhoon_search)
|
366 |
regression = perform_wind_regression(start_year, start_month, end_year, end_month)
|
|
|
376 |
regression = perform_longitude_regression(start_year, start_month, end_year, end_month)
|
377 |
return results[3], results[4], regression
|
378 |
|
|
|
379 |
def categorize_typhoon_by_standard(wind_speed, standard):
|
380 |
if standard == 'taiwan':
|
381 |
wind_speed_ms = wind_speed * 0.514444
|
|
|
401 |
return 'Tropical Storm', atlantic_standard['Tropical Storm']['hex']
|
402 |
return 'Tropical Depression', atlantic_standard['Tropical Depression']['hex']

# ------------------ Animation Functions Using Processed CSV ------------------
def generate_track_video_from_csv(year, storm_id, standard):
    # Filter the processed CSV down to the requested storm ID
    storm_df = typhoon_data[typhoon_data['SID'] == storm_id].copy()
    if storm_df.empty:
        print("No data found for storm:", storm_id)
        return None
    storm_df = storm_df.sort_values('ISO_TIME')
    lats = storm_df['LAT'].astype(float).values
    lons = storm_df['LON'].astype(float).values
    times = pd.to_datetime(storm_df['ISO_TIME']).values
    if 'USA_WIND' in storm_df.columns:
        winds = pd.to_numeric(storm_df['USA_WIND'], errors='coerce').values
    else:
        winds = np.full(len(lats), np.nan)
    storm_name = storm_df['NAME'].iloc[0]
    season = storm_df['SEASON'].iloc[0]

    # Set up map boundaries with at least 5 degrees of padding on each side
    min_lat, max_lat = np.min(lats), np.max(lats)
    min_lon, max_lon = np.min(lons), np.max(lons)
    lat_padding = max((max_lat - min_lat) * 0.3, 5)
    lon_padding = max((max_lon - min_lon) * 0.3, 5)

    # Larger figure with a custom central longitude for better regional focus;
    # the axes are shifted left to leave room for the state sidebar on the right
    fig = plt.figure(figsize=(12, 9), dpi=100)
    ax = plt.axes([0.05, 0.05, 0.60, 0.90],
                  projection=ccrs.PlateCarree(central_longitude=-25))
    ax.set_extent([min_lon - lon_padding, max_lon + lon_padding,
                   min_lat - lat_padding, max_lat + lat_padding],
                  crs=ccrs.PlateCarree())

    # Add map features
    ax.add_feature(cfeature.LAND, facecolor='lightgray')
    ax.add_feature(cfeature.OCEAN, facecolor='lightblue')
    ax.add_feature(cfeature.COASTLINE, edgecolor='black')
    ax.add_feature(cfeature.BORDERS, linestyle=':', edgecolor='gray')
    ax.gridlines(draw_labels=True, linestyle='--', color='gray', alpha=0.5)
    ax.set_title(f"{year} {storm_name} - {season}", fontsize=16)

    # Track line and current-position marker
    line, = ax.plot([], [], 'b-', linewidth=2, transform=ccrs.PlateCarree())
    point, = ax.plot([], [], 'o', markersize=10, transform=ccrs.PlateCarree())

    # Dynamic text elements
    date_text = ax.text(0.02, 0.02, '', transform=ax.transAxes, fontsize=12,
                        bbox=dict(facecolor='white', alpha=0.8))
    state_text = fig.text(0.70, 0.60, '', fontsize=14, verticalalignment='top',
                          bbox=dict(facecolor='white', alpha=0.8, boxstyle='round,pad=0.5'))

    # Persistent legend for the category color mapping (placed on the right)
    legend_elements = [plt.Line2D([0], [0], marker='o', color='w', label=f"{cat}",
                                  markerfacecolor=details['hex'], markersize=10)
                       for cat, details in (atlantic_standard if standard == 'atlantic'
                                            else taiwan_standard).items()]
    ax.legend(handles=legend_elements, title="Storm Categories", loc='upper right', fontsize=10)
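
    # init() and update() below drive matplotlib's FuncAnimation; with blit=True,
    # each callback must return every artist it modifies so that only those
    # artists are redrawn on each frame.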
    def init():
        line.set_data([], [])
        point.set_data([], [])
        date_text.set_text('')
        state_text.set_text('')
        return line, point, date_text, state_text

    def update(frame):
        # Extend the track line up to the current frame
        line.set_data(lons[:frame+1], lats[:frame+1])
        # Update the marker position, passing lists to avoid a Matplotlib deprecation warning
        point.set_data([lons[frame]], [lats[frame]])
        wind_speed = winds[frame] if frame < len(winds) else np.nan
        category, color = categorize_typhoon_by_standard(wind_speed, standard)
        point.set_color(color)
        dt_str = pd.to_datetime(times[frame]).strftime('%Y-%m-%d %H:%M')
        date_text.set_text(dt_str)
        # Refresh the state sidebar (name, date, wind, category)
        state_info = (f"Name: {storm_name}\n"
                      f"Date: {dt_str}\n"
                      f"Wind: {wind_speed:.1f} kt\n"
                      f"Category: {category}")
        state_text.set_text(state_info)
        return line, point, date_text, state_text

    ani = animation.FuncAnimation(fig, update, init_func=init, frames=len(times),
                                  interval=200, blit=True, repeat=True)
    # Render to a temporary MP4; FFMpegWriter requires an ffmpeg binary on PATH
    temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.mp4')
    writer = animation.FFMpegWriter(fps=5, bitrate=1800)
    ani.save(temp_file.name, writer=writer)
    plt.close(fig)
    return temp_file.name

def simplified_track_video(year, basin, typhoon, standard):
    if not typhoon:
        return None
    # Dropdown labels have the form "NAME (SID)"; recover the SID from the parentheses
    storm_id = typhoon.split('(')[-1].strip(')')
    return generate_track_video_from_csv(year, storm_id, standard)
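
# Illustrative parse (the storm name and ID below are made up):
#   "SOMESTORM (SID0001)".split('(')[-1].strip(')')  ->  "SID0001"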

# ------------------ Typhoon Options Update Functions ------------------
basin_to_prefix = {
    "All Basins": "all",
    "NA - North Atlantic": "NA",
    "EP - Eastern North Pacific": "EP",
    "WP - Western North Pacific": "WP"
}
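
# Maps the Gradio basin labels to ibtracs dataset keys. "All Basins" is
# special-cased in update_typhoon_options below, so its "all" value acts only
# as a sentinel.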

def update_typhoon_options(year, basin):
    # `ibtracs` maps basin prefixes to track datasets exposing get_season()/summary()
    try:
        if basin == "All Basins":
            summaries = []
            for data in ibtracs.values():
                if data is not None:
                    season_data = data.get_season(int(year))
                    if season_data.summary().empty:
                        continue
                    summaries.append(season_data.summary())
            if len(summaries) == 0:
                print("Error updating typhoon options: No storms identified for the given year and basin.")
                return gr.update(choices=[], value=None)
            combined_summary = pd.concat(summaries, ignore_index=True)
        else:
            prefix = basin_to_prefix.get(basin)
            ds = ibtracs.get(prefix)
            if ds is None:
                print("Error updating typhoon options: Dataset not found for the given basin.")
                return gr.update(choices=[], value=None)
            season_data = ds.get_season(int(year))
            if season_data.summary().empty:
                print("Error updating typhoon options: No storms identified for the given year and basin.")
                return gr.update(choices=[], value=None)
            combined_summary = season_data.summary()
        options = []
        for i in range(len(combined_summary)):
            try:
                name = combined_summary['name'][i] if pd.notnull(combined_summary['name'][i]) else "Unnamed"
                storm_id = combined_summary['id'][i]
                options.append(f"{name} ({storm_id})")
            except Exception:
                continue
        return gr.update(choices=options, value=options[0] if options else None)
    except Exception as e:
        print(f"Error updating typhoon options: {e}")
        return gr.update(choices=[], value=None)

def update_typhoon_options_anim(year, basin):
    # The animation tab reads storms from the processed CSV; `basin` is accepted
    # for interface compatibility but not used to filter here.
    try:
        data = typhoon_data.copy()
        data['Year'] = pd.to_datetime(data['ISO_TIME']).dt.year
        season_data = data[data['Year'] == int(year)]
        if season_data.empty:
            print("Error updating typhoon options (animation): No storms identified for the given year.")
            return gr.update(choices=[], value=None)
        summary = season_data.groupby('SID').first().reset_index()
        options = []
        for idx, row in summary.iterrows():
            name = row['NAME'] if pd.notnull(row['NAME']) else "Unnamed"
            options.append(f"{name} ({row['SID']})")
        return gr.update(choices=options, value=options[0] if options else None)
    except Exception as e:
        print(f"Error updating typhoon options (animation): {e}")
        return gr.update(choices=[], value=None)
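
# Sketch of the option-building step above (values hypothetical): a storm whose
# SID is "SID0001" and NAME is "EXAMPLE" may have dozens of 6-hourly track rows,
# but groupby('SID').first() keeps one row per storm, so the dropdown lists
# "EXAMPLE (SID0001)" exactly once.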

# ------------------ TSNE Cluster Function ------------------
def update_route_clusters(start_year, start_month, end_year, end_month, enso_value, season):
    # Use only WP storms from the processed CSV for clustering (copy to avoid a
    # pandas SettingWithCopyWarning when adding the Year column below).
    wp_data = typhoon_data[typhoon_data['SID'].str.startswith("WP")].copy()
    if wp_data.empty:
        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No West Pacific storms found."
    wp_data['Year'] = pd.to_datetime(wp_data['ISO_TIME']).dt.year
    # wp_season serves only as a guard; the clustering below runs on all WP storms
    wp_season = wp_data[wp_data['Year'] == int(start_year)]
    if wp_season.empty:
        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No storms found for the given period in WP."

    all_storms_data = []
    for sid, group in wp_data.groupby('SID'):
        group = group.sort_values('ISO_TIME')
        times = pd.to_datetime(group['ISO_TIME']).values
        lats = group['LAT'].astype(float).values
        lons = group['LON'].astype(float).values
        if len(lons) < 2:
            continue
        if season != 'all':
            # Season is judged by the month of the storm's first recorded fix
            month = pd.to_datetime(group['ISO_TIME']).iloc[0].month
            if season == 'summer' and not (4 <= month <= 8):
                continue
            if season == 'winter' and not (9 <= month <= 12):
                continue
        winds = pd.to_numeric(group['USA_WIND'], errors='coerce').values
        all_storms_data.append((lons, lats, winds, times, sid))
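
    # Each surviving track is resampled below onto a common number of points so
    # that every storm becomes one fixed-length (lon, lat) vector for t-SNE.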
    if not all_storms_data:
        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid WP storms for clustering."

    max_length = max(len(item[0]) for item in all_storms_data)
    route_vectors = []
    filtered_storms = []
    for lons, lats, winds, times, sid in all_storms_data:
        t = np.linspace(0, 1, len(lons))
        t_new = np.linspace(0, 1, max_length)
        try:
            lon_i = interp1d(t, lons, kind='linear', fill_value='extrapolate')(t_new)
            lat_i = interp1d(t, lats, kind='linear', fill_value='extrapolate')(t_new)
        except Exception:
            continue
        route_vector = np.column_stack((lon_i, lat_i)).flatten()
        if np.isnan(route_vector).any():
            continue
        route_vectors.append(route_vector)
        filtered_storms.append((lon_i, lat_i, winds, times, sid))

    route_vectors = np.array(route_vectors)
    if len(route_vectors) == 0:
        return go.Figure(), go.Figure(), make_subplots(rows=2, cols=1), "No valid storms after interpolation."

    tsne = TSNE(n_components=2, random_state=42, verbose=1)
    tsne_results = tsne.fit_transform(route_vectors)
    dbscan = DBSCAN(eps=5, min_samples=3)
    best_labels = dbscan.fit_predict(tsne_results)
    unique_labels = sorted(set(best_labels) - {-1})
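
    # Reading the clustering step: DBSCAN operates on the 2-D t-SNE embedding, so
    # eps=5 is a distance in embedding space (not degrees), min_samples=3 means a
    # cluster needs at least three mutually close routes, and the label -1
    # (excluded from unique_labels above) marks unclustered noise.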

    fig_tsne = go.Figure()
    colors = px.colors.qualitative.Safe
    for i, label in enumerate(unique_labels):
        indices = np.where(best_labels == label)[0]
        fig_tsne.add_trace(go.Scatter(
            x=tsne_results[indices, 0],
            y=tsne_results[indices, 1],
            mode='markers',
            marker=dict(color=colors[i % len(colors)]),
            name=f"Cluster {label}"
        ))
    fig_tsne.update_layout(title="t-SNE of WP Storm Routes")
    # The route and statistics panels are currently returned as empty placeholders
    fig_routes = go.Figure()
    fig_stats = make_subplots(rows=2, cols=1)
    info = "TSNE clustering complete."
    return fig_tsne, fig_routes, fig_stats, info
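
# A minimal, self-contained sketch of the resampling used above (synthetic
# coordinates, not app data):
#   import numpy as np
#   from scipy.interpolate import interp1d
#   lons = np.array([130.0, 132.5, 135.0])   # three observed fixes
#   t = np.linspace(0, 1, len(lons))          # [0.0, 0.5, 1.0]
#   t_new = np.linspace(0, 1, 5)              # resample to five points
#   interp1d(t, lons, kind='linear')(t_new)
#   # -> array([130.  , 131.25, 132.5 , 133.75, 135.  ])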

# ------------------ Gradio Interface ------------------
with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
    gr.Markdown("# Typhoon Analysis Dashboard")

    gr.Markdown("""
This dashboard allows you to analyze typhoon data in relation to ENSO phases.

### Features:
- **Track Visualization**: View typhoon tracks by time period and ENSO phase (all basins available)
- **Wind Analysis**: Examine wind speed vs ONI relationships
- **Pressure Analysis**: Analyze pressure vs ONI relationships
- **Longitude Analysis**: Study typhoon genesis longitude vs ONI
- **Path Animation**: Watch animated tropical cyclone paths with a dynamic state display and a category legend (built from the processed CSV data)
- **TSNE Cluster**: Perform t-SNE clustering on WP storm routes with mean routes and region analysis

Select a tab above to begin your analysis.
    """)

    ...
        analyze_btn = gr.Button("Generate Tracks")
        tracks_plot = gr.Plot(label="Typhoon Tracks", elem_id="tracks_plot")
        typhoon_count = gr.Textbox(label="Number of Typhoons Displayed")
        analyze_btn.click(fn=get_full_tracks,
                          inputs=[start_year, start_month, end_year, end_month, enso_phase, typhoon_search],
                          outputs=[tracks_plot, typhoon_count])
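
        # Every tab repeats this wiring pattern: the button's .click() maps the
        # tab's input widgets to one analysis function and routes the returned
        # figures/text into the tab's output components.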

    with gr.Tab("Wind Analysis"):
        with gr.Row():
            ...
        wind_analyze_btn = gr.Button("Generate Wind Analysis")
        wind_scatter = gr.Plot(label="Wind Speed vs ONI")
        wind_regression_results = gr.Textbox(label="Wind Regression Results")
        wind_analyze_btn.click(fn=get_wind_analysis,
                               inputs=[wind_start_year, wind_start_month, wind_end_year, wind_end_month, wind_enso_phase, wind_typhoon_search],
                               outputs=[wind_scatter, wind_regression_results])

    with gr.Tab("Pressure Analysis"):
        with gr.Row():
            ...
        pressure_analyze_btn = gr.Button("Generate Pressure Analysis")
        pressure_scatter = gr.Plot(label="Pressure vs ONI")
        pressure_regression_results = gr.Textbox(label="Pressure Regression Results")
        pressure_analyze_btn.click(fn=get_pressure_analysis,
                                   inputs=[pressure_start_year, pressure_start_month, pressure_end_year, pressure_end_month, pressure_enso_phase, pressure_typhoon_search],
                                   outputs=[pressure_scatter, pressure_regression_results])

    with gr.Tab("Longitude Analysis"):
        with gr.Row():
            ...
        regression_plot = gr.Plot(label="Longitude vs ONI")
        slopes_text = gr.Textbox(label="Regression Slopes")
        lon_regression_results = gr.Textbox(label="Longitude Regression Results")
        lon_analyze_btn.click(fn=get_longitude_analysis,
                              inputs=[lon_start_year, lon_start_month, lon_end_year, lon_end_month, lon_enso_phase, lon_typhoon_search],
                              outputs=[regression_plot, slopes_text, lon_regression_results])

    with gr.Tab("Tropical Cyclone Path Animation"):
        with gr.Row():
            year_dropdown = gr.Dropdown(label="Year", choices=[str(y) for y in range(1950, 2025)], value="2000")
            basin_dropdown = gr.Dropdown(label="Basin",
                                         choices=["NA - North Atlantic", "EP - Eastern North Pacific",
                                                  "WP - Western North Pacific", "All Basins"],
                                         value="NA - North Atlantic")
        with gr.Row():
            typhoon_dropdown = gr.Dropdown(label="Tropical Cyclone")
            standard_dropdown = gr.Dropdown(label="Classification Standard", choices=['atlantic', 'taiwan'], value='atlantic')
        animate_btn = gr.Button("Generate Animation")
        path_video = gr.Video(label="Tropical Cyclone Path Animation", format="mp4", interactive=False, elem_id="path_video")
        animation_info = gr.Markdown("""
### Animation Instructions
1. Select a year and basin from the dropdowns (the animation uses the processed CSV data).
2. Choose a tropical cyclone from the populated list.
3. Select a classification standard (Atlantic or Taiwan).
4. Click "Generate Animation".
5. The animation draws the storm track frame by frame, with a sidebar showing the current state (name, date, wind, category) and a persistent category legend.
        """)
        # Repopulate the storm list whenever the year or basin selection changes
        year_dropdown.change(fn=update_typhoon_options_anim, inputs=[year_dropdown, basin_dropdown], outputs=typhoon_dropdown)
        basin_dropdown.change(fn=update_typhoon_options_anim, inputs=[year_dropdown, basin_dropdown], outputs=typhoon_dropdown)
        animate_btn.click(fn=simplified_track_video, inputs=[year_dropdown, basin_dropdown, typhoon_dropdown, standard_dropdown], outputs=path_video)

    with gr.Tab("TSNE Cluster"):
        with gr.Row():
            ...
        routes_plot = gr.Plot(label="Typhoon Routes with Mean Routes")
        stats_plot = gr.Plot(label="Cluster Statistics")
        cluster_info = gr.Textbox(label="Cluster Information", lines=10)
        tsne_analyze_btn.click(fn=update_route_clusters,
                               inputs=[tsne_start_year, tsne_start_month, tsne_end_year, tsne_end_month, tsne_enso_phase, tsne_season],
                               outputs=[tsne_plot, routes_plot, stats_plot, cluster_info])
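
# Note: share=True below also creates a temporary public Gradio link in addition
# to the local server; set share=False to keep the app local-only.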

demo.launch(share=True)
|