euler314 committed
Commit eb8c873 · verified · 1 Parent(s): a3d5eda

Update app.py

Files changed (1): app.py +1156 -881
app.py CHANGED
@@ -1,7 +1,6 @@
  import gradio as gr
  import plotly.graph_objects as go
  import plotly.express as px
- from plotly.subplots import make_subplots
  import pickle
  import tropycal.tracks as tracks
  import pandas as pd
@@ -10,15 +9,19 @@ import cachetools
  import functools
  import hashlib
  import os
  from datetime import datetime, timedelta
- from datetime import date
  from scipy import stats
  from scipy.optimize import minimize, curve_fit
  from sklearn.linear_model import LinearRegression
  from sklearn.cluster import KMeans
  from scipy.interpolate import interp1d
  from fractions import Fraction
  import statsmodels.api as sm
  import time
  import threading
  import requests
@@ -28,618 +31,707 @@ import csv
  from collections import defaultdict
  import shutil
  import filecmp
- import warnings
- warnings.filterwarnings('ignore')

- # Constants
- DATA_PATH = os.getcwd()
  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
- LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r00.csv')
- iBtrace_uri = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r00/access/csv/ibtracs.WP.list.v04r00.csv'
  CACHE_FILE = 'ibtracs_cache.pkl'
  CACHE_EXPIRY_DAYS = 1

- # Color mappings
- COLOR_MAP = {
-     'C5 Super Typhoon': 'rgb(255, 0, 0)',
-     'C4 Very Strong Typhoon': 'rgb(255, 63, 0)',
-     'C3 Strong Typhoon': 'rgb(255, 127, 0)',
-     'C2 Typhoon': 'rgb(255, 191, 0)',
-     'C1 Typhoon': 'rgb(255, 255, 0)',
-     'Tropical Storm': 'rgb(0, 255, 255)',
-     'Tropical Depression': 'rgb(173, 216, 230)'
  }

- class TyphoonAnalyzer:
-     def __init__(self):
-         self.last_oni_update = None
-         self.ensure_data_files_exist()
-         self.load_initial_data()
-
-     def ensure_data_files_exist(self):
-         """Ensure all required data files exist before loading"""
-         print("Checking and downloading required data files...")
-
-         # Create data directory if it doesn't exist
-         os.makedirs(DATA_PATH, exist_ok=True)
-
-         # Download ONI data if it doesn't exist
-         if not os.path.exists(ONI_DATA_PATH):
-             print("Downloading ONI data...")
-             url = "https://www.cpc.ncep.noaa.gov/data/indices/oni.ascii.txt"
-             temp_file = os.path.join(DATA_PATH, "temp_oni.ascii.txt")
-             try:
-                 response = requests.get(url)
-                 response.raise_for_status()
-                 with open(temp_file, 'wb') as f:
-                     f.write(response.content)
-                 self.convert_oni_ascii_to_csv(temp_file, ONI_DATA_PATH)
-                 print("ONI data downloaded and converted successfully")
-             except Exception as e:
-                 print(f"Error downloading ONI data: {e}")
-                 raise
-             finally:
-                 if os.path.exists(temp_file):
-                     os.remove(temp_file)
-
-         # Download IBTrACS data if it doesn't exist
-         if not os.path.exists(LOCAL_iBtrace_PATH):
-             print("Downloading IBTrACS data...")
-             try:
-                 response = requests.get(iBtrace_uri)
-                 response.raise_for_status()
-                 with open(LOCAL_iBtrace_PATH, 'w') as f:
-                     f.write(response.text)
-                 print("IBTrACS data downloaded successfully")
-             except Exception as e:
-                 print(f"Error downloading IBTrACS data: {e}")
-                 raise
-
-         # Create processed typhoon data if it doesn't exist
-         if not os.path.exists(TYPHOON_DATA_PATH):
-             print("Processing typhoon data...")
-             try:
-                 self.convert_typhoondata(LOCAL_iBtrace_PATH, TYPHOON_DATA_PATH)
-                 print("Typhoon data processed successfully")
-             except Exception as e:
-                 print(f"Error processing typhoon data: {e}")
-                 raise
-
-         print("All required data files are ready")
-
-     def load_initial_data(self):
-         """Initialize all required data"""
-         print("Loading initial data...")
-         self.update_oni_data()
-         self.oni_df = self.fetch_oni_data_from_csv()
-         self.ibtracs = self.load_ibtracs_data()
-         self.update_typhoon_data()
-         self.oni_data, self.typhoon_data = self.load_data()
-         self.oni_long = self.process_oni_data(self.oni_data)
-         self.typhoon_max = self.process_typhoon_data(self.typhoon_data)
-         self.merged_data = self.merge_data()
-         print("Initial data loading complete")
-
-     def convert_typhoondata(self, input_file, output_file):
-         """Convert IBTrACS data to processed format"""
-         print(f"Converting typhoon data from {input_file} to {output_file}")
-         with open(input_file, 'r') as infile:
-             # Skip the header lines
-             next(infile)
-             next(infile)
-
-             reader = csv.reader(infile)
-             sid_data = defaultdict(list)
-
-             for row in reader:
-                 if not row:  # Skip blank lines
-                     continue
-
-                 sid = row[0]
-                 iso_time = row[6]
-                 sid_data[sid].append((row, iso_time))
-
-         with open(output_file, 'w', newline='') as outfile:
-             fieldnames = ['SID', 'ISO_TIME', 'LAT', 'LON', 'SEASON', 'NAME',
-                           'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES',
-                           'START_DATE', 'END_DATE']
-             writer = csv.DictWriter(outfile, fieldnames=fieldnames)
-             writer.writeheader()
-
-             for sid, data in sid_data.items():
-                 start_date = min(data, key=lambda x: x[1])[1]
-                 end_date = max(data, key=lambda x: x[1])[1]
-
-                 for row, iso_time in data:
-                     writer.writerow({
-                         'SID': row[0],
-                         'ISO_TIME': iso_time,
-                         'LAT': row[8],
-                         'LON': row[9],
-                         'SEASON': row[1],
-                         'NAME': row[5],
-                         'WMO_WIND': row[10].strip() or ' ',
-                         'WMO_PRES': row[11].strip() or ' ',
-                         'USA_WIND': row[23].strip() or ' ',
-                         'USA_PRES': row[24].strip() or ' ',
-                         'START_DATE': start_date,
-                         'END_DATE': end_date
-                     })
-
-     def fetch_oni_data_from_csv(self):
-         """Load ONI data from CSV"""
-         df = pd.read_csv(ONI_DATA_PATH)
-         df = df.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
-
-         # Convert month numbers to month names
-         month_map = {
-             '01': 'Jan', '02': 'Feb', '03': 'Mar', '04': 'Apr',
-             '05': 'May', '06': 'Jun', '07': 'Jul', '08': 'Aug',
-             '09': 'Sep', '10': 'Oct', '11': 'Nov', '12': 'Dec'
-         }
-         df['Month'] = df['Month'].map(month_map)
-
-         # Now create the date
-         df['Date'] = pd.to_datetime(df['Year'].astype(str) + df['Month'], format='%Y%b')
-         return df.set_index('Date')
-
-     def update_oni_data(self):
-         """Update ONI data from NOAA"""
-         if not self._should_update_oni():
-             return
-
-         url = "https://www.cpc.ncep.noaa.gov/data/indices/oni.ascii.txt"
-         with tempfile.NamedTemporaryFile(delete=False) as temp_file:
-             try:
-                 response = requests.get(url)
-                 response.raise_for_status()
-                 temp_file.write(response.content)
-                 self.convert_oni_ascii_to_csv(temp_file.name, ONI_DATA_PATH)
-                 self.last_oni_update = date.today()
-             except Exception as e:
-                 print(f"Error updating ONI data: {e}")
-             finally:
-                 if os.path.exists(temp_file.name):
-                     os.remove(temp_file.name)
-
-     def _should_update_oni(self):
-         """Check if ONI data should be updated"""
-         today = datetime.now()
-         return (today.day in [1, 15] or
-                 today.day == (today.replace(day=1, month=today.month%12+1) - timedelta(days=1)).day)
-
-     def convert_oni_ascii_to_csv(self, input_file, output_file):
-         """Convert ONI ASCII data to CSV format"""
-         data = defaultdict(lambda: [''] * 12)
-         season_to_month = {
-             'DJF': 12, 'JFM': 1, 'FMA': 2, 'MAM': 3, 'AMJ': 4, 'MJJ': 5,
-             'JJA': 6, 'JAS': 7, 'ASO': 8, 'SON': 9, 'OND': 10, 'NDJ': 11
-         }
          with open(input_file, 'r') as f:
-             next(f)  # Skip header
-             for line in f:
                  parts = line.split()
                  if len(parts) >= 4:
-                     season, year, anom = parts[0], parts[1], parts[-1]
                      if season in season_to_month:
                          month = season_to_month[season]
                          if season == 'DJF':
                              year = str(int(year) - 1)
                          data[year][month-1] = anom

          with open(output_file, 'w', newline='') as f:
              writer = csv.writer(f)
-             writer.writerow(['Year'] + [f"{m:02d}" for m in range(1, 13)])
              for year in sorted(data.keys()):
-                 writer.writerow([year] + data[year])

-     def load_ibtracs_data(self):
-         """Load IBTrACS data with caching"""
-         if os.path.exists(CACHE_FILE):
-             cache_time = datetime.fromtimestamp(os.path.getmtime(CACHE_FILE))
-             if datetime.now() - cache_time < timedelta(days=CACHE_EXPIRY_DAYS):
-                 with open(CACHE_FILE, 'rb') as f:
-                     return pickle.load(f)

-         if os.path.exists(LOCAL_iBtrace_PATH):
-             ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs',
-                                           ibtracs_url=LOCAL_iBtrace_PATH)
          else:
              response = requests.get(iBtrace_uri)
              response.raise_for_status()
-             with open(LOCAL_iBtrace_PATH, 'w') as f:
-                 f.write(response.text)
-             ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs',
-                                           ibtracs_url=LOCAL_iBtrace_PATH)

-         with open(CACHE_FILE, 'wb') as f:
-             pickle.dump(ibtracs, f)
-         return ibtracs

-     def update_typhoon_data(self):
-         """Update typhoon data from IBTrACS"""
-         try:
-             response = requests.head(iBtrace_uri)
-             remote_modified = datetime.strptime(response.headers['Last-Modified'],
-                                                 '%a, %d %b %Y %H:%M:%S GMT')
-             local_modified = (datetime.fromtimestamp(os.path.getmtime(LOCAL_iBtrace_PATH))
-                               if os.path.exists(LOCAL_iBtrace_PATH) else datetime.min)
-
-             if remote_modified > local_modified:
-                 response = requests.get(iBtrace_uri)
-                 response.raise_for_status()
-                 with open(LOCAL_iBtrace_PATH, 'w') as f:
-                     f.write(response.text)
-                 print("Typhoon data updated successfully")
-         except Exception as e:
-             print(f"Error updating typhoon data: {e}")
-
-     def load_data(self):
-         """Load ONI and typhoon data"""
-         oni_data = pd.read_csv(ONI_DATA_PATH)
-         typhoon_data = pd.read_csv(TYPHOON_DATA_PATH, low_memory=False)
-         typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'])
-         return oni_data, typhoon_data
-
-     def process_oni_data(self, oni_data):
-         """Process ONI data"""
-         oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')

-         # Create a mapping for month numbers
-         month_map = {
-             '01': 1, '02': 2, '03': 3, '04': 4,
-             '05': 5, '06': 6, '07': 7, '08': 8,
-             '09': 9, '10': 10, '11': 11, '12': 12
-         }

-         # Convert month strings to numbers directly
-         oni_long['Month'] = oni_long['Month'].map(month_map)

-         return oni_long
-
-     def process_typhoon_data(self, typhoon_data):
-         """Process typhoon data"""
-         typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
-         typhoon_data['WMO_PRES'] = pd.to_numeric(typhoon_data['WMO_PRES'], errors='coerce')
-         typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'])
-         typhoon_data['Year'] = typhoon_data['ISO_TIME'].dt.year
-         typhoon_data['Month'] = typhoon_data['ISO_TIME'].dt.month

-         typhoon_max = typhoon_data.groupby(['SID', 'Year', 'Month']).agg({
-             'USA_WIND': 'max',
-             'WMO_PRES': 'min',
-             'NAME': 'first',
-             'LAT': 'first',
-             'LON': 'first',
-             'ISO_TIME': 'first'
-         }).reset_index()

-         typhoon_max['Category'] = typhoon_max['USA_WIND'].apply(self.categorize_typhoon)
-         return typhoon_max

-     def merge_data(self):
-         """Merge ONI and typhoon data"""
-         return pd.merge(self.typhoon_max, self.oni_long, on=['Year', 'Month'])

-     def categorize_typhoon(self, wind_speed):
-         """Categorize typhoon based on wind speed"""
-         if np.isnan(wind_speed):
-             return 'Unknown'
          if wind_speed >= 137:
-             return 'C5 Super Typhoon'
          elif wind_speed >= 113:
-             return 'C4 Very Strong Typhoon'
          elif wind_speed >= 96:
-             return 'C3 Strong Typhoon'
          elif wind_speed >= 83:
-             return 'C2 Typhoon'
          elif wind_speed >= 64:
-             return 'C1 Typhoon'
          elif wind_speed >= 34:
-             return 'Tropical Storm'
          else:
-             return 'Tropical Depression'

-     def analyze_typhoon(self, start_year, start_month, end_year, end_month, enso_value='all'):
-         """Main analysis function"""
-         start_date = datetime(start_year, start_month, 1)
-         end_date = datetime(end_year, end_month, 28)
-
-         filtered_data = self.merged_data[
-             (self.merged_data['ISO_TIME'] >= start_date) &
-             (self.merged_data['ISO_TIME'] <= end_date)
-         ]

-         if enso_value != 'all':
-             filtered_data = filtered_data[
-                 (filtered_data['ONI'] >= 0.5 if enso_value == 'el_nino' else
-                  filtered_data['ONI'] <= -0.5 if enso_value == 'la_nina' else
-                  (filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5))
-             ]

-         return {
-             'tracks': self.create_tracks_plot(filtered_data),
-             'wind': self.create_wind_analysis(filtered_data),
-             'pressure': self.create_pressure_analysis(filtered_data),
-             'stats': self.generate_statistics(filtered_data)
-         }

-     def create_tracks_plot(self, data):
-         """Create typhoon tracks visualization"""
          fig = go.Figure()
-
-         fig.update_layout(
-             title={
-                 'text': 'Typhoon Tracks',
-                 'y': 0.95,
-                 'x': 0.5,
-                 'xanchor': 'center',
-                 'yanchor': 'top'
-             },
-             showlegend=True,
-             legend=dict(
-                 yanchor="top",
-                 y=0.99,
-                 xanchor="left",
-                 x=0.01,
-                 bgcolor='rgba(255, 255, 255, 0.8)'
-             ),
-             geo=dict(
-                 projection_type='mercator',
-                 showland=True,
-                 showcoastlines=True,
-                 landcolor='rgb(243, 243, 243)',
-                 countrycolor='rgb(204, 204, 204)',
-                 coastlinecolor='rgb(214, 214, 214)',
-                 showocean=True,
-                 oceancolor='rgb(230, 250, 255)',
-                 showlakes=True,
-                 lakecolor='rgb(230, 250, 255)',
-                 lataxis=dict(range=[0, 50]),
-                 lonaxis=dict(range=[100, 180]),
-                 center=dict(lat=20, lon=140),
-                 bgcolor='rgba(255, 255, 255, 0.5)'
-             ),
-             paper_bgcolor='rgba(255, 255, 255, 0.5)',
-             plot_bgcolor='rgba(255, 255, 255, 0.5)'
-         )
-
-         for category in COLOR_MAP.keys():
-             category_data = data[data['Category'] == category]
-             for _, storm in category_data.groupby('SID'):
-                 track_data = self.typhoon_data[self.typhoon_data['SID'] == storm['SID'].iloc[0]]
-                 track_data = track_data.sort_values('ISO_TIME')
-
-                 fig.add_trace(go.Scattergeo(
-                     lon=track_data['LON'],
-                     lat=track_data['LAT'],
-                     mode='lines',
-                     line=dict(
-                         width=2,
-                         color=COLOR_MAP[category]
-                     ),
-                     name=category,
-                     legendgroup=category,
-                     showlegend=True if storm.iloc[0]['SID'] == category_data.iloc[0]['SID'] else False,
-                     hovertemplate=(
-                         f"Name: {storm['NAME'].iloc[0]}<br>" +
-                         f"Category: {category}<br>" +
-                         f"Wind Speed: {storm['USA_WIND'].iloc[0]:.1f} kt<br>" +
-                         f"Pressure: {storm['WMO_PRES'].iloc[0]:.1f} hPa<br>" +
-                         f"Date: {track_data['ISO_TIME'].dt.strftime('%Y-%m-%d %H:%M').iloc[0]}<br>" +
-                         f"Lat: {track_data['LAT'].iloc[0]:.2f}°N<br>" +
-                         f"Lon: {track_data['LON'].iloc[0]:.2f}°E<br>" +
-                         "<extra></extra>"
-                     )
-                 ))

-         return fig
-
-     def create_wind_analysis(self, data):
-         """Create wind speed analysis plot"""
-         fig = px.scatter(data,
-                          x='ONI',
-                          y='USA_WIND',
-                          color='Category',
-                          color_discrete_map=COLOR_MAP,
-                          title='Wind Speed vs ONI Index',
-                          labels={
-                              'ONI': 'Oceanic Niño Index',
-                              'USA_WIND': 'Maximum Wind Speed (kt)'
-                          },
-                          hover_data=['NAME', 'ISO_TIME', 'Category']
-                          )
-
-         # Add regression line
-         x = data['ONI']
-         y = data['USA_WIND']
-         slope, intercept = np.polyfit(x, y, 1)
          fig.add_trace(
-             go.Scatter(
-                 x=x,
-                 y=slope * x + intercept,
                  mode='lines',
-                 name=f'Regression (slope={slope:.2f})',
-                 line=dict(color='black', dash='dash')
              )
          )
-
-         return fig

-     def create_pressure_analysis(self, data):
-         """Create pressure analysis plot"""
-         fig = px.scatter(data,
-                          x='ONI',
-                          y='WMO_PRES',
-                          color='Category',
-                          color_discrete_map=COLOR_MAP,
-                          title='Pressure vs ONI Index',
-                          labels={
-                              'ONI': 'Oceanic Niño Index',
-                              'WMO_PRES': 'Minimum Pressure (hPa)'
-                          },
-                          hover_data=['NAME', 'ISO_TIME', 'Category']
-                          )
-
-         # Add regression line
-         x = data['ONI']
-         y = data['WMO_PRES']
-         slope, intercept = np.polyfit(x, y, 1)
          fig.add_trace(
-             go.Scatter(
-                 x=x,
-                 y=slope * x + intercept,
-                 mode='lines',
-                 name=f'Regression (slope={slope:.2f})',
-                 line=dict(color='black', dash='dash')
-             )
-         )
-
-         return fig
-
-     def generate_statistics(self, data):
-         """Generate statistical summary"""
-         stats = {
-             'total_typhoons': len(data['SID'].unique()),
-             'avg_wind': data['USA_WIND'].mean(),
-             'max_wind': data['USA_WIND'].max(),
-             'avg_pressure': data['WMO_PRES'].mean(),
-             'min_pressure': data['WMO_PRES'].min(),
-             'oni_correlation_wind': data['ONI'].corr(data['USA_WIND']),
-             'oni_correlation_pressure': data['ONI'].corr(data['WMO_PRES']),
-             'category_counts': data['Category'].value_counts().to_dict()
-         }
-
-         return f"""
-         ### Statistical Summary
-
-         - Total Typhoons: {stats['total_typhoons']}
-         - Average Wind Speed: {stats['avg_wind']:.2f} kt
-         - Maximum Wind Speed: {stats['max_wind']:.2f} kt
-         - Average Pressure: {stats['avg_pressure']:.2f} hPa
-         - Minimum Pressure: {stats['min_pressure']:.2f} hPa
-         - ONI-Wind Speed Correlation: {stats['oni_correlation_wind']:.3f}
-         - ONI-Pressure Correlation: {stats['oni_correlation_pressure']:.3f}
-
-         ### Category Distribution
-         {chr(10).join(f'- {cat}: {count}' for cat, count in stats['category_counts'].items())}
-         """
-
-     def analyze_clusters(self, year, n_clusters):
-         """Analyze typhoon clusters for a specific year"""
-         year_data = self.typhoon_data[self.typhoon_data['SEASON'] == year]
-         if year_data.empty:
-             return go.Figure(), "No data available for selected year"
-
-         # Prepare data for clustering
-         routes = []
-         for _, storm in year_data.groupby('SID'):
-             if len(storm) > 1:
-                 # Standardize route length
-                 t = np.linspace(0, 1, len(storm))
-                 t_new = np.linspace(0, 1, 100)
-                 lon_interp = interp1d(t, storm['LON'], kind='linear')(t_new)
-                 lat_interp = interp1d(t, storm['LAT'], kind='linear')(t_new)
-                 routes.append(np.column_stack((lon_interp, lat_interp)))
-
-         if len(routes) < n_clusters:
-             return go.Figure(), f"Not enough typhoons ({len(routes)}) for {n_clusters} clusters"
-
-         # Perform clustering
-         routes_array = np.array(routes)
-         routes_reshaped = routes_array.reshape(routes_array.shape[0], -1)
-         kmeans = KMeans(n_clusters=n_clusters, random_state=42)
-         clusters = kmeans.fit_predict(routes_reshaped)
-
-         # Create visualization
-         fig = go.Figure()
-
-         # Set layout
-         fig.update_layout(
-             title=f'Typhoon Route Clusters ({year})',
-             showlegend=True,
-             geo=dict(
-                 projection_type='mercator',
-                 showland=True,
-                 showcoastlines=True,
-                 landcolor='rgb(243, 243, 243)',
-                 countrycolor='rgb(204, 204, 204)',
-                 coastlinecolor='rgb(214, 214, 214)',
-                 showocean=True,
-                 oceancolor='rgb(230, 250, 255)',
-                 lataxis=dict(range=[0, 50]),
-                 lonaxis=dict(range=[100, 180]),
-                 center=dict(lat=20, lon=140)
              )
          )
-
-         # Plot routes colored by cluster
-         for route, cluster_id in zip(routes, clusters):
-             fig.add_trace(go.Scattergeo(
-                 lon=route[:, 0],
-                 lat=route[:, 1],
-                 mode='lines',
-                 line=dict(
-                     width=1,
-                     color=f'hsl({cluster_id * 360/n_clusters}, 50%, 50%)'
-                 ),
-                 name=f'Cluster {cluster_id + 1}',
-                 showlegend=False
-             ))
-
-         # Plot cluster centers
-         for i in range(n_clusters):
-             center = kmeans.cluster_centers_[i].reshape(-1, 2)
-             fig.add_trace(go.Scattergeo(
-                 lon=center[:, 0],
-                 lat=center[:, 1],
-                 mode='lines',
-                 name=f'Cluster {i+1} Center',
-                 line=dict(
-                     width=3,
-                     color=f'hsl({i * 360/n_clusters}, 100%, 50%)'
-                 )
-             ))
-
-         # Generate statistics text
-         stats_text = "### Clustering Results\n\n"
-         cluster_counts = np.bincount(clusters)
-         for i in range(n_clusters):
-             stats_text += f"- Cluster {i+1}: {cluster_counts[i]} typhoons\n"
-
-         return fig, stats_text

-     def get_typhoons_for_year(self, year):
-         """Get list of typhoons for a specific year"""
-         try:
-             season = self.ibtracs.get_season(year)
-             storm_summary = season.summary()
-
-             typhoon_options = []
-             for i in range(storm_summary['season_storms']):
-                 storm_id = storm_summary['id'][i]
-                 storm_name = storm_summary['name'][i]
-                 typhoon_options.append({'label': f"{storm_name} ({storm_id})", 'value': storm_id})

-             return typhoon_options
-         except Exception as e:
-             print(f"Error getting typhoons for year {year}: {str(e)}")
-             return []
-
-     def create_typhoon_animation(self, year, storm_id, standard='atlantic'):
-         """Create animated visualization of typhoon path"""
-         if not storm_id:
-             return go.Figure(), "Please select a typhoon"

-         storm = self.ibtracs.get_storm(storm_id)
-
-         fig = go.Figure()

-         # Base map setup with correct scaling
          fig.update_layout(
-             title=f"{year} - {storm.name} Typhoon Path",
              geo=dict(
                  projection_type='natural earth',
                  showland=True,
@@ -648,9 +740,6 @@ class TyphoonAnalyzer:
                  coastlinecolor='rgb(100, 100, 100)',
                  showocean=True,
                  oceancolor='rgb(230, 250, 255)',
-                 lataxis=dict(range=[0, 50]),
-                 lonaxis=dict(range=[100, 180]),
-                 center=dict(lat=20, lon=140),
              ),
              updatemenus=[{
                  "buttons": [
@@ -677,375 +766,561 @@ class TyphoonAnalyzer:
                  "xanchor": "right",
                  "y": 0,
                  "yanchor": "top"
              }]
          )

-         # Create animation frames
-         frames = []
-         for i in range(len(storm.time)):
-             category, color = self.categorize_typhoon_by_standard(storm.vmax[i], standard)

-             # Get extra radius data if available
-             radius_info = ""
-             if hasattr(storm, 'dict'):
-                 r34_ne = storm.dict.get('USA_R34_NE', [None])[i] if 'USA_R34_NE' in storm.dict else None
-                 r34_se = storm.dict.get('USA_R34_SE', [None])[i] if 'USA_R34_SE' in storm.dict else None
-                 r34_sw = storm.dict.get('USA_R34_SW', [None])[i] if 'USA_R34_SW' in storm.dict else None
-                 r34_nw = storm.dict.get('USA_R34_NW', [None])[i] if 'USA_R34_NW' in storm.dict else None
-                 rmw = storm.dict.get('USA_RMW', [None])[i] if 'USA_RMW' in storm.dict else None
-                 eye = storm.dict.get('USA_EYE', [None])[i] if 'USA_EYE' in storm.dict else None

-             if any([r34_ne, r34_se, r34_sw, r34_nw, rmw, eye]):
-                 radius_info = f"<br>R34: NE={r34_ne}, SE={r34_se}, SW={r34_sw}, NW={r34_nw}<br>"
-                 radius_info += f"RMW: {rmw}<br>Eye Diameter: {eye}"
-
-             frame = go.Frame(
-                 data=[
-                     go.Scattergeo(
-                         lon=storm.lon[:i+1],
-                         lat=storm.lat[:i+1],
-                         mode='lines',
-                         line=dict(width=2, color='blue'),
-                         name='Path Traveled',
-                         showlegend=False,
-                     ),
-                     go.Scattergeo(
-                         lon=[storm.lon[i]],
-                         lat=[storm.lat[i]],
-                         mode='markers+text',
-                         marker=dict(size=10, color=color, symbol='star'),
-                         text=category,
-                         textposition="top center",
-                         textfont=dict(size=12, color=color),
-                         name='Current Location',
-                         hovertemplate=(
-                             f"{storm.time[i].strftime('%Y-%m-%d %H:%M')}<br>"
-                             f"Category: {category}<br>"
-                             f"Wind Speed: {storm.vmax[i]:.1f} kt<br>"
-                             f"{radius_info}"
-                         ),
-                     ),
-                 ], name=f"frame{i}"
-             )
-             frames.append(frame)
-
-         fig.frames = frames

-         # Add initial track and starting point
-         fig.add_trace(
-             go.Scattergeo(
-                 lon=storm.lon,
-                 lat=storm.lat,
-                 mode='lines',
-                 line=dict(width=2, color='gray'),
-                 name='Complete Path',
-                 showlegend=True,
-             )
-         )
-
-         fig.add_trace(
-             go.Scattergeo(
-                 lon=[storm.lon[0]],
-                 lat=[storm.lat[0]],
-                 mode='markers',
-                 marker=dict(size=10, color='green', symbol='star'),
-                 name='Starting Point',
-                 text=storm.time[0].strftime('%Y-%m-%d %H:%M'),
-                 hoverinfo='text+name',
              )
          )
-
-         # Add slider for frame selection
-         sliders = [{
-             "active": 0,
-             "yanchor": "top",
-             "xanchor": "left",
-             "currentvalue": {
-                 "font": {"size": 20},
-                 "prefix": "Time: ",
-                 "visible": True,
-                 "xanchor": "right"
-             },
-             "transition": {"duration": 100, "easing": "cubic-in-out"},
-             "pad": {"b": 10, "t": 50},
-             "len": 0.9,
-             "x": 0.1,
-             "y": 0,
-             "steps": [
-                 {
-                     "args": [[f"frame{k}"],
-                              {"frame": {"duration": 100, "redraw": True},
-                               "mode": "immediate",
-                               "transition": {"duration": 0}}
-                              ],
-                     "label": storm.time[k].strftime('%Y-%m-%d %H:%M'),
-                     "method": "animate"
-                 }
-                 for k in range(len(storm.time))
-             ]
-         }]

-         fig.update_layout(sliders=sliders)
-
-         info_text = f"""
-         ### Typhoon Information
-         - **Name:** {storm.name}
-         - **Start Date:** {storm.time[0].strftime('%Y-%m-%d %H:%M')}
-         - **End Date:** {storm.time[-1].strftime('%Y-%m-%d %H:%M')}
-         - **Duration:** {(storm.time[-1] - storm.time[0]).total_seconds() / 3600:.1f} hours
-         - **Maximum Wind Speed:** {max(storm.vmax):.1f} kt
-         - **Minimum Pressure:** {min(storm.mslp):.1f} hPa
-         - **Peak Category:** {self.categorize_typhoon_by_standard(max(storm.vmax), standard)[0]}
-         """
-
-         return fig, info_text
-
-     def search_typhoons(self, query):
-         """Search for typhoons by name"""
-         if not query:
-             return go.Figure(), "Please enter a typhoon name to search"

-         # Find all typhoons matching the query
-         matching_storms = []

-         # Limit search to last 30 years to improve performance
-         current_year = datetime.now().year
-         start_year = current_year - 30

-         for year in range(start_year, current_year + 1):
-             try:
-                 season = self.ibtracs.get_season(year)
-                 for storm_id in season.summary()['id']:
-                     storm = self.ibtracs.get_storm(storm_id)
-                     if query.lower() in storm.name.lower():
-                         matching_storms.append((year, storm))
-             except Exception as e:
-                 print(f"Error searching year {year}: {str(e)}")
                  continue

-         if not matching_storms:
-             return go.Figure(), "No typhoons found matching your search"

-         # Create visualization of all matching typhoons
-         fig = go.Figure()

-         fig.update_layout(
-             title=f"Typhoons Matching: '{query}'",
              geo=dict(
-                 projection_type='natural earth',
                  showland=True,
                  landcolor='rgb(243, 243, 243)',
                  countrycolor='rgb(204, 204, 204)',
                  coastlinecolor='rgb(100, 100, 100)',
                  showocean=True,
                  oceancolor='rgb(230, 250, 255)',
-                 lataxis=dict(range=[0, 50]),
-                 lonaxis=dict(range=[100, 180]),
-                 center=dict(lat=20, lon=140),
-             )
          )

-         # Plot each matching storm with a different color
-         colors = px.colors.qualitative.Plotly

-         for i, (year, storm) in enumerate(matching_storms):
-             color = colors[i % len(colors)]

-             fig.add_trace(go.Scattergeo(
-                 lon=storm.lon,
-                 lat=storm.lat,
-                 mode='lines',
-                 line=dict(width=3, color=color),
-                 name=f"{storm.name} ({year})",
-                 hovertemplate=(
-                     f"Name: {storm.name}<br>"
-                     f"Year: {year}<br>"
-                     f"Max Wind: {max(storm.vmax):.1f} kt<br>"
-                     f"Min Pressure: {min(storm.mslp):.1f} hPa<br>"
-                     f"Position: %{lat:.2f}°N, %{lon:.2f}°E"
-                 )
-             ))
-
-         # Add starting points
-         for i, (year, storm) in enumerate(matching_storms):
-             color = colors[i % len(colors)]

-             fig.add_trace(go.Scattergeo(
-                 lon=[storm.lon[0]],
-                 lat=[storm.lat[0]],
-                 mode='markers',
-                 marker=dict(size=10, color=color, symbol='circle'),
-                 name=f"Start: {storm.name} ({year})",
-                 showlegend=False,
-                 hoverinfo='name'
-             ))
-
-         # Create information text
-         info_text = f"### Found {len(matching_storms)} typhoons matching '{query}':\n\n"
-
-         for year, storm in matching_storms:
-             info_text += f"- **{storm.name} ({year})**\n"
-             info_text += f"  - Time: {storm.time[0].strftime('%Y-%m-%d')} to {storm.time[-1].strftime('%Y-%m-%d')}\n"
-             info_text += f"  - Max Wind: {max(storm.vmax):.1f} kt\n"
-             info_text += f"  - Min Pressure: {min(storm.mslp):.1f} hPa\n"
-             info_text += f"  - Category: {self.categorize_typhoon_by_standard(max(storm.vmax))[0]}\n\n"
-
-         return fig, info_text
-
-     def categorize_typhoon_by_standard(self, wind_speed, standard='atlantic'):
-         """
-         Categorize typhoon based on wind speed and chosen standard
-         wind_speed is in knots
-         """
-         if standard == 'taiwan':
-             # Convert knots to m/s for Taiwan standard
-             wind_speed_ms = wind_speed * 0.514444

-             if wind_speed_ms >= 51.0:
-                 return 'Strong Typhoon', 'rgb(255, 0, 0)'
-             elif wind_speed_ms >= 33.7:
-                 return 'Medium Typhoon', 'rgb(255, 127, 0)'
-             elif wind_speed_ms >= 17.2:
-                 return 'Mild Typhoon', 'rgb(255, 255, 0)'
-             else:
-                 return 'Tropical Depression', 'rgb(173, 216, 230)'
-         else:
-             # Atlantic standard uses knots
-             if wind_speed >= 137:
-                 return 'C5 Super Typhoon', 'rgb(255, 0, 0)'
-             elif wind_speed >= 113:
-                 return 'C4 Very Strong Typhoon', 'rgb(255, 63, 0)'
-             elif wind_speed >= 96:
-                 return 'C3 Strong Typhoon', 'rgb(255, 127, 0)'
-             elif wind_speed >= 83:
-                 return 'C2 Typhoon', 'rgb(255, 191, 0)'
-             elif wind_speed >= 64:
-                 return 'C1 Typhoon', 'rgb(255, 255, 0)'
-             elif wind_speed >= 34:
-                 return 'Tropical Storm', 'rgb(0, 255, 255)'
-             else:
-                 return 'Tropical Depression', 'rgb(173, 216, 230)'


  def create_interface():
-     analyzer = TyphoonAnalyzer()
-
-     with gr.Blocks(title="Typhoon Analysis Dashboard", theme=gr.themes.Base()) as demo:
          gr.Markdown("# Typhoon Analysis Dashboard")

-         with gr.Tabs():
-             # Main Analysis Tab
-             with gr.Tab("Main Analysis"):
-                 with gr.Row():
-                     with gr.Column():
-                         start_year = gr.Slider(1900, 2024, 2000, label="Start Year")
-                         start_month = gr.Slider(1, 12, 1, label="Start Month")
-                     with gr.Column():
-                         end_year = gr.Slider(1900, 2024, 2024, label="End Year")
-                         end_month = gr.Slider(1, 12, 12, label="End Month")
-
-                 enso_dropdown = gr.Dropdown(
-                     choices=["all", "el_nino", "la_nina", "neutral"],
-                     value="all",
                      label="ENSO Phase"
                  )

-                 analyze_btn = gr.Button("Analyze")
-
-                 tracks_plot = gr.Plot(label="Typhoon Tracks")
-
-                 with gr.Row():
-                     wind_plot = gr.Plot(label="Wind Speed Analysis")
-                     pressure_plot = gr.Plot(label="Pressure Analysis")
-
-                 stats_text = gr.Markdown()
-
-             # Typhoon Animation Tab
-             with gr.Tab("Typhoon Animation"):
-                 with gr.Row():
-                     animation_year = gr.Slider(
-                         minimum=1950,
-                         maximum=2024,
-                         value=2020,
-                         step=1,
-                         label="Select Year"
-                     )
-
-                 with gr.Row():
-                     animation_typhoon = gr.Dropdown(
-                         choices=[],
-                         label="Select Typhoon",
-                         interactive=True
-                     )
-
-                     standard_dropdown = gr.Dropdown(
-                         choices=[
-                             {"label": "Atlantic Standard", "value": "atlantic"},
-                             {"label": "Taiwan Standard", "value": "taiwan"}
-                         ],
-                         value="atlantic",
-                         label="Classification Standard"
-                     )
-
-                 animation_btn = gr.Button("Show Typhoon Path", variant="primary")
-                 animation_plot = gr.Plot(label="Typhoon Path Animation")
-                 animation_info = gr.Markdown()
-
-             # Search Tab
-             with gr.Tab("Typhoon Search"):
-                 with gr.Row():
-                     search_input = gr.Textbox(label="Search Typhoon Name")
-                     search_btn = gr.Button("Search Typhoons", variant="primary")

-                 search_results = gr.Plot(label="Search Results")
-                 search_info = gr.Markdown()
-
-         # Event handlers
-         def analyze_callback(start_y, start_m, end_y, end_m, enso):
-             results = analyzer.analyze_typhoon(start_y, start_m, end_y, end_m, enso)
-             return [
-                 results['tracks'],
-                 results['wind'],
-                 results['pressure'],
-                 results['stats']
-             ]
-
-         def update_typhoon_choices(year):
-             typhoons = analyzer.get_typhoons_for_year(year)
-             return gr.update(choices=typhoons, value=None)
-
-         # Connect events for main analysis
-         analyze_btn.click(
-             analyze_callback,
-             inputs=[start_year, start_month, end_year, end_month, enso_dropdown],
-             outputs=[tracks_plot, wind_plot, pressure_plot, stats_text]
-         )
-
-         # Connect events for Animation tab
-         animation_year.change(
-             update_typhoon_choices,
-             inputs=[animation_year],
-             outputs=[animation_typhoon]
-         )
-
-         animation_btn.click(
-             analyzer.create_typhoon_animation,
-             inputs=[animation_year, animation_typhoon, standard_dropdown],
-             outputs=[animation_plot, animation_info]
-         )
-
-         # Connect events for Search tab
-         search_btn.click(
-             analyzer.search_typhoons,
-             inputs=[search_input],
-             outputs=[search_results, search_info]
-         )
-
      return demo

  if __name__ == "__main__":
      demo = create_interface()
-     demo.launch(
-         server_name="0.0.0.0",
-         server_port=7860,
-         share=True
-     )
 
  import gradio as gr
  import plotly.graph_objects as go
  import plotly.express as px
  import pickle
  import tropycal.tracks as tracks
  import pandas as pd
  import functools
  import hashlib
  import os
+ import argparse
  from datetime import datetime, timedelta
+ from datetime import date, datetime
  from scipy import stats
  from scipy.optimize import minimize, curve_fit
  from sklearn.linear_model import LinearRegression
  from sklearn.cluster import KMeans
  from scipy.interpolate import interp1d
  from fractions import Fraction
+ from concurrent.futures import ThreadPoolExecutor
+ from sklearn.metrics import mean_squared_error
  import statsmodels.api as sm
+ import schedule
  import time
  import threading
  import requests
  from collections import defaultdict
  import shutil
  import filecmp
+ # Add command-line argument parsing
+ parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
+ parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
+ args = parser.parse_args()
+
+ # Use the command-line argument for data path
+ DATA_PATH = args.data_path
+
  ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
  TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
+ LOCAL_iBtrace_PATH = os.path.join(DATA_PATH, 'ibtracs.WP.list.v04r01.csv')
+ iBtrace_uri = 'https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/csv/ibtracs.WP.list.v04r01.csv'
+
  CACHE_FILE = 'ibtracs_cache.pkl'
  CACHE_EXPIRY_DAYS = 1
+ last_oni_update = None
+
+ def should_update_oni():
+     today = datetime.now()
+     # Beginning of the month: 1st day
+     if today.day == 1:
+         return True
+     # Middle of the month: 15th day
+     if today.day == 15:
+         return True
+     # End of the month: last day
+     if today.day == (today.replace(day=1, month=today.month%12+1) - timedelta(days=1)).day:
+         return True
+     return False
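The month-end branch above uses the usual "first of next month minus one day" idiom; a minimal standalone sketch of just that idiom (illustrative dates, not part of the commit):

# Sketch: last day of the current month, as computed inside should_update_oni().
# December works too: only .day is read, so the off-by-one-year intermediate date is harmless.
from datetime import datetime, timedelta

def last_day_of_month(today):
    first_of_next = today.replace(day=1, month=today.month % 12 + 1)
    return (first_of_next - timedelta(days=1)).day

assert last_day_of_month(datetime(2024, 2, 10)) == 29  # leap-year February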
+ color_map = {
+     'C5 Super Typhoon': 'rgb(255, 0, 0)',         # Red
+     'C4 Very Strong Typhoon': 'rgb(255, 63, 0)',  # Red-Orange
+     'C3 Strong Typhoon': 'rgb(255, 127, 0)',      # Orange
+     'C2 Typhoon': 'rgb(255, 191, 0)',             # Orange-Yellow
+     'C1 Typhoon': 'rgb(255, 255, 0)',             # Yellow
+     'Tropical Storm': 'rgb(0, 255, 255)',         # Cyan
+     'Tropical Depression': 'rgb(173, 216, 230)'   # Light Blue
  }
+ def convert_typhoondata(input_file, output_file):
+     with open(input_file, 'r') as infile:
+         # Skip the title and the unit line.
+         next(infile)
+         next(infile)
+
+         reader = csv.reader(infile)
+
+         # Used for storing data for each SID
+         sid_data = defaultdict(list)
+
+         for row in reader:
+             if not row:  # Skip the blank lines
+                 continue
+
+             sid = row[0]
+             iso_time = row[6]
+             sid_data[sid].append((row, iso_time))
+
+     with open(output_file, 'w', newline='') as outfile:
+         fieldnames = ['SID', 'ISO_TIME', 'LAT', 'LON', 'SEASON', 'NAME', 'WMO_WIND', 'WMO_PRES', 'USA_WIND', 'USA_PRES', 'START_DATE', 'END_DATE']
+         writer = csv.DictWriter(outfile, fieldnames=fieldnames)
+
+         writer.writeheader()
+
+         for sid, data in sid_data.items():
+             start_date = min(data, key=lambda x: x[1])[1]
+             end_date = max(data, key=lambda x: x[1])[1]
+
+             for row, iso_time in data:
+                 writer.writerow({
+                     'SID': row[0],
+                     'ISO_TIME': iso_time,
+                     'LAT': row[8],
+                     'LON': row[9],
+                     'SEASON': row[1],
+                     'NAME': row[5],
+                     'WMO_WIND': row[10].strip() or ' ',
+                     'WMO_PRES': row[11].strip() or ' ',
+                     'USA_WIND': row[23].strip() or ' ',
+                     'USA_PRES': row[24].strip() or ' ',
+                     'START_DATE': start_date,
+                     'END_DATE': end_date
+                 })
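The row indices used above (8, 9, 10, 11, 23, 24) line up with the IBTrACS v04 CSV column order (LAT, LON, WMO_WIND, WMO_PRES, USA_WIND, USA_PRES). A minimal usage sketch with the module's own constants:

# Illustrative only: flatten the local IBTrACS file into the processed CSV.
convert_typhoondata(LOCAL_iBtrace_PATH, TYPHOON_DATA_PATH)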

+ def download_oni_file(url, filename):
+     print(f"Downloading file from {url}...")
+     try:
+         response = requests.get(url)
+         response.raise_for_status()  # Raises an exception for non-200 status codes
+         with open(filename, 'wb') as f:
+             f.write(response.content)
+         print(f"File successfully downloaded and saved as {filename}")
+         return True
+     except requests.RequestException as e:
+         print(f"Download failed. Error: {e}")
+         return False
+ def convert_oni_ascii_to_csv(input_file, output_file):
+     data = defaultdict(lambda: [''] * 12)
+     season_to_month = {
+         'DJF': 12, 'JFM': 1, 'FMA': 2, 'MAM': 3, 'AMJ': 4, 'MJJ': 5,
+         'JJA': 6, 'JAS': 7, 'ASO': 8, 'SON': 9, 'OND': 10, 'NDJ': 11
+     }
+
+     print(f"Attempting to read file: {input_file}")
+     try:
          with open(input_file, 'r') as f:
+             lines = f.readlines()
+         print(f"Successfully read {len(lines)} lines")
+
+         if len(lines) <= 1:
+             print("Error: File is empty or contains only header")
+             return
+
+         for line in lines[1:]:  # Skip header
              parts = line.split()
              if len(parts) >= 4:
+                 season, year = parts[0], parts[1]
+                 anom = parts[-1]
+
                  if season in season_to_month:
                      month = season_to_month[season]
+
                      if season == 'DJF':
                          year = str(int(year) - 1)
+
                      data[year][month-1] = anom
+                 else:
+                     print(f"Warning: Unknown season: {season}")
+             else:
+                 print(f"Warning: Skipping invalid line: {line.strip()}")
+
+         print(f"Processed data for {len(data)} years")
+     except Exception as e:
+         print(f"Error reading file: {e}")
+         return

+     print(f"Attempting to write file: {output_file}")
+     try:
          with open(output_file, 'w', newline='') as f:
              writer = csv.writer(f)
+             writer.writerow(['Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
+
              for year in sorted(data.keys()):
+                 row = [year] + data[year]
+                 writer.writerow(row)
+
+         print(f"Successfully wrote {len(data)} rows of data")
+     except Exception as e:
+         print(f"Error writing file: {e}")
+         return

+     print(f"Conversion complete. Data saved to {output_file}")

+ def update_oni_data():
+     global last_oni_update
+     current_date = date.today()
+
+     # Check if already updated today
+     if last_oni_update == current_date:
+         print("ONI data already checked today. Skipping update.")
+         return
+
+     url = "https://www.cpc.ncep.noaa.gov/data/indices/oni.ascii.txt"
+     temp_file = os.path.join(DATA_PATH, "temp_oni.ascii.txt")
+     input_file = os.path.join(DATA_PATH, "oni.ascii.txt")
+     output_file = ONI_DATA_PATH
+
+     if download_oni_file(url, temp_file):
+         if not os.path.exists(input_file) or not filecmp.cmp(temp_file, input_file, shallow=False):
+             # File doesn't exist or has been updated
+             os.replace(temp_file, input_file)
+             print("New ONI data detected. Converting to CSV.")
+             convert_oni_ascii_to_csv(input_file, output_file)
+             print("ONI data updated successfully.")
          else:
+             print("ONI data is up to date. No conversion needed.")
+             os.remove(temp_file)  # Remove temporary file
+
+         last_oni_update = current_date
+     else:
+         print("Failed to download ONI data.")
+         if os.path.exists(temp_file):
+             os.remove(temp_file)  # Ensure cleanup of temporary file

+ def load_ibtracs_data():
+     if os.path.exists(CACHE_FILE):
+         cache_time = datetime.fromtimestamp(os.path.getmtime(CACHE_FILE))
+         if datetime.now() - cache_time < timedelta(days=CACHE_EXPIRY_DAYS):
+             print("Loading data from cache...")
+             with open(CACHE_FILE, 'rb') as f:
+                 return pickle.load(f)
+
+     if os.path.exists(LOCAL_iBtrace_PATH):
+         print("Using local IBTrACS file...")
+         ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
+     else:
+         print("Local IBTrACS file not found. Fetching data from remote server...")
+         try:
              response = requests.get(iBtrace_uri)
              response.raise_for_status()
+
+             with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
+                 temp_file.write(response.text)
+                 temp_file_path = temp_file.name
+
+             # Save the downloaded data as the local file
+             shutil.move(temp_file_path, LOCAL_iBtrace_PATH)
+             print(f"Downloaded data saved to {LOCAL_iBtrace_PATH}")
+
+             ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
+         except requests.RequestException as e:
+             print(f"Error downloading data: {e}")
+             print("No local file available and download failed. Unable to load IBTrACS data.")
+             return None
+
+     with open(CACHE_FILE, 'wb') as f:
+         pickle.dump(ibtracs, f)
+
+     return ibtracs

+ def update_ibtracs_data():
+     global ibtracs
+     print("Checking for IBTrACS data updates...")

+     try:
+         # Get the last-modified time of the remote file
+         response = requests.head(iBtrace_uri)
+         remote_last_modified = datetime.strptime(response.headers['Last-Modified'], '%a, %d %b %Y %H:%M:%S GMT')

+         # Get the last-modified time of the local file
+         if os.path.exists(LOCAL_iBtrace_PATH):
+             local_last_modified = datetime.fromtimestamp(os.path.getmtime(LOCAL_iBtrace_PATH))
+         else:
+             local_last_modified = datetime.min
+
+         # Compare the modification times
+         if remote_last_modified <= local_last_modified:
+             print("Local IBTrACS data is up to date. No update needed.")
+             if os.path.exists(CACHE_FILE):
+                 # Update the cache file's timestamp to extend its validity
+                 os.utime(CACHE_FILE, None)
+                 print("Cache file timestamp updated.")
+             return
+
+         print("Remote data is newer. Updating IBTrACS data...")

+         # Download the new data
+         response = requests.get(iBtrace_uri)
+         response.raise_for_status()
+
+         with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv') as temp_file:
+             temp_file.write(response.text)
+             temp_file_path = temp_file.name

+         # Save the downloaded data as the local file
+         shutil.move(temp_file_path, LOCAL_iBtrace_PATH)
+         print(f"Downloaded data saved to {LOCAL_iBtrace_PATH}")

+         # Update the last modified time of the local file to match the remote file
+         os.utime(LOCAL_iBtrace_PATH, (remote_last_modified.timestamp(), remote_last_modified.timestamp()))

+         ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)

+         with open(CACHE_FILE, 'wb') as f:
+             pickle.dump(ibtracs, f)
+         print("IBTrACS data updated and cache refreshed.")
+
+     except requests.RequestException as e:
+         print(f"Error checking or downloading data: {e}")
+         if os.path.exists(LOCAL_iBtrace_PATH):
+             print("Using existing local file.")
+             ibtracs = tracks.TrackDataset(basin='west_pacific', source='ibtracs', ibtracs_url=LOCAL_iBtrace_PATH)
+             if os.path.exists(CACHE_FILE):
+                 # Update the cache file's timestamp even when using existing local file
+                 os.utime(CACHE_FILE, None)
+                 print("Cache file timestamp updated.")
+         else:
+             print("No local file available. Update failed.")
+
+ def run_schedule():
+     while True:
+         schedule.run_pending()
+         time.sleep(1)

+ def analyze_typhoon_generation(merged_data, start_date, end_date):
+     filtered_data = merged_data[
+         (merged_data['ISO_TIME'] >= start_date) &
+         (merged_data['ISO_TIME'] <= end_date)
+     ]
+
+     filtered_data['ENSO_Phase'] = filtered_data['ONI'].apply(classify_enso_phases)
+
+     typhoon_counts = filtered_data['ENSO_Phase'].value_counts().to_dict()
+
+     month_counts = filtered_data.groupby(['ENSO_Phase', filtered_data['ISO_TIME'].dt.month]).size().unstack(fill_value=0)
+     concentrated_months = month_counts.idxmax(axis=1).to_dict()
+
+     return typhoon_counts, concentrated_months

+ def cache_key_generator(*args, **kwargs):
+     key = hashlib.md5()
+     for arg in args:
+         key.update(str(arg).encode())
+     for k, v in sorted(kwargs.items()):
+         key.update(str(k).encode())
+         key.update(str(v).encode())
+     return key.hexdigest()

+ def categorize_typhoon(wind_speed):
+     wind_speed_kt = wind_speed / 2  # Convert kt to m/s
+
+     # Add category classification
+     if wind_speed_kt >= 137/2.35:
+         return 'C5 Super Typhoon'
+     elif wind_speed_kt >= 113/2.35:
+         return 'C4 Very Strong Typhoon'
+     elif wind_speed_kt >= 96/2.35:
+         return 'C3 Strong Typhoon'
+     elif wind_speed_kt >= 83/2.35:
+         return 'C2 Typhoon'
+     elif wind_speed_kt >= 64/2.35:
+         return 'C1 Typhoon'
+     elif wind_speed_kt >= 34/2.35:
+         return 'Tropical Storm'
+     else:
+         return 'Tropical Depression'

+ @functools.lru_cache(maxsize=None)
+ def process_oni_data_cached(oni_data_hash):
+     return process_oni_data(oni_data)
+
+ def process_oni_data(oni_data):
+     oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
+     oni_long['Month'] = oni_long['Month'].map({
+         'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04', 'May': '05', 'Jun': '06',
+         'Jul': '07', 'Aug': '08', 'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12'
+     })
+     oni_long['Date'] = pd.to_datetime(oni_long['Year'].astype(str) + '-' + oni_long['Month'] + '-01')
+     oni_long['ONI'] = pd.to_numeric(oni_long['ONI'], errors='coerce')
+     return oni_long
+
+ def process_oni_data_with_cache(oni_data):
+     oni_data_hash = cache_key_generator(oni_data.to_json())
+     return process_oni_data_cached(oni_data_hash)

+ @functools.lru_cache(maxsize=None)
+ def process_typhoon_data_cached(typhoon_data_hash):
+     return process_typhoon_data(typhoon_data)
+
+ def process_typhoon_data(typhoon_data):
+     typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
+     typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
+     typhoon_data['USA_PRES'] = pd.to_numeric(typhoon_data['USA_PRES'], errors='coerce')
+     typhoon_data['LON'] = pd.to_numeric(typhoon_data['LON'], errors='coerce')
+
+     typhoon_max = typhoon_data.groupby('SID').agg({
+         'USA_WIND': 'max',
+         'USA_PRES': 'min',
+         'ISO_TIME': 'first',
+         'SEASON': 'first',
+         'NAME': 'first',
+         'LAT': 'first',
+         'LON': 'first'
+     }).reset_index()
+
+     typhoon_max['Month'] = typhoon_max['ISO_TIME'].dt.strftime('%m')
+     typhoon_max['Year'] = typhoon_max['ISO_TIME'].dt.year
+     typhoon_max['Category'] = typhoon_max['USA_WIND'].apply(categorize_typhoon)
+     return typhoon_max
+
+ def process_typhoon_data_with_cache(typhoon_data):
+     typhoon_data_hash = cache_key_generator(typhoon_data.to_json())
+     return process_typhoon_data_cached(typhoon_data_hash)

+ def merge_data(oni_long, typhoon_max):
+     return pd.merge(typhoon_max, oni_long, on=['Year', 'Month'])
+
+ def calculate_logistic_regression(merged_data):
+     data = merged_data.dropna(subset=['USA_WIND', 'ONI'])
+
+     # Create binary outcome for severe typhoons
+     data['severe_typhoon'] = (data['USA_WIND'] >= 51).astype(int)
+
+     # Create binary predictor for El Niño
+     data['el_nino'] = (data['ONI'] >= 0.5).astype(int)
+
+     X = data['el_nino']
+     X = sm.add_constant(X)  # Add constant term
+     y = data['severe_typhoon']
+
+     model = sm.Logit(y, X).fit()
+
+     beta_1 = model.params['el_nino']
+     exp_beta_1 = np.exp(beta_1)
+     p_value = model.pvalues['el_nino']
+
+     return beta_1, exp_beta_1, p_value
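In the fit above, exp(beta_1) is the odds ratio: the factor by which the odds of a severe typhoon (USA_WIND >= 51 kt) change in El Niño months (ONI >= 0.5). A hedged reading sketch, assuming merged_data has already been built:

# log(p / (1 - p)) = beta_0 + beta_1 * el_nino
beta_1, odds_ratio, p_value = calculate_logistic_regression(merged_data)
print(f"severe-typhoon odds ratio under El Nino ~ {odds_ratio:.2f} (p = {p_value:.3f})")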

+ @cachetools.cached(cache={})
+ def fetch_oni_data_from_csv(file_path):
+     df = pd.read_csv(file_path, sep=',', header=0, na_values='-99.90')
+     df.columns = ['Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+     df = df.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
+     df['Date'] = pd.to_datetime(df['Year'].astype(str) + df['Month'], format='%Y%b')
+     df = df.set_index('Date')
+     return df
+
+ def classify_enso_phases(oni_value):
+     if isinstance(oni_value, pd.Series):
+         oni_value = oni_value.iloc[0]
+     if oni_value >= 0.5:
+         return 'El Nino'
+     elif oni_value <= -0.5:
+         return 'La Nina'
+     else:
+         return 'Neutral'
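The ±0.5 cutoffs match NOAA's conventional ONI thresholds for El Niño and La Niña. A quick check of the mapping (illustrative values):

for oni in (0.7, -0.9, 0.1):
    print(oni, '->', classify_enso_phases(oni))  # El Nino, La Nina, Neutral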

+ def load_data(oni_data_path, typhoon_data_path):
+     oni_data = pd.read_csv(oni_data_path)
+     typhoon_data = pd.read_csv(typhoon_data_path, low_memory=False)
+
+     typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
+
+     typhoon_data = typhoon_data.dropna(subset=['ISO_TIME'])
+
+     print(f"Typhoon data shape after cleaning: {typhoon_data.shape}")
+     print(f"Year range: {typhoon_data['ISO_TIME'].dt.year.min()} - {typhoon_data['ISO_TIME'].dt.year.max()}")
+
+     return oni_data, typhoon_data

+ def preprocess_data(oni_data, typhoon_data):
+     typhoon_data['USA_WIND'] = pd.to_numeric(typhoon_data['USA_WIND'], errors='coerce')
+     typhoon_data['WMO_PRES'] = pd.to_numeric(typhoon_data['WMO_PRES'], errors='coerce')
+     typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
+     typhoon_data['Year'] = typhoon_data['ISO_TIME'].dt.year
+     typhoon_data['Month'] = typhoon_data['ISO_TIME'].dt.month
+
+     monthly_max_wind_speed = typhoon_data.groupby(['Year', 'Month'])['USA_WIND'].max().reset_index()
+
+     oni_data_long = pd.melt(oni_data, id_vars=['Year'], var_name='Month', value_name='ONI')
+     oni_data_long['Month'] = oni_data_long['Month'].apply(lambda x: pd.to_datetime(x, format='%b').month)
+
+     merged_data = pd.merge(monthly_max_wind_speed, oni_data_long, on=['Year', 'Month'])
+
+     return merged_data
+
+ def calculate_max_wind_min_pressure(typhoon_data):
+     max_wind_speed = typhoon_data['USA_WIND'].max()
+     min_pressure = typhoon_data['WMO_PRES'].min()
+     return max_wind_speed, min_pressure
+
+ @functools.lru_cache(maxsize=None)
+ def get_storm_data(storm_id):
+     return ibtracs.get_storm(storm_id)
+
+ def filter_west_pacific_coordinates(lons, lats):
+     mask = (100 <= lons) & (lons <= 180) & (0 <= lats) & (lats <= 40)
+     return lons[mask], lats[mask]
+
+ def polynomial_exp(x, a, b, c, d):
+     return a * x**2 + b * x + c + d * np.exp(x)
+
+ def exponential(x, a, b, c):
+     return a * np.exp(b * x) + c
+
+ def generate_cluster_equations(cluster_center):
+     X = cluster_center[:, 0]  # Longitudes
+     y = cluster_center[:, 1]  # Latitudes
+
+     x_min = X.min()
+     x_max = X.max()
+
+     equations = []
+
+     # Fourier Series (up to 4th order)
+     def fourier_series(x, a0, a1, b1, a2, b2, a3, b3, a4, b4):
+         return (a0 + a1*np.cos(x) + b1*np.sin(x) +
+                 a2*np.cos(2*x) + b2*np.sin(2*x) +
+                 a3*np.cos(3*x) + b3*np.sin(3*x) +
+                 a4*np.cos(4*x) + b4*np.sin(4*x))
+
+     # Normalize X to the range [0, 2π]
+     X_normalized = 2 * np.pi * (X - x_min) / (x_max - x_min)
+
+     params, _ = curve_fit(fourier_series, X_normalized, y)
+     a0, a1, b1, a2, b2, a3, b3, a4, b4 = params
+
+     # Create the equation string
+     fourier_eq = (f"y = {a0:.4f} + {a1:.4f}*cos(x) + {b1:.4f}*sin(x) + "
+                   f"{a2:.4f}*cos(2x) + {b2:.4f}*sin(2x) + "
+                   f"{a3:.4f}*cos(3x) + {b3:.4f}*sin(3x) + "
+                   f"{a4:.4f}*cos(4x) + {b4:.4f}*sin(4x)")
+
+     equations.append(("Fourier Series", fourier_eq))
+     equations.append(("X Range", f"x goes from 0 to {2*np.pi:.4f}"))
+     equations.append(("Longitude Range", f"Longitude goes from {x_min:.4f}°E to {x_max:.4f}°E"))
+
+     return equations, (x_min, x_max)
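A hedged usage sketch: each KMeans center reshapes to an (n_points, 2) lon/lat track, which the fitter approximates with the 4th-order Fourier series (the kmeans object here is hypothetical, assumed already fitted):

center = kmeans.cluster_centers_[0].reshape(-1, 2)  # assumes a fitted KMeans
equations, (x_min, x_max) = generate_cluster_equations(center)
for name, eq in equations:
    print(f"{name}: {eq}")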
538
+
539
+
540
+
541
+
+# Classification standards (Atlantic thresholds in knots, Taiwan thresholds in m/s)
+atlantic_standard = {
+    'C5 Super Typhoon': {'wind_speed': 137, 'color': 'rgb(255, 0, 0)'},
+    'C4 Very Strong Typhoon': {'wind_speed': 113, 'color': 'rgb(255, 63, 0)'},
+    'C3 Strong Typhoon': {'wind_speed': 96, 'color': 'rgb(255, 127, 0)'},
+    'C2 Typhoon': {'wind_speed': 83, 'color': 'rgb(255, 191, 0)'},
+    'C1 Typhoon': {'wind_speed': 64, 'color': 'rgb(255, 255, 0)'},
+    'Tropical Storm': {'wind_speed': 34, 'color': 'rgb(0, 255, 255)'},
+    'Tropical Depression': {'wind_speed': 0, 'color': 'rgb(173, 216, 230)'}
+}
+
+taiwan_standard = {
+    'Strong Typhoon': {'wind_speed': 51.0, 'color': 'rgb(255, 0, 0)'},        # >= 51.0 m/s
+    'Medium Typhoon': {'wind_speed': 33.7, 'color': 'rgb(255, 127, 0)'},      # 33.7-50.9 m/s
+    'Mild Typhoon': {'wind_speed': 17.2, 'color': 'rgb(255, 255, 0)'},        # 17.2-33.6 m/s
+    'Tropical Depression': {'wind_speed': 0, 'color': 'rgb(173, 216, 230)'}   # < 17.2 m/s
+}
+
+def categorize_typhoon_by_standard(wind_speed, standard='atlantic'):
+    """
+    Categorize a typhoon based on wind speed and the chosen standard.
+    wind_speed is in knots.
+    """
+    if standard == 'taiwan':
+        # Convert knots to m/s for the Taiwan standard
+        wind_speed_ms = wind_speed * 0.514444
+
+        if wind_speed_ms >= 51.0:
+            return 'Strong Typhoon', taiwan_standard['Strong Typhoon']['color']
+        elif wind_speed_ms >= 33.7:
+            return 'Medium Typhoon', taiwan_standard['Medium Typhoon']['color']
+        elif wind_speed_ms >= 17.2:
+            return 'Mild Typhoon', taiwan_standard['Mild Typhoon']['color']
+        else:
+            return 'Tropical Depression', taiwan_standard['Tropical Depression']['color']
+    else:
+        # Atlantic standard uses knots
         if wind_speed >= 137:
+            return 'C5 Super Typhoon', atlantic_standard['C5 Super Typhoon']['color']
         elif wind_speed >= 113:
+            return 'C4 Very Strong Typhoon', atlantic_standard['C4 Very Strong Typhoon']['color']
         elif wind_speed >= 96:
+            return 'C3 Strong Typhoon', atlantic_standard['C3 Strong Typhoon']['color']
         elif wind_speed >= 83:
+            return 'C2 Typhoon', atlantic_standard['C2 Typhoon']['color']
         elif wind_speed >= 64:
+            return 'C1 Typhoon', atlantic_standard['C1 Typhoon']['color']
         elif wind_speed >= 34:
+            return 'Tropical Storm', atlantic_standard['Tropical Storm']['color']
         else:
+            return 'Tropical Depression', atlantic_standard['Tropical Depression']['color']
 
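+# Usage sketch (150 kt ≈ 77.2 m/s, so both standards place it in their top class):
+# >>> categorize_typhoon_by_standard(150, 'atlantic')
+# ('C5 Super Typhoon', 'rgb(255, 0, 0)')
+# >>> categorize_typhoon_by_standard(150, 'taiwan')
+# ('Strong Typhoon', 'rgb(255, 0, 0)')
+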
+# Initialize data at startup
+def initialize_data():
+    global oni_df, ibtracs, oni_data, typhoon_data, oni_long, typhoon_max, merged_data, data, max_wind_speed, min_pressure
+
+    print(f"Using data path: {DATA_PATH}")
+    # Update ONI data before starting the application
+    update_oni_data()
+    oni_df = fetch_oni_data_from_csv(ONI_DATA_PATH)
+    ibtracs = load_ibtracs_data()
+
+    if os.path.exists(LOCAL_iBtrace_PATH):
+        convert_typhoondata(LOCAL_iBtrace_PATH, TYPHOON_DATA_PATH)
+
+    oni_data, typhoon_data = load_data(ONI_DATA_PATH, TYPHOON_DATA_PATH)
+    oni_long = process_oni_data(oni_data)
+    typhoon_max = process_typhoon_data(typhoon_data)
+    merged_data = merge_data(oni_long, typhoon_max)
+    data = preprocess_data(oni_data, typhoon_data)
+    max_wind_speed, min_pressure = calculate_max_wind_min_pressure(typhoon_data)
+
+    # Schedule data updates
+    schedule.every().day.at("01:00").do(update_ibtracs_data)
+    schedule.every().day.at("00:00").do(lambda: update_oni_data() if should_update_oni() else None)
+
+    # Run the scheduler in a separate thread
+    scheduler_thread = threading.Thread(target=run_schedule)
+    scheduler_thread.daemon = True
+    scheduler_thread.start()
+
+    return oni_df, ibtracs, typhoon_data
+
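+# The scheduler runs in a daemon thread so it never blocks interpreter shutdown;
+# run_schedule (defined earlier in this file) is assumed to loop over
+# schedule.run_pending() with a short sleep.
+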
+# Function to get available years from typhoon data
+def get_available_years():
+    if typhoon_data is None:
+        return []
+    years = typhoon_data['ISO_TIME'].dt.year.unique()
+    years = years[~np.isnan(years)]  # drop NaN years from unparsable timestamps
+    return sorted(int(y) for y in years)  # plain ints, not numpy floats
+
+# Function to get available typhoons for a selected year
+def get_typhoons_for_year(year):
+    if not year or ibtracs is None:
+        return []
+
+    try:
+        year = int(year)
+        season = ibtracs.get_season(year)
+        storm_summary = season.summary()
+
+        typhoon_options = []
+        for i in range(storm_summary['season_storms']):
+            storm_id = storm_summary['id'][i]
+            storm_name = storm_summary['name'][i]
+            typhoon_options.append((f"{storm_name} ({storm_id})", storm_id))
+
+        return typhoon_options
+    except Exception as e:
+        print(f"Error getting typhoons for year {year}: {e}")
+        return []
+
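+# get_typhoons_for_year returns (label, value) pairs for the Gradio dropdown,
+# e.g. [('HAIKUI (2023245N22126)', '2023245N22126'), ...]; the name and SID shown
+# here are illustrative, actual values come from the IBTrACS season summary.
+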
+# Create animation for typhoon path
+def create_typhoon_path_animation(year, typhoon_id, standard):
+    if not year or not typhoon_id:
+        return None
+
+    try:
+        storm = ibtracs.get_storm(typhoon_id)
         fig = go.Figure()
 
         fig.add_trace(
+            go.Scattergeo(
+                lon=storm.lon,
+                lat=storm.lat,
                 mode='lines',
+                line=dict(width=2, color='gray'),
+                name='Path',
+                showlegend=False,
             )
         )
 
         fig.add_trace(
+            go.Scattergeo(
+                lon=[storm.lon[0]],
+                lat=[storm.lat[0]],
+                mode='markers',
+                marker=dict(size=10, color='green', symbol='star'),
+                name='Starting Point',
+                text=storm.time[0].strftime('%Y-%m-%d %H:%M'),
+                hoverinfo='text+name',
             )
         )
 
+        frames = []
+        for i in range(len(storm.time)):
+            category, color = categorize_typhoon_by_standard(storm.vmax[i], standard)
+
+            # Get additional data if available
+            r34_ne = storm.dict['USA_R34_NE'][i] if 'USA_R34_NE' in storm.dict else None
+            r34_se = storm.dict['USA_R34_SE'][i] if 'USA_R34_SE' in storm.dict else None
+            r34_sw = storm.dict['USA_R34_SW'][i] if 'USA_R34_SW' in storm.dict else None
+            r34_nw = storm.dict['USA_R34_NW'][i] if 'USA_R34_NW' in storm.dict else None
+            rmw = storm.dict['USA_RMW'][i] if 'USA_RMW' in storm.dict else None
+            eye_diameter = storm.dict['USA_EYE'][i] if 'USA_EYE' in storm.dict else None
+
+            radius_info = f"R34: NE={r34_ne}, SE={r34_se}, SW={r34_sw}, NW={r34_nw}<br>"
+            radius_info += f"RMW: {rmw}<br>"
+            radius_info += f"Eye Diameter: {eye_diameter}"
+
+            frame_data = [
+                go.Scattergeo(
+                    lon=storm.lon[:i+1],
+                    lat=storm.lat[:i+1],
+                    mode='lines',
+                    line=dict(width=2, color='blue'),
+                    name='Path Traveled',
+                    showlegend=False,
+                ),
+                go.Scattergeo(
+                    lon=[storm.lon[i]],
+                    lat=[storm.lat[i]],
+                    mode='markers+text',
+                    marker=dict(size=10, color=color, symbol='star'),
+                    text=category,
+                    textposition="top center",
+                    textfont=dict(size=12, color=color),
+                    name='Current Location',
+                    hovertext=f"{storm.time[i].strftime('%Y-%m-%d %H:%M')}<br>"
+                              f"Category: {category}<br>"
+                              f"Wind Speed: {storm.vmax[i]:.1f} kt<br>"  # vmax is reported in knots
+                              f"{radius_info}",
+                    hoverinfo='text',
+                ),
+            ]
+            frames.append(go.Frame(data=frame_data, name=f"frame{i}"))
+
+        fig.frames = frames
+
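+        # Each go.Frame fully replaces the figure's data, and the slider steps
+        # below select frames by name ("frame0", "frame1", ...), one per track point.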
         fig.update_layout(
+            title=f"Typhoon {storm.name} Path ({year})",
+            showlegend=False,
             geo=dict(
                 projection_type='natural earth',
                 showland=True,
                 coastlinecolor='rgb(100, 100, 100)',
                 showocean=True,
                 oceancolor='rgb(230, 250, 255)',
             ),
             updatemenus=[{
                 "buttons": [
                     # ... (play/pause button definitions not shown in this view) ...
                 "xanchor": "right",
                 "y": 0,
                 "yanchor": "top"
+            }],
+            sliders=[{
+                "active": 0,
+                "yanchor": "top",
+                "xanchor": "left",
+                "currentvalue": {
+                    "font": {"size": 20},
+                    "prefix": "Time: ",
+                    "visible": True,
+                    "xanchor": "right"
+                },
+                "transition": {"duration": 100, "easing": "cubic-in-out"},
+                "pad": {"b": 10, "t": 50},
+                "len": 0.9,
+                "x": 0.1,
+                "y": 0,
+                "steps": [
+                    {
+                        "args": [[f"frame{k}"],
+                                 {"frame": {"duration": 100, "redraw": True},
+                                  "mode": "immediate",
+                                  "transition": {"duration": 0}}],
+                        "label": storm.time[k].strftime('%Y-%m-%d %H:%M'),
+                        "method": "animate"
+                    }
+                    for k in range(len(storm.time))
+                ]
             }]
         )
 
+        return fig
+    except Exception as e:
+        print(f"Error creating typhoon path animation: {e}")
+        return None
+
+# Function to analyze typhoon tracks
+def analyze_typhoon_tracks(start_year, start_month, end_year, end_month, enso_selection, typhoon_search=""):
+    start_date = datetime(int(start_year), int(start_month), 1)
+    end_date = datetime(int(end_year), int(end_month), 28)  # day 28 is valid in every month
+
+    # Create typhoon tracks plot
+    fig_tracks = go.Figure()
+
+    # Map Gradio dropdown values to the values used in the original code
+    enso_map = {
+        "All Years": "all",
+        "El Niño Years": "el_nino",
+        "La Niña Years": "la_nina",
+        "Neutral Years": "neutral"
+    }
+    enso_value = enso_map[enso_selection]
+
+    try:
+        for year in range(int(start_year), int(end_year) + 1):
+            if year not in ibtracs.data.keys():
+                continue
+
+            season = ibtracs.get_season(year)
+            for storm_id in season.summary()['id']:
+                storm = get_storm_data(storm_id)
+                storm_dates = storm.time
+
+                if any(start_date <= date <= end_date for date in storm_dates):
+                    storm_date_str = storm_dates[0].strftime('%Y-%b')
+                    if storm_date_str in oni_df.index:
+                        storm_oni = oni_df.loc[storm_date_str]['ONI']
+                        if isinstance(storm_oni, pd.Series):
+                            storm_oni = storm_oni.iloc[0]
+
+                        phase = classify_enso_phases(storm_oni)
+
+                        if (enso_value == 'all' or
+                            (enso_value == 'el_nino' and phase == 'El Nino') or
+                            (enso_value == 'la_nina' and phase == 'La Nina') or
+                            (enso_value == 'neutral' and phase == 'Neutral')):
+
+                            color = {'El Nino': 'red', 'La Nina': 'blue', 'Neutral': 'green'}[phase]
+
+                            # Highlight searched typhoon
+                            if typhoon_search and typhoon_search.lower() in storm.name.lower():
+                                line_width = 5
+                                line_color = 'yellow'
+                            else:
+                                line_width = 2
+                                line_color = color
+
+                            fig_tracks.add_trace(go.Scattergeo(
+                                lon=storm.lon,
+                                lat=storm.lat,
+                                mode='lines',
+                                name=storm.name,
+                                text=f'{storm.name} ({year})',
+                                hoverinfo='text',
+                                line=dict(width=line_width, color=line_color)
+                            ))
+
+        fig_tracks.update_layout(
+            title=f'Typhoon Tracks from {start_year}-{start_month} to {end_year}-{end_month}',
+            geo=dict(
+                projection_type='natural earth',
+                showland=True,
+                coastlinecolor='rgb(100, 100, 100)',
+                countrycolor='rgb(204, 204, 204)',
             )
         )
 
+        # Calculate statistics for this period
+        filtered_data = merged_data[
+            (merged_data['Year'] >= int(start_year)) &
+            (merged_data['Year'] <= int(end_year)) &
+            (merged_data['Month'].astype(int) >= int(start_month)) &
+            (merged_data['Month'].astype(int) <= int(end_month))
+        ]
+
+        max_wind = filtered_data['USA_WIND'].max() if not filtered_data.empty else 0
+        min_press = filtered_data['USA_PRES'].min() if not filtered_data.empty else 0
+
+        stats_text = f"Maximum Wind Speed: {max_wind:.2f} knots\nMinimum Pressure: {min_press:.2f} hPa"
+
+        # Create wind scatter plot
+        wind_oni_scatter = px.scatter(filtered_data, x='ONI', y='USA_WIND', color='Category',
+                                      hover_data=['NAME', 'Year', 'Category'],
+                                      title='Wind Speed vs ONI',
+                                      labels={'ONI': 'ONI Value', 'USA_WIND': 'Maximum Wind Speed (knots)'},
+                                      color_discrete_map=color_map)
+
+        # Create pressure scatter plot
+        pressure_oni_scatter = px.scatter(filtered_data, x='ONI', y='USA_PRES', color='Category',
+                                          hover_data=['NAME', 'Year', 'Category'],
+                                          title='Pressure vs ONI',
+                                          labels={'ONI': 'ONI Value', 'USA_PRES': 'Minimum Pressure (hPa)'},
+                                          color_discrete_map=color_map)
+
+        return fig_tracks, wind_oni_scatter, pressure_oni_scatter, stats_text
+    except Exception as e:
+        error_fig = go.Figure()
+        error_fig.add_annotation(text=f"Error: {str(e)}", xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False)
+        return error_fig, error_fig, error_fig, f"Error analyzing typhoon tracks: {str(e)}"
+
+# Function to run cluster analysis
+def run_cluster_analysis(start_year, start_month, end_year, end_month, n_clusters, enso_selection, analysis_type):
+    start_date = datetime(int(start_year), int(start_month), 1)
+    end_date = datetime(int(end_year), int(end_month), 28)
+
+    # Map Gradio dropdown values to the values used in the original code
+    enso_map = {
+        "All Years": "all",
+        "El Niño Years": "el_nino",
+        "La Niña Years": "la_nina",
+        "Neutral Years": "neutral"
+    }
+    enso_value = enso_map[enso_selection]
+
+    fig_routes = go.Figure()
+
+    try:
+        # Clustering analysis
+        west_pacific_storms = []
+        for year in range(int(start_year), int(end_year) + 1):
+            if year not in ibtracs.data.keys():
                 continue
+
+            season = ibtracs.get_season(year)
+            for storm_id in season.summary()['id']:
+                storm = get_storm_data(storm_id)
+                storm_date = storm.time[0]
+
+                # Try to find the ONI value for this storm date
+                date_str = storm_date.strftime('%Y-%b')
+                if date_str in oni_df.index:
+                    storm_oni = oni_df.loc[date_str]['ONI']
+                    if isinstance(storm_oni, pd.Series):
+                        storm_oni = storm_oni.iloc[0]
+                    storm_phase = classify_enso_phases(storm_oni)
+
+                    if enso_value == 'all' or \
+                       (enso_value == 'el_nino' and storm_phase == 'El Nino') or \
+                       (enso_value == 'la_nina' and storm_phase == 'La Nina') or \
+                       (enso_value == 'neutral' and storm_phase == 'Neutral'):
+                        lons, lats = filter_west_pacific_coordinates(np.array(storm.lon), np.array(storm.lat))
+                        if len(lons) > 1:  # Ensure the storm has a valid path in the West Pacific
+                            west_pacific_storms.append((lons, lats))
+
+        if not west_pacific_storms:
+            return None, "No storms found matching the criteria"
+
+        # Resample every route to the same number of points so each becomes a
+        # fixed-length vector that KMeans can compare
+        max_length = max(len(storm[0]) for storm in west_pacific_storms)
+        standardized_routes = []
+
+        for lons, lats in west_pacific_storms:
+            if len(lons) < 2:  # Skip if not enough points
+                continue
+            t = np.linspace(0, 1, len(lons))
+            t_new = np.linspace(0, 1, max_length)
+            lon_interp = interp1d(t, lons, kind='linear')(t_new)
+            lat_interp = interp1d(t, lats, kind='linear')(t_new)
+            route_vector = np.column_stack((lon_interp, lat_interp)).flatten()
+            standardized_routes.append(route_vector)
+
+        if not standardized_routes:
+            return None, "Unable to create standardized routes"
+
+        kmeans = KMeans(n_clusters=int(n_clusters), random_state=42, n_init=10)
+        clusters = kmeans.fit_predict(standardized_routes)
+
+        # Count the number of typhoons in each cluster
+        cluster_counts = np.bincount(clusters)
+
+        # Draw all routes (in a lighter color)
+        if analysis_type == "Show Routes":
+            for lons, lats in west_pacific_storms:
+                fig_routes.add_trace(go.Scattergeo(
+                    lon=lons, lat=lats,
+                    mode='lines',
+                    line=dict(width=1, color='lightgray'),
+                    showlegend=False,
+                    hoverinfo='none'
+                ))
+
+        equations_output = ""
+        # Draw cluster centroids
+        if analysis_type == "Show Clusters" or analysis_type == "Fourier Series":
+            for i in range(int(n_clusters)):
+                cluster_center = kmeans.cluster_centers_[i].reshape(-1, 2)
+
+                fig_routes.add_trace(go.Scattergeo(
+                    lon=cluster_center[:, 0],
+                    lat=cluster_center[:, 1],
+                    mode='lines',
+                    name=f'Cluster {i+1} (n={cluster_counts[i]})',
+                    line=dict(width=3)
+                ))
+
+                if analysis_type == "Fourier Series":
+                    cluster_equations, (lon_min, lon_max) = generate_cluster_equations(cluster_center)
+
+                    equations_output += f"\n--- Cluster {i+1} (Typhoons: {cluster_counts[i]}) ---\n"
+                    for name, eq in cluster_equations:
+                        equations_output += f"{name}: {eq}\n"
+
+                    equations_output += "\nTo use in GeoGebra:\n"
+                    equations_output += f"1. Set x-axis from 0 to {2*np.pi:.4f}\n"
+                    equations_output += "2. Use the equation as is\n"
+                    equations_output += f"3. To convert x back to longitude: lon = {lon_min:.4f} + x * {(lon_max - lon_min) / (2*np.pi):.4f}\n\n"
+
+        enso_phase_text = {
+            'all': 'All Years',
+            'el_nino': 'El Niño Years',
+            'la_nina': 'La Niña Years',
+            'neutral': 'Neutral Years'
+        }
+
+        fig_routes.update_layout(
+            title=f'Typhoon Routes Clustering in West Pacific ({start_year}-{end_year}) - {enso_phase_text[enso_value]}',
             geo=dict(
+                projection_type='mercator',
                 showland=True,
                 landcolor='rgb(243, 243, 243)',
                 countrycolor='rgb(204, 204, 204)',
                 coastlinecolor='rgb(100, 100, 100)',
                 showocean=True,
                 oceancolor='rgb(230, 250, 255)',
+                lataxis={'range': [0, 40]},
+                lonaxis={'range': [100, 180]},
+                center={'lat': 20, 'lon': 140},
+            ),
+            legend_title='Clusters'
         )
 
+        return fig_routes, equations_output
+    except Exception as e:
+        error_fig = go.Figure()
+        error_fig.add_annotation(text=f"Error: {str(e)}", xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False)
+        return error_fig, f"Error in cluster analysis: {str(e)}"
+
+ # Function to perform logistic regression
1046
+ def perform_logistic_regression(start_year, start_month, end_year, end_month, regression_type):
1047
+ start_date = datetime(int(start_year), int(start_month), 1)
1048
+ end_date = datetime(int(end_year), int(end_month), 28)
1049
+
1050
+ try:
1051
+ filtered_data = merged_data[
1052
+ (merged_data['ISO_TIME'] >= start_date) &
1053
+ (merged_data['ISO_TIME'] <= end_date)
1054
+ ]
1055
 
1056
+ if regression_type == "Wind Speed":
1057
+ filtered_data['severe_typhoon'] = (filtered_data['USA_WIND'] >= 64).astype(int) # 64 knots threshold for severe typhoons
1058
+ X = sm.add_constant(filtered_data['ONI'])
1059
+ y = filtered_data['severe_typhoon']
1060
+ model = sm.Logit(y, X).fit()
1061
 
1062
+ beta_1 = model.params['ONI']
1063
+ exp_beta_1 = np.exp(beta_1)
1064
+ p_value = model.pvalues['ONI']
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1065
 
1066
+ el_nino_data = filtered_data[filtered_data['ONI'] >= 0.5]
1067
+ la_nina_data = filtered_data[filtered_data['ONI'] <= -0.5]
1068
+ neutral_data = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1069
 
1070
+ el_nino_severe = el_nino_data['severe_typhoon'].mean() if not el_nino_data.empty else 0
1071
+ la_nina_severe = la_nina_data['severe_typhoon'].mean() if not la_nina_data.empty else 0
1072
+ neutral_severe = neutral_data['severe_typhoon'].mean() if not neutral_data.empty else 0
1073
+
1074
+ result = f"""
1075
+ # Wind Speed Logistic Regression Results
1076
+
1077
+ β1 (ONI coefficient): {beta_1:.4f}
1078
+ exp(β1) (Odds Ratio): {exp_beta_1:.4f}
1079
+ P-value: {p_value:.4f}
1080
+
1081
+ Interpretation:
1082
+ - For each unit increase in ONI, the odds of a severe typhoon are {"increased" if exp_beta_1 > 1 else "decreased"} by a factor of {exp_beta_1:.2f}.
1083
+ - This effect is {"statistically significant" if p_value < 0.05 else "not statistically significant"} at the 0.05 level.
1084
+
1085
+ Proportion of severe typhoons:
1086
+ - El Niño conditions: {el_nino_severe:.2%}
1087
+ - La Niña conditions: {la_nina_severe:.2%}
1088
+ - Neutral conditions: {neutral_severe:.2%}
1089
+ """
1090
+
1091
+ elif regression_type == "Pressure":
1092
+ filtered_data['intense_typhoon'] = (filtered_data['USA_PRES'] <= 950).astype(int) # 950 hPa threshold for intense typhoons
1093
+ X = sm.add_constant(filtered_data['ONI'])
1094
+ y = filtered_data['intense_typhoon']
1095
+ model = sm.Logit(y, X).fit()
1096
+
1097
+ beta_1 = model.params['ONI']
1098
+ exp_beta_1 = np.exp(beta_1)
1099
+ p_value = model.pvalues['ONI']
1100
+
1101
+ el_nino_data = filtered_data[filtered_data['ONI'] >= 0.5]
1102
+ la_nina_data = filtered_data[filtered_data['ONI'] <= -0.5]
1103
+ neutral_data = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]
1104
+
1105
+ el_nino_intense = el_nino_data['intense_typhoon'].mean() if not el_nino_data.empty else 0
1106
+ la_nina_intense = la_nina_data['intense_typhoon'].mean() if not la_nina_data.empty else 0
1107
+ neutral_intense = neutral_data['intense_typhoon'].mean() if not neutral_data.empty else 0
1108
+
1109
+ result = f"""
1110
+ # Pressure Logistic Regression Results
1111
+
1112
+ β1 (ONI coefficient): {beta_1:.4f}
1113
+ exp(β1) (Odds Ratio): {exp_beta_1:.4f}
1114
+ P-value: {p_value:.4f}
1115
+
1116
+ Interpretation:
1117
+ - For each unit increase in ONI, the odds of an intense typhoon (pressure <= 950 hPa) are {"increased" if exp_beta_1 > 1 else "decreased"} by a factor of {exp_beta_1:.2f}.
1118
+ - This effect is {"statistically significant" if p_value < 0.05 else "not statistically significant"} at the 0.05 level.
1119
+
1120
+ Proportion of intense typhoons:
1121
+ - El Niño conditions: {el_nino_intense:.2%}
1122
+ - La Niña conditions: {la_nina_intense:.2%}
1123
+ - Neutral conditions: {neutral_intense:.2%}
1124
+ """
1125
+
1126
+ elif regression_type == "Longitude":
1127
+ filtered_data = filtered_data.dropna(subset=['LON'])
1128
+
1129
+ if len(filtered_data) == 0:
1130
+ return "Insufficient data for longitude analysis"
1131
+
1132
+ filtered_data['western_typhoon'] = (filtered_data['LON'] <= 140).astype(int) # 140°E as threshold for western typhoons
1133
+ X = sm.add_constant(filtered_data['ONI'])
1134
+ y = filtered_data['western_typhoon']
1135
+ model = sm.Logit(y, X).fit()
1136
+
1137
+ beta_1 = model.params['ONI']
1138
+ exp_beta_1 = np.exp(beta_1)
1139
+ p_value = model.pvalues['ONI']
1140
+
1141
+ el_nino_data = filtered_data[filtered_data['ONI'] >= 0.5]
1142
+ la_nina_data = filtered_data[filtered_data['ONI'] <= -0.5]
1143
+ neutral_data = filtered_data[(filtered_data['ONI'] > -0.5) & (filtered_data['ONI'] < 0.5)]
1144
+
1145
+ el_nino_western = el_nino_data['western_typhoon'].mean() if not el_nino_data.empty else 0
1146
+ la_nina_western = la_nina_data['western_typhoon'].mean() if not la_nina_data.empty else 0
1147
+ neutral_western = neutral_data['western_typhoon'].mean() if not neutral_data.empty else 0
1148
+
1149
+ result = f"""
1150
+ # Longitude Logistic Regression Results
1151
+
1152
+ β1 (ONI coefficient): {beta_1:.4f}
1153
+ exp(β1) (Odds Ratio): {exp_beta_1:.4f}
1154
+ P-value: {p_value:.4f}
1155
+
1156
+ Interpretation:
1157
+ - For each unit increase in ONI, the odds of a typhoon forming west of 140°E are {"increased" if exp_beta_1 > 1 else "decreased"} by a factor of {exp_beta_1:.2f}.
1158
+ - This effect is {"statistically significant" if p_value < 0.05 else "not statistically significant"} at the 0.05 level.
1159
+
1160
+ Proportion of typhoons forming west of 140°E:
1161
+ - El Niño conditions: {el_nino_western:.2%}
1162
+ - La Niña conditions: {la_nina_western:.2%}
1163
+ - Neutral conditions: {neutral_western:.2%}
1164
+ """
1165
+
1166
+ return result
1167
+ except Exception as e:
1168
+ return f"Error performing logistic regression: {str(e)}"
1169
 
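+# Reading exp(β1), with hypothetical numbers: if β1 = 0.30 then exp(β1) ≈ 1.35,
+# so each +1.0 step in ONI multiplies the odds of the modeled event by about 1.35.
+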
+# Define Gradio interface
 def create_interface():
+    # Initialize data first
+    initialize_data()
+
+    # Define interface tabs
+    with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
         gr.Markdown("# Typhoon Analysis Dashboard")
 
+        with gr.Tab("Typhoon Tracks Analysis"):
+            with gr.Row():
+                with gr.Column():
+                    start_year = gr.Number(value=2000, label="Start Year", minimum=1950, maximum=2024, step=1)
+                    start_month = gr.Number(value=1, label="Start Month", minimum=1, maximum=12, step=1)
+                with gr.Column():
+                    end_year = gr.Number(value=2024, label="End Year", minimum=1950, maximum=2024, step=1)
+                    end_month = gr.Number(value=6, label="End Month", minimum=1, maximum=12, step=1)
+
+            enso_dropdown = gr.Dropdown(
+                choices=["All Years", "El Niño Years", "La Niña Years", "Neutral Years"],
+                value="All Years",
+                label="ENSO Phase"
+            )
+
+            typhoon_search = gr.Textbox(label="Search Typhoon Name")
+
+            analyze_button = gr.Button("Analyze Tracks")
+
+            with gr.Row():
+                tracks_plot = gr.Plot(label="Typhoon Tracks")
+                stats_text = gr.Textbox(label="Statistics", lines=4)
+
+            with gr.Row():
+                wind_plot = gr.Plot(label="Wind Speed vs ONI")
+                pressure_plot = gr.Plot(label="Pressure vs ONI")
+
+            analyze_button.click(
+                analyze_typhoon_tracks,
+                inputs=[start_year, start_month, end_year, end_month, enso_dropdown, typhoon_search],
+                outputs=[tracks_plot, wind_plot, pressure_plot, stats_text]
+            )
+
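+            # The outputs list above must stay in analyze_typhoon_tracks' return
+            # order: tracks figure, wind scatter, pressure scatter, stats text.
+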
+        with gr.Tab("Clustering Analysis"):
+            with gr.Row():
+                with gr.Column():
+                    cluster_start_year = gr.Number(value=2000, label="Start Year", minimum=1950, maximum=2024, step=1)
+                    cluster_start_month = gr.Number(value=1, label="Start Month", minimum=1, maximum=12, step=1)
+                with gr.Column():
+                    cluster_end_year = gr.Number(value=2024, label="End Year", minimum=1950, maximum=2024, step=1)
+                    cluster_end_month = gr.Number(value=6, label="End Month", minimum=1, maximum=12, step=1)
+
+            with gr.Row():
+                n_clusters = gr.Number(value=5, label="Number of Clusters", minimum=1, maximum=20, step=1)
+                cluster_enso_dropdown = gr.Dropdown(
+                    choices=["All Years", "El Niño Years", "La Niña Years", "Neutral Years"],
+                    value="All Years",
                     label="ENSO Phase"
                 )
+
+            analysis_type = gr.Radio(
+                choices=["Show Routes", "Show Clusters", "Fourier Series"],
+                value="Show Clusters",
+                label="Analysis Type"
+            )
+
+            cluster_button = gr.Button("Run Cluster Analysis")
+
+            cluster_plot = gr.Plot(label="Typhoon Routes Clustering")
+            equation_text = gr.Textbox(label="Cluster Equations", lines=15)
+
+            cluster_button.click(
+                run_cluster_analysis,
+                inputs=[
+                    cluster_start_year, cluster_start_month, cluster_end_year,
+                    cluster_end_month, n_clusters, cluster_enso_dropdown, analysis_type
+                ],
+                outputs=[cluster_plot, equation_text]
+            )
+
+        with gr.Tab("Regression Analysis"):
+            with gr.Row():
+                with gr.Column():
+                    reg_start_year = gr.Number(value=2000, label="Start Year", minimum=1950, maximum=2024, step=1)
+                    reg_start_month = gr.Number(value=1, label="Start Month", minimum=1, maximum=12, step=1)
+                with gr.Column():
+                    reg_end_year = gr.Number(value=2024, label="End Year", minimum=1950, maximum=2024, step=1)
+                    reg_end_month = gr.Number(value=6, label="End Month", minimum=1, maximum=12, step=1)
+
+            regression_type = gr.Radio(
+                choices=["Wind Speed", "Pressure", "Longitude"],
+                value="Wind Speed",
+                label="Regression Type"
+            )
+
+            regression_button = gr.Button("Perform Logistic Regression")
+
+            regression_results = gr.Textbox(label="Regression Results", lines=15)
+
+            regression_button.click(
+                perform_logistic_regression,
+                inputs=[reg_start_year, reg_start_month, reg_end_year, reg_end_month, regression_type],
+                outputs=regression_results
+            )
+
+        with gr.Tab("Typhoon Path Animation"):
+            with gr.Row():
+                year_dropdown = gr.Dropdown(
+                    choices=[str(year) for year in range(1950, 2025)],
+                    value="2024",
+                    label="Year"
+                )
+
+                typhoon_dropdown = gr.Dropdown(
+                    label="Typhoon",
+                    interactive=True
+                )
+
+                standard_dropdown = gr.Dropdown(
+                    choices=["atlantic", "taiwan"],
+                    value="atlantic",
+                    label="Classification Standard"
+                )
+
+            # Update typhoon dropdown when year changes
+            def update_typhoon_choices(year):
+                options = get_typhoons_for_year(year)
+                return gr.update(choices=options, value=options[0][1] if options else None)
+
+            year_dropdown.change(
+                update_typhoon_choices,
+                inputs=year_dropdown,
+                outputs=typhoon_dropdown
+            )
+
+            animation_button = gr.Button("Generate Animation")
+
+            typhoon_animation = gr.Plot(label="Typhoon Path Animation")
+
+            animation_button.click(
+                create_typhoon_path_animation,
+                inputs=[year_dropdown, typhoon_dropdown, standard_dropdown],
+                outputs=typhoon_animation
+            )
+
         return demo
 
+# Run the app
 if __name__ == "__main__":
+    # Background updates are already scheduled inside initialize_data(), which
+    # create_interface() calls, so the scheduler is not registered a second time here.
+
+    # Create and launch the Gradio interface
     demo = create_interface()
+    demo.launch(server_name="127.0.0.1", server_port=7860)