euler314 committed
Commit 27f90be · verified · 1 Parent(s): 8e5f90a

Update app.py

Files changed (1)
  1. app.py +15 -100
app.py CHANGED
@@ -18,6 +18,7 @@ import cartopy.feature as cfeature
 import plotly.graph_objects as go
 import plotly.express as px
 from plotly.subplots import make_subplots
+
 from sklearn.manifold import TSNE
 from sklearn.cluster import DBSCAN
 from sklearn.preprocessing import StandardScaler
@@ -47,6 +48,7 @@ logging.basicConfig(
 parser = argparse.ArgumentParser(description='Typhoon Analysis Dashboard')
 parser.add_argument('--data_path', type=str, default=os.getcwd(), help='Path to the data directory')
 args = parser.parse_args()
+DATA_PATH = args.data_path
 
 # Update data paths for Huggingface Spaces
 TEMP_DIR = tempfile.gettempdir()
@@ -55,7 +57,7 @@ DATA_PATH = os.environ.get('DATA_PATH', TEMP_DIR)
 # Ensure directory exists
 os.makedirs(DATA_PATH, exist_ok=True)
 
-# Data paths
+# Update your file paths
 ONI_DATA_PATH = os.path.join(DATA_PATH, 'oni_data.csv')
 TYPHOON_DATA_PATH = os.path.join(DATA_PATH, 'processed_typhoon_data.csv')
 MERGED_DATA_CSV = os.path.join(DATA_PATH, 'merged_typhoon_era5_data.csv')
@@ -115,95 +117,7 @@ regions = {
     "Hong Kong": {"lat_min": 21.5, "lat_max": 23, "lon_min": 113, "lon_max": 115},
     "Philippines": {"lat_min": 5, "lat_max": 21, "lon_min": 115, "lon_max": 130}
 }
-# Add these functions near the top of the file after imports
-# After your imports section but before any other code, add:
 
-# -----------------------------
-# Sample Data Generation
-# -----------------------------
-def generate_sample_oni_data():
-    """Generate sample ONI data"""
-    years = range(1950, 2024)
-    months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
-    data = {'Year': list(years)}
-
-    # Generate random ONI values between -2.5 and 2.5
-    np.random.seed(42)  # For reproducibility
-    for month in months:
-        data[month] = np.round(np.random.uniform(-2.5, 2.5, len(years)), 1)
-
-    df = pd.DataFrame(data)
-    df.to_csv(ONI_DATA_PATH, index=False)
-    logging.info(f"Generated sample ONI data and saved to {ONI_DATA_PATH}")
-    return df
-
-def generate_sample_typhoon_data():
-    """Generate sample typhoon data"""
-    # Create sample data with realistic values
-    np.random.seed(42)
-
-    # Generate 100 sample typhoons
-    n_typhoons = 100
-    n_points_per_typhoon = 20
-
-    data = {
-        'SID': [],
-        'ISO_TIME': [],
-        'NAME': [],
-        'SEASON': [],
-        'LAT': [],
-        'LON': [],
-        'USA_WIND': [],
-        'USA_PRES': []
-    }
-
-    # Basin prefixes and sample names
-    basin_prefixes = ['WP', 'EP', 'NA']
-    typhoon_names = ['HAIYAN', 'YOLANDA', 'MANGKHUT', 'YUTU', 'HAGIBIS', 'MERANTI',
-                     'MEGI', 'HAGUPIT', 'MAYSAK', 'HATO', 'NEPARTAK', 'SOUDELOR']
-
-    # Generate data
-    for i in range(n_typhoons):
-        basin = np.random.choice(basin_prefixes)
-        year = np.random.randint(1980, 2024)
-        name = np.random.choice(typhoon_names)
-        sid = f"{basin}{year}{i:02d}"
-
-        # Starting position
-        start_lon = np.random.uniform(120, 170)
-        start_lat = np.random.uniform(5, 30)
-
-        # Generate track points
-        for j in range(n_points_per_typhoon):
-            # Time progression
-            date = datetime(year, np.random.randint(6, 11), np.random.randint(1, 28),
-                            np.random.randint(0, 24))
-            date += timedelta(hours=j*6)  # 6-hour intervals
-
-            # Position progression (typically moves northwest in WP)
-            lon = start_lon - j * np.random.uniform(0.3, 0.8)
-            lat = start_lat + j * np.random.uniform(0.2, 0.5)
-
-            # Intensity progression (typically intensifies then weakens)
-            intensity_factor = min(1.0, j/(n_points_per_typhoon/2)) if j < n_points_per_typhoon/2 else \
-                1.0 - min(1.0, (j-n_points_per_typhoon/2)/(n_points_per_typhoon/2))
-            wind = np.random.randint(30, 150) * intensity_factor
-            pressure = 1010 - (wind * 0.75)  # Approximate relationship
-
-            # Add to data dict
-            data['SID'].append(sid)
-            data['ISO_TIME'].append(date)
-            data['NAME'].append(name)
-            data['SEASON'].append(year)
-            data['LAT'].append(lat)
-            data['LON'].append(lon)
-            data['USA_WIND'].append(wind)
-            data['USA_PRES'].append(pressure)
-
-    df = pd.DataFrame(data)
-    df.to_csv(TYPHOON_DATA_PATH, index=False)
-    logging.info(f"Generated sample typhoon data and saved to {TYPHOON_DATA_PATH}")
-    return df
 # -----------------------------
 # ONI and Typhoon Data Functions
 # -----------------------------
@@ -248,16 +162,18 @@ def update_oni_data():
     os.remove(temp_file)
 
 def load_data(oni_path, typhoon_path):
-    # Always generate sample data for Huggingface Spaces
-    logging.info("Generating sample data for Huggingface Spaces")
-    oni_data = generate_sample_oni_data()
-    typhoon_data = generate_sample_typhoon_data()
-
-    # Convert ISO_TIME to datetime
-    typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
-    typhoon_data = typhoon_data.dropna(subset=['ISO_TIME'])
-
-    return oni_data, typhoon_data
+    if not os.path.exists(typhoon_path):
+        logging.error(f"Typhoon data file not found: {typhoon_path}")
+        return pd.DataFrame(), pd.DataFrame()
+    try:
+        oni_data = pd.read_csv(oni_path)
+        typhoon_data = pd.read_csv(typhoon_path, low_memory=False)
+        typhoon_data['ISO_TIME'] = pd.to_datetime(typhoon_data['ISO_TIME'], errors='coerce')
+        typhoon_data = typhoon_data.dropna(subset=['ISO_TIME'])
+        return oni_data, typhoon_data
+    except Exception as e:
+        logging.error(f"Error loading data: {e}")
+        return pd.DataFrame(), pd.DataFrame()
 
 def process_oni_data(oni_data):
     oni_long = oni_data.melt(id_vars=['Year'], var_name='Month', value_name='ONI')
@@ -1055,4 +971,3 @@ with gr.Blocks(title="Typhoon Analysis Dashboard") as demo:
         outputs=[tsne_plot, routes_plot, stats_plot, cluster_info])
 
 demo.launch(share=True)
-
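With the sample-data generators removed, load_data() now expects oni_data.csv and processed_typhoon_data.csv to already exist under DATA_PATH. The sketch below is not part of the commit: it seeds a throwaway directory with minimally shaped CSVs and then loads them the same way the revised function does. The directory name and the single illustrative row are assumptions; the column names come from the removed sample generators rather than from a full IBTrACS or ONI export.

import os
import tempfile

import pandas as pd

# Hypothetical data directory; app.py would receive this via --data_path / DATA_PATH.
data_path = os.path.join(tempfile.gettempdir(), "typhoon_demo_data")
os.makedirs(data_path, exist_ok=True)

oni_path = os.path.join(data_path, "oni_data.csv")
typhoon_path = os.path.join(data_path, "processed_typhoon_data.csv")

# ONI table: one row per year, one column per month (the shape process_oni_data later melts).
pd.DataFrame({"Year": [2023], "Jan": [-0.7], "Feb": [-0.4], "Mar": [-0.1]}).to_csv(oni_path, index=False)

# Typhoon track points: the columns the dashboard reads (one illustrative fix).
pd.DataFrame({
    "SID": ["WP202301"],
    "ISO_TIME": ["2023-08-01 00:00:00"],
    "NAME": ["HAIYAN"],
    "SEASON": [2023],
    "LAT": [18.0],
    "LON": [130.0],
    "USA_WIND": [85],
    "USA_PRES": [950],
}).to_csv(typhoon_path, index=False)

# Mirror the revised load_data(): read both CSVs, parse ISO_TIME, drop unparseable rows.
oni_data = pd.read_csv(oni_path)
typhoon_data = pd.read_csv(typhoon_path, low_memory=False)
typhoon_data["ISO_TIME"] = pd.to_datetime(typhoon_data["ISO_TIME"], errors="coerce")
typhoon_data = typhoon_data.dropna(subset=["ISO_TIME"])
print(f"Loaded {len(oni_data)} ONI rows and {len(typhoon_data)} track points from {data_path}")

When either file is missing or unreadable, the revised load_data() logs the error and returns two empty DataFrames, so callers can check oni_data.empty or typhoon_data.empty instead of catching an exception.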