BuildingBench committed on
Commit
dd4ed72
·
verified ·
1 Parent(s): d7f7deb

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +573 -39
src/streamlit_app.py CHANGED
@@ -1,40 +1,574 @@
1
# Old Streamlit demo app (removed by this commit): renders an interactive
# spiral scatter plot whose point count and turn count are slider-controlled.
import altair as alt
import numpy as np

import pandas as pd
import streamlit as st

"""
# Welcome to Streamlit!

Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
forums](https://discuss.streamlit.io).

In the meantime, below is an example of what you can do with just a few lines of code:
"""

# Interactive controls: number of sample points along the spiral and how
# many full revolutions the spiral makes.
num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
num_turns = st.slider("Number of turns in spiral", 1, 300, 31)

# Parametrize the spiral: radius grows linearly from 0 to 1 while the
# angle sweeps num_turns full circles.
indices = np.linspace(0, 1, num_points)
theta = 2 * np.pi * num_turns * indices
radius = indices

# Convert polar (radius, theta) to Cartesian coordinates for plotting.
x = radius * np.cos(theta)
y = radius * np.sin(theta)

df = pd.DataFrame({
    "x": x,
    "y": y,
    "idx": indices,       # position along the spiral, used for color
    "rand": np.random.randn(num_points),  # random jitter, used for point size
})

# Render as an Altair point chart; axes and legends are hidden so only the
# spiral pattern is visible.
st.altair_chart(alt.Chart(df, height=700, width=700)
    .mark_point(filled=True)
    .encode(
        x=alt.X("x", axis=None),
        y=alt.Y("y", axis=None),
        color=alt.Color("idx", legend=None, scale=alt.Scale()),
        size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
    ))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import logging
3
+ import sys
4
+ from pathlib import Path
5
  import pandas as pd
6
+ sys.path.insert(0, str(Path(__file__).parent.parent))
7
+ from building_gen.core import BuildingPipeline, create_building_weather_combinations
8
+
9
+
10
def configure_logging(level: str = "INFO"):
    """Configure root logging for the pipeline CLI.

    Args:
        level: Logging level name (e.g. "DEBUG", "INFO"); unknown names
            silently fall back to INFO.
    """
    resolved_level = getattr(logging, level.upper(), logging.INFO)

    logging.basicConfig(
        level=resolved_level,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # Reduce noise from external libraries
    for noisy_logger in ('googleapiclient', 'google_auth_oauthlib'):
        logging.getLogger(noisy_logger).setLevel(logging.WARNING)
22
+
23
+
24
+
25
def main():
    """CLI entry point for the building processing pipeline.

    Parses command-line arguments, configures logging, initializes a
    BuildingPipeline over --data-dir, then dispatches to every requested
    action: preprocessing, occupancy/window/thermal/combined variations,
    weather table management and validation, building-weather simulation
    combinations, and building queries/statistics/exports.

    Exits with status 1 on invalid WWR ratios, pipeline initialization
    failure, no action flags, keyboard interrupt, or an unhandled error.
    """
    parser = argparse.ArgumentParser(
        description="Building Processing Pipeline - Process and create variations of building energy models",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Process everything
  python scripts/main.py --all

  # Preprocess existing files
  python scripts/main.py --preprocess

  # Weather management
  python scripts/main.py --create-weather-table
  python scripts/main.py --weather-stats
  python scripts/main.py --query-weather --country USA --climate-zone 6A

  # Building-weather simulation
  python scripts/main.py --match-buildings-weather
  python scripts/main.py --create-combinations --simulation-climate-zones 6A 2A
  python scripts/main.py --simulation-stats

  # Weather variations
  python scripts/main.py --weather-vars
  python scripts/main.py --weather-vars --weather-types base real_weather
  python scripts/main.py --weather-variation-stats

  # Create specific variations
  python scripts/main.py --occupancy-vars --occupancy-schedules standard low_occupancy high_occupancy
  python scripts/main.py --window-vars --wwr-ratios 0.2 0.4 0.6 0.8
  python scripts/main.py --thermal-vars --thermal-scenarios default high_performance low_performance

  # Create combined variations with thermal
  python scripts/main.py --combined-vars --thermal-scenarios default high_performance

  # Create table and query
  python scripts/main.py --create-table
  python scripts/main.py --query --building-type office --climate-zone 6A

  # Query and export
  python scripts/main.py --query --building-type office --export results.csv
  python scripts/main.py --query --thermal-scenario high_performance --export high_performance_buildings.csv
        """
    )

    # Global configuration
    parser.add_argument("--data-dir", default="data", help="Data directory (default: data)")
    parser.add_argument("--log-level", choices=["DEBUG", "INFO", "WARNING", "ERROR"],
                        default="INFO", help="Logging level (default: INFO)")

    # Step selection arguments
    step_group = parser.add_argument_group("Pipeline Steps")
    step_group.add_argument("--preprocess", action="store_true", help="Preprocess buildings (add meters, setpoints, etc.)")
    step_group.add_argument("--occupancy-vars", action="store_true", help="Create occupancy variations")
    step_group.add_argument("--window-vars", action="store_true", help="Create window variations")
    step_group.add_argument("--thermal-vars", action="store_true", help="Create thermal resistance variations")
    step_group.add_argument("--combined-vars", action="store_true", help="Create combined variations")
    step_group.add_argument("--create-table", action="store_true", help="Create/update building database table")
    step_group.add_argument("--all", action="store_true", help="Run all steps")

    # Weather management arguments
    weather_group = parser.add_argument_group("Weather Management")
    weather_group.add_argument("--create-weather-table", action="store_true",
                               help="Create weather table from all EPW files")
    weather_group.add_argument("--weather-stats", action="store_true",
                               help="Show weather collection statistics")
    weather_group.add_argument("--query-weather", action="store_true",
                               help="Query weather locations")
    weather_group.add_argument("--export-weather", type=Path,
                               help="Export weather query results to CSV file")
    weather_group.add_argument("--validate-weather", action="store_true",
                               help="Validate all EPW files")
    weather_group.add_argument("--weather-vars", action="store_true",
                               help="Create comprehensive weather variations for all buildings")
    weather_group.add_argument("--weather-types", nargs="+",
                               default=["base", "climate_zone_expanded", "real_weather"],
                               choices=["base", "climate_zone_expanded", "real_weather"],
                               help="Weather variation types to create")
    weather_group.add_argument("--weather-variation-stats", action="store_true",
                               help="Show weather variation statistics")

    # Building-Weather Simulation arguments
    simulation_group = parser.add_argument_group("Building-Weather Simulation")
    simulation_group.add_argument("--match-buildings-weather", action="store_true",
                                  help="Match buildings to weather files (add base_weather_id)")
    simulation_group.add_argument("--create-combinations", action="store_true",
                                  help="Create building-weather combinations for simulation")
    simulation_group.add_argument("--simulation-climate-zones", nargs="+",
                                  help="Climate zones to include in simulations (e.g., 6A 2A 4A)")
    simulation_group.add_argument("--simulation-stats", action="store_true",
                                  help="Show building-weather combination statistics")
    simulation_group.add_argument("--export-combinations", type=Path,
                                  help="Export building-weather combinations to CSV")

    # Weather filtering options
    weather_filter_group = parser.add_argument_group("Weather Filtering Options")
    weather_filter_group.add_argument("--country", help="Filter by country code (e.g., USA, CAN, CHN)")
    weather_filter_group.add_argument("--weather-climate-zone", help="Filter weather by climate zone")
    weather_filter_group.add_argument("--data-source", choices=["base", "expanded", "real"],
                                      help="Filter by data source")
    weather_filter_group.add_argument("--min-latitude", type=float, help="Minimum latitude")
    weather_filter_group.add_argument("--max-latitude", type=float, help="Maximum latitude")
    weather_filter_group.add_argument("--min-longitude", type=float, help="Minimum longitude")
    weather_filter_group.add_argument("--max-longitude", type=float, help="Maximum longitude")

    # Query arguments
    query_group = parser.add_argument_group("Building Query Options")
    query_group.add_argument("--query", action="store_true", help="Query buildings from database")
    query_group.add_argument("--export", type=Path, help="Export query results to CSV file")
    query_group.add_argument("--stats", action="store_true", help="Show database statistics")

    # Preprocessing configuration
    preprocess_group = parser.add_argument_group("Preprocessing Configuration")
    preprocess_group.add_argument("--no-meters", action="store_true",
                                  help="Skip adding HVAC meters")
    preprocess_group.add_argument("--no-outdoor-vars", action="store_true",
                                  help="Skip adding outdoor air variables")
    preprocess_group.add_argument("--timesteps-per-hour", type=int, default=4,
                                  help="Simulation timesteps per hour (default: 4)")
    preprocess_group.add_argument("--no-setpoint-control", action="store_true",
                                  help="Skip adding setpoint control")
    preprocess_group.add_argument("--no-validation", action="store_true",
                                  help="Skip validation of processed files")

    # Variation configuration
    variation_group = parser.add_argument_group("Variation Configuration")
    variation_group.add_argument("--occupancy-schedules", nargs="+",
                                 default=["standard", "low_occupancy", "high_occupancy"],
                                 help="Occupancy schedules. Options: standard, low_occupancy, high_occupancy, early_shift, late_shift, retail, school, flexible_hybrid, hospital, gym, warehouse, 24_7")
    variation_group.add_argument("--wwr-ratios", nargs="+", type=float,
                                 default=[0.2, 0.4, 0.6, 0.8],
                                 help="Window-to-wall ratios (0.0-1.0)")
    variation_group.add_argument("--thermal-scenarios", nargs="+",
                                 default=["default", "high_performance", "low_performance"],
                                 help="Thermal scenarios. Options: default, high_performance, low_performance")

    # Building filtering options
    filter_group = parser.add_argument_group("Building Filtering Options")
    filter_group.add_argument("--building-type",
                              choices=["office", "retail", "school", "hospital", "warehouse", "hotel", "apartment", "restaurant", "healthcare"])
    filter_group.add_argument("--climate-zone",
                              help="Filter by climate zone (e.g., 4A, 5A, 6A)")
    filter_group.add_argument("--variation-type", choices=["base", "occupancy", "windows", "thermal", "combined"],
                              help="Filter by variation type")
    filter_group.add_argument("--occupancy-schedule",
                              choices=["standard", "low_occupancy", "high_occupancy", "early_shift",
                                       "late_shift", "retail", "school", "flexible_hybrid",
                                       "hospital", "gym", "warehouse", "24_7"],
                              help="Filter by occupancy schedule")
    filter_group.add_argument("--thermal-scenario",
                              choices=["default", "high_performance", "low_performance"],
                              help="Filter by thermal scenario")
    filter_group.add_argument("--min-floor-area", type=float, help="Minimum floor area (m²)")
    filter_group.add_argument("--max-floor-area", type=float, help="Maximum floor area (m²)")
    filter_group.add_argument("--min-wwr", type=float, help="Minimum window-to-wall ratio")
    filter_group.add_argument("--max-wwr", type=float, help="Maximum window-to-wall ratio")

    # Table configuration
    table_group = parser.add_argument_group("Table Configuration")
    table_group.add_argument("--update-existing", action="store_true",
                             help="Update existing table instead of creating new")

    args = parser.parse_args()

    # Configure logging
    configure_logging(args.log_level)
    logger = logging.getLogger(__name__)

    # Validate arguments: every requested WWR must be a valid ratio.
    if args.wwr_ratios:
        for wwr in args.wwr_ratios:
            if not 0.0 <= wwr <= 1.0:
                logger.error(f"WWR ratio must be between 0.0 and 1.0, got {wwr}")
                sys.exit(1)

    # Initialize pipeline
    try:
        pipeline = BuildingPipeline(args.data_dir)
        logger.info(f"Initialized pipeline with data directory: {args.data_dir}")
    except Exception as e:
        logger.error(f"Failed to initialize pipeline: {e}")
        sys.exit(1)

    # Check if any action is requested
    # NOTE(review): --export, --export-weather and --export-combinations are
    # not in this list, so invoking the script with only an export flag
    # exits here — confirm whether that is intended.
    if not any([args.all, args.preprocess, args.occupancy_vars,
                args.window_vars, args.thermal_vars, args.combined_vars, args.create_table,
                args.query, args.stats, args.create_weather_table,
                args.weather_stats, args.query_weather, args.validate_weather,
                args.match_buildings_weather, args.create_combinations,
                args.simulation_stats, args.weather_vars, args.weather_variation_stats]):
        logger.error("No action specified. Use --help for options.")
        sys.exit(1)

    try:
        # Execute pipeline steps
        if args.all or args.preprocess:
            logger.info("Starting preprocessing...")
            # --no-* flags are inverted here into positive feature toggles.
            processed, failed = pipeline.preprocess_buildings(
                add_meters=not args.no_meters,
                add_outdoor_vars=not args.no_outdoor_vars,
                timesteps_per_hour=args.timesteps_per_hour,
                add_setpoint_control=not args.no_setpoint_control,
                validate=not args.no_validation
            )
            logger.info(f"Preprocessed {len(processed)} buildings ({len(failed)} failed)")

        if args.all or args.occupancy_vars:
            logger.info("Creating occupancy variations...")
            count, failed = pipeline.create_occupancy_variations(args.occupancy_schedules)
            logger.info(f"Created {count} occupancy variations ({len(failed)} failed)")

        if args.all or args.window_vars:
            logger.info("Creating window variations...")
            count, failed = pipeline.create_window_variations(args.wwr_ratios)
            logger.info(f"Created {count} window variations ({len(failed)} failed)")

        if args.all or args.thermal_vars:
            logger.info("Creating thermal resistance variations...")
            count, failed = pipeline.create_thermal_variations(args.thermal_scenarios)
            logger.info(f"Created {count} thermal variations ({len(failed)} failed)")

        if args.all or args.combined_vars:
            logger.info("Creating combined variations...")
            # Create combinations of occupancy and thermal variations only
            # (the cartesian product of the two requested option lists).
            combinations = []
            for occ in args.occupancy_schedules:
                for thermal in args.thermal_scenarios:
                    combinations.append({"occupancy": occ, "thermal": thermal})

            count, failed = pipeline.create_combined_variations(
                variation_types=["occupancy", "thermal"],  # Remove "windows"
                combinations=combinations
            )
            logger.info(f"Created {count} combined variations ({len(failed)} failed)")

        if args.all or args.create_table:
            logger.info("Creating building table...")
            table_file = pipeline.create_building_table(update_existing=args.update_existing)
            logger.info(f"Building table created: {table_file}")

        # Weather operations
        if args.create_weather_table:
            logger.info("Creating weather table...")
            try:
                # Imported lazily so the rest of the CLI works even when the
                # weather-table module is not implemented yet.
                from building_gen.database.weather_table import create_weather_table_with_real
                weather_dirs = [
                    Path(args.data_dir) / "weather/base",
                    Path(args.data_dir) / "weather/expanded",
                    Path(args.data_dir) / "weather/real"
                ]
                output_path = Path(args.data_dir) / "weather/tables/all_weather.csv"
                df = create_weather_table_with_real(weather_dirs, output_path)
                logger.info(f"Created weather table with {len(df)} locations")
            except ImportError:
                logger.error("Weather table functionality not implemented yet")
            except Exception as e:
                logger.error(f"Failed to create weather table: {e}")

        if args.weather_stats:
            try:
                weather_table_path = Path(args.data_dir) / "weather/tables/all_weather.csv"
                if weather_table_path.exists():
                    df = pd.read_csv(weather_table_path)
                    print("\n🌤️ Weather Collection Statistics:")
                    print(f"  Total locations: {len(df)}")
                    print(f"  Countries: {df['country'].nunique()}")
                    print(f"  Data sources: {df['data_source'].value_counts().to_dict()}")
                    print("\n  Top 10 countries by location count:")
                    for country, count in df['country'].value_counts().head(10).items():
                        print(f"    {country}: {count}")

                    # climate_zone_code is optional in the weather table.
                    if 'climate_zone_code' in df.columns:
                        print(f"\n  Climate zones represented: {df['climate_zone_code'].nunique()}")
                        print("  Climate zone distribution:")
                        for zone, count in df['climate_zone_code'].value_counts().head(10).items():
                            print(f"    {zone}: {count}")
                else:
                    logger.error("Weather table not found. Run --create-weather-table first.")
            except Exception as e:
                logger.error(f"Failed to show weather statistics: {e}")

        if args.query_weather or args.export_weather:
            try:
                weather_table_path = Path(args.data_dir) / "weather/tables/all_weather.csv"
                if weather_table_path.exists():
                    df = pd.read_csv(weather_table_path)

                    # Apply filters
                    # NOTE(review): the lat/long bounds use truthiness, so a
                    # bound of exactly 0.0 (the equator / prime meridian) is
                    # silently ignored — should be `is not None` checks.
                    if args.country:
                        df = df[df['country'] == args.country]
                    if args.weather_climate_zone:
                        df = df[df['climate_zone_code'] == args.weather_climate_zone]
                    if args.data_source:
                        df = df[df['data_source'] == args.data_source]
                    if args.min_latitude:
                        df = df[df['latitude'] >= args.min_latitude]
                    if args.max_latitude:
                        df = df[df['latitude'] <= args.max_latitude]
                    if args.min_longitude:
                        df = df[df['longitude'] >= args.min_longitude]
                    if args.max_longitude:
                        df = df[df['longitude'] <= args.max_longitude]

                    if args.query_weather:
                        # Print at most 15 matches in full detail.
                        print(f"\n Found {len(df)} weather locations matching criteria:")
                        for _, row in df.head(15).iterrows():
                            print(f"  {row['place']}, {row['country']}")
                            print(f"    Coordinates: {row['latitude']:.2f}, {row['longitude']:.2f}")
                            if 'climate_zone_code' in row:
                                print(f"    Climate zone: {row['climate_zone_code']}")
                            print(f"    Source: {row['data_source']}")
                            print()

                        if len(df) > 15:
                            print(f"  ... and {len(df) - 15} more locations")

                    if args.export_weather:
                        df.to_csv(args.export_weather, index=False)
                        logger.info(f"Exported {len(df)} weather locations to {args.export_weather}")
                else:
                    logger.error("Weather table not found. Run --create-weather-table first.")
            except Exception as e:
                logger.error(f"Failed to query weather: {e}")

        if args.validate_weather:
            logger.info("Validating weather files...")
            try:
                # ladybug is an optional dependency; imported lazily so the
                # rest of the CLI does not require it.
                from ladybug.epw import EPW
                weather_dirs = [
                    Path(args.data_dir) / "weather/base",
                    Path(args.data_dir) / "weather/expanded",
                    Path(args.data_dir) / "weather/real"
                ]

                valid_count = 0
                invalid_count = 0

                for weather_dir in weather_dirs:
                    for epw_file in weather_dir.glob("*.epw"):
                        try:
                            weather = EPW(epw_file)
                            # Basic validation - check if we can read location data
                            _ = weather.location.city
                            _ = weather.location.latitude
                            _ = weather.location.longitude
                            valid_count += 1
                        except Exception as e:
                            logger.warning(f"Invalid weather file {epw_file}: {e}")
                            invalid_count += 1

                logger.info(f"Weather validation complete: {valid_count} valid, {invalid_count} invalid")
            except ImportError:
                logger.error("ladybug library not available for weather validation")
            except Exception as e:
                logger.error(f"Weather validation failed: {e}")

        # Weather variations
        if args.weather_vars:
            logger.info("Creating weather variations...")
            try:
                count, failed = pipeline.create_weather_variations(args.weather_types)
                logger.info(f"Created {count} weather variations ({len(failed)} failed)")
            except Exception as e:
                logger.error(f"Failed to create weather variations: {e}")

        if args.weather_variation_stats:
            try:
                stats = pipeline.get_weather_variation_stats()
                print("\n🌤️ Weather Variation Statistics:")
                # One level of nesting is supported: dict values are printed
                # as indented sub-sections.
                for key, value in stats.items():
                    if isinstance(value, dict):
                        print(f"  {key}:")
                        for subkey, subvalue in value.items():
                            print(f"    {subkey}: {subvalue}")
                    else:
                        print(f"  {key}: {value}")
            except Exception as e:
                logger.error(f"Failed to show weather variation statistics: {e}")

        # Building-Weather Simulation operations
        if args.match_buildings_weather:
            logger.info("Matching buildings to weather files...")
            try:
                buildings_df = pipeline.match_buildings_to_weather()
                logger.info(f"Successfully matched {len(buildings_df)} buildings to weather files")
            except Exception as e:
                logger.error(f"Failed to match buildings to weather: {e}")

        if args.create_combinations:
            logger.info("Creating building-weather combinations...")
            try:
                buildings_path = Path(args.data_dir) / "tables/buildings.csv"
                weather_path = Path(args.data_dir) / "weather/tables/all_weather.csv"

                buildings_df = pd.read_csv(buildings_path)
                weather_df = pd.read_csv(weather_path)

                combinations = create_building_weather_combinations(
                    buildings_df,
                    weather_df,
                    weather_df,  # Using same table for base_weather_table - adjust if you have a separate base weather table
                    args.simulation_climate_zones
                )

                # Save combinations to CSV
                combinations_df = pd.DataFrame(combinations, columns=['building_id', 'weather_id'])
                combinations_path = Path(args.data_dir) / "tables/building_weather_combinations.csv"
                combinations_df.to_csv(combinations_path, index=False)

                logger.info(f"Created {len(combinations)} building-weather combinations")
                logger.info(f"Combinations saved to: {combinations_path}")

                if args.export_combinations:
                    combinations_df.to_csv(args.export_combinations, index=False)
                    logger.info(f"Exported combinations to: {args.export_combinations}")

            except Exception as e:
                logger.error(f"Failed to create combinations: {e}")

        if args.simulation_stats:
            try:
                buildings_path = Path(args.data_dir) / "tables/buildings.csv"
                weather_path = Path(args.data_dir) / "weather/tables/all_weather.csv"
                combinations_path = Path(args.data_dir) / "tables/building_weather_combinations.csv"

                if not all([buildings_path.exists(), weather_path.exists()]):
                    logger.error("Building or weather tables not found. Run --create-table and --create-weather-table first.")
                else:
                    buildings_df = pd.read_csv(buildings_path)
                    weather_df = pd.read_csv(weather_path)

                    print("\n🏢 Building-Weather Simulation Statistics:")
                    print(f"  Total buildings: {len(buildings_df)}")
                    print(f"  Total weather locations: {len(weather_df)}")

                    # Buildings by climate zone
                    print("\n  Buildings by climate zone:")
                    for zone, count in buildings_df['climate_zone'].value_counts().items():
                        print(f"    {zone}: {count} buildings")

                    # Weather files by climate zone
                    print("\n  Weather files by climate zone:")
                    for zone, count in weather_df['climate_zone_code'].value_counts().items():
                        print(f"    {zone}: {count} weather files")

                    # Potential combinations by climate zone
                    # (cross product of buildings and weather files that
                    # share the same zone).
                    print("\n  Potential combinations by climate zone:")
                    for zone in buildings_df['climate_zone'].unique():
                        building_count = len(buildings_df[buildings_df['climate_zone'] == zone])
                        weather_count = len(weather_df[weather_df['climate_zone_code'] == zone])
                        combinations = building_count * weather_count
                        print(f"    {zone}: {building_count} buildings × {weather_count} weather = {combinations} combinations")

                    # Total potential combinations
                    total_potential = sum(
                        len(buildings_df[buildings_df['climate_zone'] == zone]) *
                        len(weather_df[weather_df['climate_zone_code'] == zone])
                        for zone in buildings_df['climate_zone'].unique()
                    )
                    print(f"\n  Total potential combinations: {total_potential}")

                    # Check if combinations have been created
                    if combinations_path.exists():
                        combinations_df = pd.read_csv(combinations_path)
                        print(f"  Created combinations: {len(combinations_df)}")
                    else:
                        print("  Created combinations: 0 (run --create-combinations)")

            except Exception as e:
                logger.error(f"Failed to show simulation statistics: {e}")

        # Building query operations
        if args.query or args.stats or args.export:
            # Build filter dictionary
            filters = {}
            if args.building_type:
                filters['building_type'] = args.building_type
            if args.climate_zone:
                filters['climate_zone'] = args.climate_zone
            if args.variation_type:
                filters['variation_type'] = args.variation_type
            if args.occupancy_schedule:
                filters['occupancy_schedule'] = args.occupancy_schedule
            if args.thermal_scenario:
                filters['thermal_scenario'] = args.thermal_scenario

            # Build WWR range
            # NOTE(review): truthiness check skips a bound of exactly 0.0.
            wwr_range = None
            if args.min_wwr or args.max_wwr:
                wwr_range = (args.min_wwr or 0.0, args.max_wwr or 1.0)

            # Query buildings
            if args.query or args.export:
                buildings = pipeline.get_buildings(
                    wwr_range=wwr_range,
                    min_floor_area=args.min_floor_area,
                    max_floor_area=args.max_floor_area,
                    **filters
                )

                if args.query:
                    logger.info(f"Found {len(buildings)} buildings matching criteria")
                    if buildings:
                        print("\nMatching buildings:")
                        for i, building in enumerate(buildings[:10], 1):  # Show first 10
                            print(f"  {i:2d}. {building['name']}")
                            print(f"      Type: {building['building_type']}, Climate: {building['climate_zone']}")
                            print(f"      Variation: {building['variation_type']}, Occupancy: {building['occupancy_schedule']}")
                            if 'thermal_scenario' in building:
                                print(f"      Thermal: {building['thermal_scenario']}, WWR: {building['window_wall_ratio']:.0%}, Floor area: {building['floor_area']:.0f} m²")
                            else:
                                print(f"      WWR: {building['window_wall_ratio']:.0%}, Floor area: {building['floor_area']:.0f} m²")
                            print()

                        if len(buildings) > 10:
                            print(f"  ... and {len(buildings) - 10} more buildings")
                    else:
                        print("No buildings found matching the criteria")

                if args.export:
                    # NOTE(review): export passes only **filters, not
                    # wwr_range / floor-area bounds, so the exported set may
                    # differ from the logged count above — confirm intended.
                    pipeline.export_building_list(args.export, **filters)
                    logger.info(f"Exported {len(buildings)} buildings to {args.export}")

            # Show statistics
            if args.stats:
                stats = pipeline.get_summary_stats()
                print("\n📊 Database Statistics:")
                for key, value in stats.items():
                    if isinstance(value, dict):
                        print(f"  {key}:")
                        for subkey, subvalue in value.items():
                            print(f"    {subkey}: {subvalue}")
                    else:
                        print(f"  {key}: {value}")

        logger.info("Pipeline execution completed successfully!")

    except KeyboardInterrupt:
        logger.info("Pipeline execution interrupted by user")
        sys.exit(1)
    except Exception as e:
        logger.error(f"Pipeline execution failed: {e}")
        # Full traceback only at DEBUG verbosity.
        if args.log_level == "DEBUG":
            import traceback
            traceback.print_exc()
        sys.exit(1)
572
+
573
# Script entry point: only run the CLI when executed directly.
if __name__ == "__main__":
    main()