thaina.saraiva commited on
Commit
dc95ca1
·
1 Parent(s): fd999ec

add postgres DB

Browse files
Files changed (1) hide show
  1. example/save_in_postgress.py +285 -0
example/save_in_postgress.py ADDED
@@ -0,0 +1,285 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Load COOPER datasets from Hugging Face and persist them into a PostgreSQL database.
3
+
4
+ This script:
5
+ - Loads measurements_by_cell, topology, and performance_indicators_meanings from CelfAI/COOPER.
6
+ - Optionally computes aggregated views (mean/min by cell, mean by band/site).
7
+ - Creates the database if missing, then writes the main tables via pandas to_sql.
8
+
9
+ Usage:
10
+ python save_in_postgress.py
11
+
12
+ Requires: datasets, pandas, sqlalchemy, psycopg2-binary
13
+ """
14
+
15
+ from datasets import load_dataset
16
+ import pandas as pd
17
+ from sqlalchemy import create_engine, text
18
+
19
# ---------------------------------------------------------------------------
# Constants
# ---------------------------------------------------------------------------

# Hugging Face dataset repository that hosts the COOPER tables.
DATASET_REPO = "CelfAI/COOPER"
# Splits of measurements_by_cell that are merged into a single table.
SPLITS_MEASUREMENTS = ("train", "test")

# Default PostgreSQL connection (override via env or arguments if needed).
# NOTE(review): hard-coded credentials are local-development defaults only;
# do not reuse them in any shared environment.
DEFAULT_CONFIG = {
    "USERNAME": "postgres",
    "PASSWORD": "postgres",
    "HOST": "localhost",
    "PORT": "5432",
    "DB_NAME": "cooper",
}
34
+
35
+
36
+ # ---------------------------------------------------------------------------
37
+ # Data loading
38
+ # ---------------------------------------------------------------------------
39
+
40
+
41
def load_measurements_by_cell() -> pd.DataFrame:
    """Load measurements_by_cell from COOPER and merge all configured splits.

    Uses the module-level SPLITS_MEASUREMENTS tuple instead of hard-coding
    "train"/"test" again, so the split list is defined in exactly one place.

    Returns:
        One DataFrame with the rows of every split concatenated and the
        index reset.
    """
    ds = load_dataset(DATASET_REPO, "measurements_by_cell")
    frames = [ds[split].to_pandas() for split in SPLITS_MEASUREMENTS]
    return pd.concat(frames, ignore_index=True)
47
+
48
+
49
def load_topology() -> pd.DataFrame:
    """Return the COOPER topology table (its single "main" split)."""
    return load_dataset(DATASET_REPO, "topology")["main"].to_pandas()
53
+
54
+
55
def load_performance_indicators_meanings() -> pd.DataFrame:
    """Return the COOPER performance_indicators_meanings table ("main" split)."""
    return load_dataset(DATASET_REPO, "performance_indicators_meanings")["main"].to_pandas()
59
+
60
+
61
def prepare_measurements_for_db(df: pd.DataFrame) -> pd.DataFrame:
    """Return a copy of *df* whose column names are valid SQL identifiers.

    Dots in column names (e.g. "A.B") become underscores so PostgreSQL does
    not need quoted identifiers. The input DataFrame is left untouched.
    """
    renamed = {column: column.replace(".", "_") for column in df.columns}
    return df.rename(columns=renamed)
66
+
67
+
68
def prepare_performance_indicators_for_db(df: pd.DataFrame) -> pd.DataFrame:
    """Return a copy of *df* with 3GPP_reference renamed to reference_3gpp.

    SQL identifiers cannot start with a digit, hence the rename. The input
    DataFrame is not modified.
    """
    return df.rename(columns={"3GPP_reference": "reference_3gpp"})
73
+
74
+
75
+ # ---------------------------------------------------------------------------
76
+ # Optional aggregated views (for analytics; not written to DB in this script)
77
+ # ---------------------------------------------------------------------------
78
+
79
+
80
def compute_aggregations(
    measurements: pd.DataFrame,
    topology: pd.DataFrame,
) -> dict[str, pd.DataFrame]:
    """Build aggregated views of the PM counters.

    Joins measurements with topology on LocalCellName, then returns a dict
    with the mean/min per cell and the mean per band and per site. Only the
    PM columns (everything except LocalCellName and datetime) are
    aggregated.
    """
    meta_columns = {"LocalCellName", "datetime"}
    pm_columns = [c for c in measurements.columns if c not in meta_columns]
    enriched = measurements.merge(topology, on="LocalCellName", how="left")

    def _aggregate(frame: pd.DataFrame, key: str, how: str) -> pd.DataFrame:
        # Group once, apply the requested reduction, flatten the index back.
        return getattr(frame.groupby(key)[pm_columns], how)().reset_index()

    return {
        "mean_by_cell": _aggregate(measurements, "LocalCellName", "mean"),
        "min_by_cell": _aggregate(measurements, "LocalCellName", "min"),
        "mean_by_band": _aggregate(enriched, "Band", "mean"),
        "mean_by_site": _aggregate(enriched, "SiteLabel", "mean"),
    }
100
+
101
+
102
+ # ---------------------------------------------------------------------------
103
+ # Database setup and population
104
+ # ---------------------------------------------------------------------------
105
+
106
+
107
def ensure_database(engine_admin, db_name: str) -> None:
    """Create database *db_name* on the server if it does not exist (idempotent).

    CREATE DATABASE cannot run inside a transaction block, so the connection
    is switched to AUTOCOMMIT before any statement is issued.

    Args:
        engine_admin: SQLAlchemy engine connected to a maintenance database
            (e.g. "postgres") with permission to create databases.
        db_name: Name of the database to create.
    """
    with engine_admin.connect() as conn:
        conn = conn.execution_options(isolation_level="AUTOCOMMIT")
        exists = conn.execute(
            text("SELECT 1 FROM pg_database WHERE datname = :name"),
            {"name": db_name},
        ).scalar()
        if exists is None:
            # CREATE DATABASE does not accept bind parameters for the
            # database name, so quote the identifier manually (doubling any
            # embedded quotes) instead of interpolating it raw into the SQL.
            quoted = '"' + db_name.replace('"', '""') + '"'
            conn.execute(text(f"CREATE DATABASE {quoted} TEMPLATE template0;"))
117
+
118
+
119
def get_engine(config: dict, database: str | None = None):
    """Build a SQLAlchemy engine for *database* (falls back to "postgres").

    Args:
        config: Mapping with USERNAME, PASSWORD, HOST and PORT keys.
        database: Target database name; when falsy, the maintenance
            database "postgres" is used.
    """
    url = "postgresql+psycopg2://{user}:{pwd}@{host}:{port}/{db}".format(
        user=config["USERNAME"],
        pwd=config["PASSWORD"],
        host=config["HOST"],
        port=config["PORT"],
        db=database or "postgres",
    )
    return create_engine(url)
127
+
128
+
129
def write_tables(engine, measurements: pd.DataFrame, topology: pd.DataFrame, performance_indicators: pd.DataFrame) -> None:
    """Persist the three main DataFrames into PostgreSQL.

    Uses if_exists="replace", i.e. pandas drops and recreates each table.
    The write order matters: measurements is replaced first, which drops
    its foreign key on topology, so topology can then be replaced without a
    dependency error.

    NOTE(review): replacing the tables discards the constraints created by
    create_tables_if_not_exist — confirm that losing PK/FK definitions on
    every load is intended.
    """
    payload = (
        ("measurements", measurements),
        ("performance_indicators_meanings", performance_indicators),
        ("topology", topology),
    )
    for table_name, frame in payload:
        frame.to_sql(table_name, engine, if_exists="replace", index=False)
136
+
137
+
138
def list_public_tables(engine) -> list[tuple]:
    """Return the (table_name,) rows found in the public schema."""
    query = text(
        "SELECT table_name FROM information_schema.tables "
        "WHERE table_schema = 'public';"
    )
    with engine.connect() as conn:
        rows = conn.execute(query).fetchall()
    return rows
148
+
149
+
150
+ # ---------------------------------------------------------------------------
151
+ # DDL: CREATE TABLE IF NOT EXISTS (run before loading data)
152
+ # ---------------------------------------------------------------------------
153
+
154
# DDL for the KPI dictionary: one row per performance indicator, keyed by
# the counter name. The reference_3gpp column matches the rename performed
# by prepare_performance_indicators_for_db (3GPP_reference cannot be a SQL
# identifier because it starts with a digit).
query_Performance_Indicators_meaning = """
CREATE TABLE IF NOT EXISTS performance_indicators_meanings (
    name TEXT PRIMARY KEY,
    category TEXT,
    description TEXT,
    unit TEXT,
    collection_method TEXT,
    collection_condition TEXT,
    measurement_entity TEXT,
    reference_3gpp TEXT
);
"""
166
+
167
# DDL for the cell/site topology table. LocalCellName is the primary key
# and is the join key used both by compute_aggregations and by the foreign
# key declared in query_Measurements.
query_Topology = """
CREATE TABLE IF NOT EXISTS topology (
    SiteLabel TEXT,
    LocalCellName TEXT PRIMARY KEY,
    Sector INT,
    PCI INT,
    DuplexMode TEXT,
    Band TEXT,
    dlBandwidth TEXT,
    Azimuth NUMERIC,
    MDT INT,
    EDT INT,
    HBeamwidth INT,
    AntennaHeight NUMERIC,
    GroundHeight INT,
    OperationMode TEXT,
    distance_X NUMERIC,
    distance_Y NUMERIC
);
"""
187
+
188
# DDL for the PM counter table. LocalCellName references topology with
# ON DELETE CASCADE, so deleting a cell from topology also removes its
# measurements. The table must therefore be created after topology.
# NOTE(review): datetime is stored as TEXT — confirm whether a TIMESTAMP
# column is wanted downstream.
query_Measurements = """
CREATE TABLE IF NOT EXISTS measurements (
    LocalCellName TEXT REFERENCES topology(LocalCellName) ON DELETE CASCADE,
    datetime TEXT,
    RRC_ConnEstabSucc INT,
    RRC_ConnEstabAtt INT,
    RRC_Setup INT,
    RRC_ConnMax INT,
    MM_HoExeIntraFreqSuccOut INT,
    MM_HoExeIntraFreqReqOut INT,
    MM_HoExeIntraFreqSucc INT,
    MM_HoExeIntraFreqAtt INT,
    MM_HoExecInterFreqReqOut_Cov INT,
    MM_HoExeInterFreqSuccOut_Cov INT,
    MM_HoPrepInterFreqReqOut_Cov INT,
    MM_HoExeInterFreqReqOut INT,
    MM_HoExeInterFreqSuccOut INT,
    MM_HoPrepInterFreqReqOut INT,
    MM_HoPrepIntraFreqReqOut INT,
    MM_HoFailIn_Admit INT,
    MM_HoExeIntraFreqPrepReqIn INT,
    MM_Redirection_Blind INT,
    MM_Redirection_Cov INT,
    CARR_WBCQIDist_Bin0 INT,
    CARR_WBCQIDist_Bin1 INT,
    CARR_WBCQIDist_Bin2 INT,
    CARR_WBCQIDist_Bin3 INT,
    CARR_WBCQIDist_Bin4 INT,
    CARR_WBCQIDist_Bin5 INT,
    CARR_WBCQIDist_Bin6 INT,
    CARR_WBCQIDist_Bin7 INT,
    CARR_WBCQIDist_Bin8 INT,
    CARR_WBCQIDist_Bin9 INT,
    CARR_WBCQIDist_Bin10 INT,
    CARR_WBCQIDist_Bin11 INT,
    CARR_WBCQIDist_Bin12 INT,
    CARR_WBCQIDist_Bin13 INT,
    CARR_WBCQIDist_Bin14 INT,
    CARR_WBCQIDist_Bin15 INT,
    ThpVolDl NUMERIC,
    ThpVolUl NUMERIC,
    ThpTimeDl NUMERIC,
    ThpTimeUl NUMERIC,
    CellUnavail_System INT,
    CellUnavail_Manual INT,
    CellUnavail_EnergySaving INT,
    UECNTX_Est_Att INT,
    UECNTX_Est_Succ INT,
    UECNTX_Rem INT
);
"""
239
+
240
+
241
def create_tables_if_not_exist(engine) -> None:
    """Run the CREATE TABLE IF NOT EXISTS DDL statements.

    Order matters: topology must exist before measurements, because the
    latter declares a foreign key on topology(LocalCellName).
    """
    ddl_statements = (
        query_Performance_Indicators_meaning,
        query_Topology,
        query_Measurements,
    )
    with engine.connect() as conn:
        for ddl in ddl_statements:
            conn.execute(text(ddl))
        conn.commit()
248
+
249
+
250
+ # ---------------------------------------------------------------------------
251
+ # Main
252
+ # ---------------------------------------------------------------------------
253
+
254
+
255
def main(config: dict | None = None) -> None:
    """End-to-end pipeline: load COOPER data and persist it into PostgreSQL.

    Args:
        config: Optional connection settings; defaults to DEFAULT_CONFIG.
    """
    cfg = config or DEFAULT_CONFIG
    db_name = cfg["DB_NAME"]

    # 1) Load the raw tables and normalize column names for SQL.
    measurements = prepare_measurements_for_db(load_measurements_by_cell())
    topology = load_topology()
    performance_indicators = prepare_performance_indicators_for_db(
        load_performance_indicators_meanings()
    )

    # 2) Create the target database if missing, then connect to it.
    ensure_database(get_engine(cfg, database="postgres"), db_name)
    engine = get_engine(cfg, database=db_name)

    # 3) Create tables from DDL, then load the data (replaces existing).
    create_tables_if_not_exist(engine)
    write_tables(engine, measurements, topology, performance_indicators)

    # 4) Verify: list tables in the public schema.
    print("Tables in public schema:", list_public_tables(engine))


if __name__ == "__main__":
    main()