# zillow/processors/helpers.py
import os

import pandas as pd


def get_data_path_for_config(config_name):
    """Return the path to the raw data for the given config name."""
    data_dir = "../data"
    return os.path.join(data_dir, config_name)


def coalesce_columns(df):
    """Fold suffixed duplicate columns (e.g. "Rate_1") back into their base columns.

    Columns without an underscore are treated as base columns; any non-null
    value found in a matching suffixed column overwrites the base column's
    value for that row. Only the base columns are kept in the result.
    """
    columns_to_coalesce = [col for col in df.columns if "_" not in col]
    for index, row in df.iterrows():
        for col in df.columns:
            for column_to_coalesce in columns_to_coalesce:
                if column_to_coalesce in col and "_" in col and not pd.isna(row[col]):
                    df.at[index, column_to_coalesce] = row[col]
    # drop the suffixed columns, keeping only the coalesced base columns
    return df[columns_to_coalesce]
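
# A minimal sketch of what coalesce_columns does, assuming a frame left with a
# suffixed duplicate after an outer merge (column names and values below are
# hypothetical):
#
#   df = pd.DataFrame({"Rate": [1.0, None], "Rate_1": [None, 2.0]})
#   coalesce_columns(df)
#   =>    Rate
#      0   1.0
#      1   2.0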


def set_home_type(cur_df, filename):
    """Set the "Home Type" column based on the home-type slug in the filename."""
    if "_sfrcondomfr_" in filename:
        cur_df["Home Type"] = "all homes plus multifamily"
    elif "_sfrcondo_" in filename:
        cur_df["Home Type"] = "all homes"
    elif "_sfr_" in filename:
        cur_df["Home Type"] = "SFR"
    elif "_condo_" in filename:
        cur_df["Home Type"] = "condo/co-op"
    elif "_mfr_" in filename:
        cur_df["Home Type"] = "multifamily"
    return cur_df


def get_combined_df(data_frames, on):
    """Outer-merge a list of dataframes on the given key columns.

    Overlapping value columns receive a numeric suffix during each merge and
    are coalesced back into single columns afterwards.
    """
    combined_df = None
    if len(data_frames) > 1:
        # merge the frames one by one, suffixing duplicate columns with their index
        combined_df = data_frames[0]
        for i in range(1, len(data_frames)):
            cur_df = data_frames[i]
            combined_df = pd.merge(
                combined_df,
                cur_df,
                on=on,
                how="outer",
                suffixes=("", "_" + str(i)),
            )
    elif len(data_frames) == 1:
        combined_df = data_frames[0]
    combined_df = coalesce_columns(combined_df)
    return combined_df
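
# A minimal sketch of get_combined_df on two frames sharing a key column
# (hypothetical names and values): the outer merge produces "Rent Index" and
# "Rent Index_1", which coalesce_columns then folds back together.
#
#   df_a = pd.DataFrame({"Region": ["Austin, TX"], "Rent Index": [1900.0]})
#   df_b = pd.DataFrame({"Region": ["Boise, ID"], "Rent Index": [1500.0]})
#   get_combined_df([df_a, df_b], on=["Region"])
#   =>        Region  Rent Index
#      0  Austin, TX      1900.0
#      1   Boise, ID      1500.0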


def get_melted_df(
    df,
    exclude_columns,
    columns_to_pivot,
    col_name,
    filename,
):
    """Melt wide date columns into long Date/value rows.

    The value column is named after col_name, with "(Smoothed)" and/or
    "(Seasonally Adjusted)" appended when the filename carries the "_sm_" or
    "_sa_" slugs.
    """
    smoothed = "_sm_" in filename
    seasonally_adjusted = "_sa_" in filename
    if smoothed:
        col_name += " (Smoothed)"
    if seasonally_adjusted:
        col_name += " (Seasonally Adjusted)"
    df = pd.melt(
        df,
        id_vars=exclude_columns,
        value_vars=columns_to_pivot,
        var_name="Date",
        value_name=col_name,
    )
    return df
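
# A minimal sketch of get_melted_df, assuming a hypothetical filename and
# columns: the wide monthly column becomes a Date/value pair, and the value
# column is renamed according to the "_sm_" flag in the filename.
#
#   df = pd.DataFrame({"Region": ["Austin, TX"], "2023-01-31": [1900.0]})
#   get_melted_df(df, ["Region"], ["2023-01-31"], "Rent Index", "Metro_zori_sm_month.csv")
#   =>        Region        Date  Rent Index (Smoothed)
#      0  Austin, TX  2023-01-31                 1900.0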


def save_final_df_as_jsonl(config_name, df):
    """Write the processed dataframe to ../processed/<config_name>.jsonl."""
    processed_dir = "../processed/"
    os.makedirs(processed_dir, exist_ok=True)
    full_path = os.path.join(processed_dir, config_name + ".jsonl")
    df.to_json(full_path, orient="records", lines=True)


def handle_slug_column_mappings(
    data_frames, slug_column_mappings, exclude_columns, filename, cur_df
):
    """Melt cur_df and append it to data_frames when the filename matches a known slug.

    slug_column_mappings maps filename slugs to human-readable column names;
    the first slug found in the filename wins.
    """
    # identify the columns to pivot (everything that is not an id column)
    columns_to_pivot = [col for col in cur_df.columns if col not in exclude_columns]
    for slug, col_name in slug_column_mappings.items():
        if slug in filename:
            cur_df = get_melted_df(
                cur_df,
                exclude_columns,
                columns_to_pivot,
                col_name,
                filename,
            )
            data_frames.append(cur_df)
            break
    return data_frames
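

if __name__ == "__main__":
    # Minimal smoke test sketching how these helpers are meant to compose.
    # The filename, slug mapping, and region/date values below are hypothetical
    # stand-ins, not actual Zillow export names.
    filename = "Metro_zori_sfrcondomfr_sm_sa_month.csv"
    cur_df = pd.DataFrame(
        {
            "Region": ["Austin, TX", "Boise, ID"],
            "2023-01-31": [1900.0, 1500.0],
            "2023-02-28": [1925.0, None],
        }
    )
    cur_df = set_home_type(cur_df, filename)
    exclude_columns = ["Region", "Home Type"]
    slug_column_mappings = {"_zori_": "Rent Index"}
    data_frames = handle_slug_column_mappings(
        [], slug_column_mappings, exclude_columns, filename, cur_df
    )
    combined_df = get_combined_df(data_frames, on=["Region", "Home Type", "Date"])
    print(combined_df)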