import gradio as gr
import pandas as pd
import numpy as np
import json
from io import StringIO
from collections import OrderedDict
import os

# ---------------------- Accessing data from Notion ---------------------- #
from notion_client import Client as client_notion
from imports_utils import (
    fetch_all_database_pages,
    get_property_value,
    getDataFromNotion,
    notionToken,
)
from config import (
    useNotionData,
    landuseDatabaseId,
    subdomainAttributesDatabaseId,
    landuseColumnName,
    subdomainColumnName,
    sqmPerEmployeeColumnName,
    thresholdsColumnName,
    maxPointsColumnName,
    domainColumnName,
)

if notionToken is None:
    notionFound = "Notion token not found."
    raise Exception(notionFound)
else:
    notionFound = "Notion token found successfully!"
    print(notionFound)

if useNotionData:
    notion = client_notion(auth=notionToken)
    lu_mapperDict, subdomain_mapperDict = getDataFromNotion(
        notion=notion,
        notionToken=notionToken,
        landuseDatabaseID=landuseDatabaseId,
        subdomainDatabaseID=subdomainAttributesDatabaseId,
    )
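
# NOTE (assumption, not verifiable from this file): judging by the config column
# names, lu_mapperDict maps land-use column names to livability subdomains, and
# subdomain_mapperDict maps each subdomain to its domain plus attributes such as
# sqm per employee, thresholds and max points.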

# ---------------------- Accessing data from Speckle ---------------------- #
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account, get_local_accounts
from specklepy.transports.server import ServerTransport
from specklepy.api import operations
from specklepy.objects.geometry import Polyline, Point
from specklepy.objects import Base
import speckle_utils
import data_utils
from config import (
    streamId,
    dmBranchName,
    dmCommitId,
    luBranchName,
    luCommitId,
    distanceMatrixActivityNodes,
    useSpeckleData,
)
from imports_utils import speckleToken, getDataFromSpeckle

if speckleToken is None:
    speckleFound = "Speckle token not found."
    raise Exception(speckleFound)
else:
    speckleFound = "Speckle token found successfully!"
    print(speckleFound)

if useSpeckleData:
    CLIENT = SpeckleClient(host="https://speckle.xyz/")
    account = get_default_account()
    CLIENT.authenticate_with_token(token=speckleToken)
    landuses, matrices = getDataFromSpeckle(
        speckleClient=CLIENT,
        streamID=streamId,
        matrixBranchName=dmBranchName,
        landuseBranchName=luBranchName,
    )

    # Distance matrix: coerce to numeric first, then replace Inf with a large
    # finite distance and NaN with 0, so the final cast to int cannot fail on
    # NaNs introduced by the coercion.
    df_dm = matrices[distanceMatrixActivityNodes]
    df_dm_dict = df_dm.to_dict('index')
    df_dm = df_dm.apply(pd.to_numeric, errors='coerce')
    df_dm = df_dm.replace([np.inf, -np.inf], 10000).fillna(0)
    df_dm = df_dm.round(0).astype(int)

    # Keep only land-use rows for activity nodes present in the distance matrix.
    mask_connected = df_dm.index.tolist()
    df_lu = landuses.loc[mask_connected].copy()

    # Merge asset and non-asset land-use columns: strip the 'ASSETS+' prefix,
    # then sum columns that share the resulting name.
    mergeAssetNonAssetLanduse = True
    if mergeAssetNonAssetLanduse:
        df_lu.columns = [col.replace('ASSETS+', '') for col in df_lu.columns]

    df_lu = df_lu.apply(pd.to_numeric, errors='coerce')
    df_lu = df_lu.replace([np.inf, -np.inf], 10000).fillna(0)
    df_lu = df_lu.astype(int)
    df_lu = df_lu.T.groupby(level=0).sum().T
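
    # Illustrative example (made up, not from the source data): after the prefix
    # strip, 'ASSETS+residential' and 'residential' become two columns named
    # 'residential', and the groupby(level=0).sum() above collapses them into a
    # single summed column.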


def test(input_json):
    print("Received input")
    # Parse the input JSON string; fall back to swapping single quotes for
    # double quotes in case the payload arrives as single-quoted pseudo-JSON.
    try:
        inputs = json.loads(input_json)
    except json.JSONDecodeError:
        inputs = json.loads(input_json.replace("'", '"'))
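
    # Expected input shape, reconstructed from the keys read below
    # (values are illustrative placeholders, not from the source):
    # {
    #     "input": {
    #         "useGrasshopperData": "True" or "False",
    #         "matrix": {...}, "landuse_areas": {...},
    #         "attributeMapperDict": {...}, "landuseMapperDict": {...},
    #         "alpha": ..., "threshold": ...
    #     }
    # }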

    # ------------------------- Accessing input data from Grasshopper ------------------------- #
    from imports_utils import (
        getDataFromGrasshopper,
        findUniqueDomains,
        findUniqueSubdomains,
        landusesToSubdomains,
        FindWorkplacesNumber,
        computeAccessibility,
        computeAccessibility_pointOfInterest,
        remap,
        accessibilityToLivability,
    )
    from config import alpha as alphaDefault
    from config import threshold as thresholdDefault

    useGrasshopperData = inputs['input']["useGrasshopperData"]  # fetch Grasshopper data or not
    if useGrasshopperData == "True":
        dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
            inputJson=inputs,
            inputNameMatrix="matrix",
            inputNameLanduse="landuse_areas",
            inputNameAttributeMapper="attributeMapperDict",
            inputNameLanduseMapper="landuseMapperDict",
            inputNameAlpha="alpha",
            inputNameThreshold="threshold",
        )
        dfMatrix = dfMatrix_gh
        dfLanduses = dfLanduses_gh
        if not useNotionData:
            # The original bound this to attributeMapperDict, a name the rest of
            # the function never reads; it is bound to livabilityMapperDict here,
            # the name used downstream.
            livabilityMapperDict = attributeMapperDict_gh
            landuseMapperDict = landuseMapperDict_gh
        else:
            # Assumption: when Notion data is enabled, fall back to the Notion
            # mappers (the original code left both names unassigned on this path).
            landuseMapperDict = lu_mapperDict
            livabilityMapperDict = subdomain_mapperDict
    else:
        # No Grasshopper payload: only alpha and threshold come from the input
        # JSON; matrix, land uses and mappers come from Speckle and Notion.
        dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
            inputJson=inputs,
            inputNameMatrix=None,
            inputNameLanduse=None,
            inputNameAttributeMapper=None,
            inputNameLanduseMapper=None,
            inputNameAlpha="alpha",
            inputNameThreshold="threshold",
        )
        dfLanduses = df_lu.copy()
        dfMatrix = df_dm.copy()
        landuseMapperDict = lu_mapperDict
        livabilityMapperDict = subdomain_mapperDict

    domainsUnique = findUniqueDomains(livabilityMapperDict)
    subdomainsUnique = findUniqueSubdomains(landuseMapperDict)

    LivabilitySubdomainsWeights = landusesToSubdomains(dfMatrix, dfLanduses, landuseMapperDict, subdomainsUnique)
    WorkplacesNumber = FindWorkplacesNumber(dfMatrix, livabilityMapperDict, LivabilitySubdomainsWeights, subdomainsUnique)

    # Prepare an input weights dataframe for the parameter LivabilitySubdomainsInputs.
    LivabilitySubdomainsInputs = pd.concat([LivabilitySubdomainsWeights, WorkplacesNumber], axis=1)
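
    # computeAccessibility is imported from imports_utils; its internals are not
    # visible here. A common gravity-style form it may implement (assumption):
    #   A_i = sum_j w_j * exp(-alpha * d_ij) over all j with d_ij <= threshold,
    # where d_ij is the distance-matrix entry and w_j the subdomain weight at node j.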
    subdomainsAccessibility = computeAccessibility(dfMatrix, LivabilitySubdomainsInputs, alpha, threshold)
    livability = accessibilityToLivability(dfMatrix, subdomainsAccessibility, livabilityMapperDict, domainsUnique)

    # Convert the result dataframes to row-oriented dictionaries for the JSON output.
    livability_dictionary = livability.to_dict('index')
    LivabilitySubdomainsInputs_dictionary = LivabilitySubdomainsInputs.to_dict('index')
    subdomainsAccessibility_dictionary = subdomainsAccessibility.to_dict('index')
    LivabilitySubdomainsWeights_dictionary = LivabilitySubdomainsWeights.to_dict('index')
    df_lu_dict = dfLanduses.to_dict('index')
    dm_dictionary = dfMatrix.to_dict('index')

    # Generate the report strings.
    nan_count = dfMatrix.isna().sum().sum()
    nan_count_after_inf_replacement = dfMatrix.replace([np.inf, -np.inf], np.nan).isna().sum().sum()
    inf_count = nan_count_after_inf_replacement - nan_count
    landuseSum = ', '.join([f"{column} = {dfLanduses[column].sum()}" for column in dfLanduses.columns])
    weightsSum = ', '.join([f"{column} = {LivabilitySubdomainsInputs[column].sum()}" for column in LivabilitySubdomainsInputs.columns])

    valid_indexes = [idx for idx in mask_connected if idx in dfLanduses.index]
    missing_indexes = set(mask_connected) - set(valid_indexes)
    if missing_indexes:
        indexError = f"Error: The following indexes were not found in the landuse dataframe: {missing_indexes}, length: {len(missing_indexes)}"
    else:
        indexError = "No missing indexes found in the landuse dataframe."

    accessibilityStats = [
        f"{column} = {subdomainsAccessibility[column].min()}, {subdomainsAccessibility[column].max()}, {subdomainsAccessibility[column].mean()}"
        for column in subdomainsAccessibility.columns
    ]
    livabilityStats = [
        f"{column} = {livability[column].min()}, {livability[column].max()}, {livability[column].mean()}"
        for column in livability.columns
    ]

    outputReport = [
        notionFound,
        speckleFound,
        f"Distance matrix length: {len(dfMatrix)}",
        f"Number of Inf values: {inf_count}, number of NaN values: {nan_count}",
        f"Number of activity nodes with landuses: {len(dfLanduses)}",
        f"Total area per landuse: {landuseSum}",
        f"Total area (jobs) per livability category: {weightsSum}",
        indexError,
        f"Accessibility parameters used: threshold: {threshold}, alpha: {alpha}",
        f"Accessibility stats (min, max, mean): {accessibilityStats}",
        f"Livability stats (min, max, mean): {livabilityStats}",
    ]

    # Prepare the output
    output = {
        "subdomainsAccessibility_dictionary": subdomainsAccessibility_dictionary,
        "livability_dictionary": livability_dictionary,
        "subdomainsWeights_dictionary": LivabilitySubdomainsInputs_dictionary,
        "luDomainMapper": landuseMapperDict,
        "attributeMapper": livabilityMapperDict,
        "mask_connected": mask_connected,
        "dm_an": dm_dictionary,
        "landuses": df_lu_dict,
        "constants": [alpha, threshold],
        "report": outputReport,
    }
    return json.dumps(output)


# Define the Gradio interface with a single JSON input
iface = gr.Interface(
    fn=test,
    inputs=gr.Textbox(label="Input JSON", lines=20, placeholder="Enter JSON with all parameters here..."),
    outputs=gr.JSON(label="Output JSON"),
    title="testspace",
)

iface.launch()
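
# Minimal client-side sketch for calling this Space (assumptions: the Space is
# public and reachable under the hypothetical id "user/testspace"; the payload
# keys mirror the ones read in test() above; values are placeholders):
#
#   from gradio_client import Client
#
#   client = Client("user/testspace")
#   payload = {
#       "input": {
#           "useGrasshopperData": "False",  # pull matrix/landuses from Speckle
#           "alpha": 0.0038,                # placeholder decay parameter
#           "threshold": 600,               # placeholder distance cutoff
#       }
#   }
#   result = client.predict(json.dumps(payload), api_name="/predict")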