Spaces:
Build error
Build error
aristotletan
committed on
Commit
•
b731d10
1
Parent(s):
7e5f148
Upload folder using huggingface_hub
Browse files- .env +4 -0
- README.md +2 -8
- __pycache__/api.cpython-310.pyc +0 -0
- __pycache__/api.cpython-311.pyc +0 -0
- address.json +0 -0
- api.py +36 -0
- get_data.rest +1 -0
- llm.py +121 -0
- requirements.txt +117 -0
- services/__pycache__/location_services.cpython-310.pyc +0 -0
- services/__pycache__/location_services.cpython-311.pyc +0 -0
- services/location_services.py +125 -0
.env
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
OPENAI_API_KEY='REDACTED-ROTATE-THIS-KEY'  # SECURITY: a live key was committed here and is now public — revoke it and load secrets outside version control
|
2 |
+
GMAPS_API_KEY='REDACTED-ROTATE-THIS-KEY'  # SECURITY: leaked in a public commit — rotate/restrict this key in the Google Cloud console
|
3 |
+
GOOGLE_MAPS_API_KEY='REDACTED-ROTATE-THIS-KEY'  # duplicate of GMAPS_API_KEY; rotate as well
|
4 |
+
NEO4J_PASSWORD=REDACTED  # change this password; it was committed to a public repo
|
README.md
CHANGED
@@ -1,12 +1,6 @@
|
|
1 |
---
|
2 |
-
title:
|
3 |
-
|
4 |
-
colorFrom: yellow
|
5 |
-
colorTo: blue
|
6 |
sdk: gradio
|
7 |
sdk_version: 4.7.1
|
8 |
-
app_file: app.py
|
9 |
-
pinned: false
|
10 |
---
|
11 |
-
|
12 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
1 |
---
|
2 |
+
title: webapp
|
3 |
+
app_file: llm.py
|
|
|
|
|
4 |
sdk: gradio
|
5 |
sdk_version: 4.7.1
|
|
|
|
|
6 |
---
|
|
|
|
__pycache__/api.cpython-310.pyc
ADDED
Binary file (1.2 kB). View file
|
|
__pycache__/api.cpython-311.pyc
ADDED
Binary file (2.17 kB). View file
|
|
address.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
api.py
ADDED
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""FastAPI service exposing healthcare-location data filtered by address."""
from fastapi import FastAPI, HTTPException
import pandas as pd
import os

from services.location_services import LocationDataHandler

# Load the dataset once at startup; drop the unused embedding column and
# rows with missing values so distance filtering never sees NaNs.
data_file = 'address.json'
df = (
    pd.read_json(data_file)
    .drop('embeddings_specialization', axis=1)
    .dropna()
    .reset_index(drop=True)
)
handler = LocationDataHandler(df)

# Single application instance (the original created FastAPI() twice; only
# the second ever received the routes).
app = FastAPI()


@app.get("/data")
async def read_data():
    """Return the full dataset as a list of records."""
    return df.to_dict(orient='records')


@app.get("/filter_by_address")
async def api_filter_by_address(address: str, max_distance_km: float = 30):
    """Return up to 10 locations within *max_distance_km* of *address*.

    Responds 404 when nothing matches and 500 on geocoding/lookup failure.
    """
    try:
        filtered_df = handler.filter_by_address(address, max_distance_km)
        if filtered_df is not None:
            return filtered_df.to_dict(orient='records')
        raise HTTPException(status_code=404, detail="No locations found within the specified distance")
    except HTTPException:
        # Re-raise the deliberate 404 — the broad handler below must not
        # convert it into a 500 (the original code did exactly that).
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


# To run the server:
# uvicorn your_file_name:app --reload
|
get_data.rest
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
GET http://localhost:8000/filter_by_address?address=phileo+damansara
|
llm.py
ADDED
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pydantic.v1 import BaseModel, Field
|
2 |
+
from langchain.tools import tool
|
3 |
+
import requests
|
4 |
+
from langchain.chat_models import ChatOpenAI
|
5 |
+
from langchain.prompts import ChatPromptTemplate
|
6 |
+
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
|
7 |
+
from langchain.schema.agent import AgentFinish
|
8 |
+
from langchain.schema.runnable import RunnablePassthrough
|
9 |
+
from langchain.agents import AgentExecutor
|
10 |
+
from langchain.schema.output_parser import StrOutputParser
|
11 |
+
from langchain.schema.runnable import RunnableMap
|
12 |
+
from langchain.prompts import MessagesPlaceholder
|
13 |
+
from langchain.tools.render import format_tool_to_openai_function
|
14 |
+
from langchain.agents.format_scratchpad import format_to_openai_functions
|
15 |
+
from langchain.memory import ConversationBufferMemory
|
16 |
+
from dotenv import load_dotenv, find_dotenv
|
17 |
+
import openai
|
18 |
+
import os
|
19 |
+
import gradio as gr
|
20 |
+
|
21 |
+
|
22 |
+
|
23 |
+
_ = load_dotenv(find_dotenv())  # read local .env file
openai.api_key = os.environ['OPENAI_API_KEY']

# Single source of truth for the model identifier — the original defined
# model_name but then repeated the literal string when building the model.
model_name = 'gpt-4-1106-preview'
model = ChatOpenAI(temperature=0, model=model_name)
output_parser = StrOutputParser()
|
28 |
+
|
29 |
+
|
30 |
+
def invoke_llm(query, context, template):
    """Render *template* with query/context and run it through the chat model.

    Returns the model's reply as a plain string (via the module-level
    StrOutputParser).
    """
    pipeline = ChatPromptTemplate.from_template(template) | model | output_parser
    return pipeline.invoke({"query": query, "context": context})
|
34 |
+
|
35 |
+
|
36 |
+
# Argument schema for the search tool. NOTE: the Field descriptions are
# runtime data — they are serialized into the OpenAI function-calling
# payload — so they document the arguments for the LLM, not just for devs.
class SearchInput(BaseModel):
    medical_condition: str = Field(..., description="The medical condition to base the search on.")
    address: str = Field(..., description="The address or location to consider for proximity in the search.")
|
39 |
+
|
40 |
+
@tool(args_schema=SearchInput)
def search_healthcare_professionals(medical_condition: str, address: str) -> str:
    """
    Search for healthcare professionals details based on their relevance to a specified medical condition
    and proximity to a given address. Results are approximate and based on available data.
    """
    # Local FastAPI service (api.py) that pre-filters locations by proximity.
    BASE_URL = "http://localhost:8000/filter_by_address"

    # Parameters for the request
    # NOTE(review): only the address is sent, so the service applies its own
    # default distance cutoff (30 km in api.py); medical_condition is used
    # only later, for LLM-side ranking.
    params = {
        'address': address,
    }

    response = requests.get(BASE_URL, params=params)

    if response.status_code == 200:
        results = response.json()
    else:
        raise Exception(f"API Request failed with status code: {response.status_code}")

    # Part 2: Use LLM
    # Prompt asking the model to rank the pre-filtered professionals by
    # relevance to the condition and by their listed distance.
    template = """ You are a medical expert and have been provided with a list of healthcare professionals
    along with their details including title, name, specialization, experience,
    company, address, and distance. The data is as follows:

    {context}

    Query: I am seeking healthcare professionals who specialize in {query}. Based
    on the provided data, please rank these professionals according to the relevance
    of their specialization to the medical condition and their distance as mentioned
    in the data. Provide a ranked tabular comparison, including their title, name,
    specialization, experience, company, address, and distance."""

    # NOTE(review): the "within 10km" label does not match the service's
    # 30 km default cutoff — confirm which radius is actually intended.
    context = "Results from the database (within 10km):\n{df}".format(df=results)

    results = invoke_llm(query=medical_condition, context=context, template=template)
    return results
|
77 |
+
|
78 |
+
|
79 |
+
# System persona for the agent; sent to the model on every turn.
system_message = (
    """You are a helpful and professional assistant for Whitecoat360, a pharmacy service operating in Malaysia.
Your role is to assist users in finding and scheduling appointments with the most suitable pharmacists and nutritionists.
Start by asking users about their specific medical concerns or if they have a general health inquiry.
Use this information, along with their specific location, to employ the 'search_healthcare_professionals' function
for identifying the right specialists. Once an appropriate specialist is found, guide the users through the appointment setting process.
Remember, your assistance should focus on logistical support based on user needs and location;
avoid providing medical advice or consultations."""
)

# Chat prompt: system persona + replayed history + user turn + scratchpad
# for intermediate tool calls.
prompt = ChatPromptTemplate.from_messages([
    ("system", system_message),
    MessagesPlaceholder(variable_name="chat_history"),
    ("user", "{input}"),
    MessagesPlaceholder(variable_name="agent_scratchpad")
])

tools = [search_healthcare_professionals]
functions = [format_tool_to_openai_function(f) for f in tools]

# Reuse model_name so the agent and invoke_llm always target the same model
# (the original repeated the 'gpt-4-1106-preview' literal here).
chat_model = ChatOpenAI(temperature=0, model=model_name).bind(functions=functions)
agent_chain = RunnablePassthrough.assign(
    agent_scratchpad=lambda x: format_to_openai_functions(x["intermediate_steps"])
) | prompt | chat_model | OpenAIFunctionsAgentOutputParser()
|
103 |
+
|
104 |
+
|
105 |
+
def predict(message, history):
    """Gradio chat callback: replay *history* into fresh memory, run the agent.

    history is a list of (human, ai) message pairs supplied by Gradio;
    returns the agent's reply text.
    """
    memory = ConversationBufferMemory(return_messages=True, memory_key="chat_history")

    # Rebuild conversation state turn by turn — the executor is stateless
    # between Gradio calls, so memory must be reconstructed each time.
    for user_turn, bot_turn in history:
        memory.save_context({"input": user_turn}, {"output": bot_turn})

    executor = AgentExecutor(agent=agent_chain, tools=tools, verbose=False, memory=memory)
    reply = executor.invoke({"input": message})
    return reply['output']
|
113 |
+
|
114 |
+
|
115 |
+
# Start the Gradio chat UI; share=True also opens a public tunnel URL.
# (Commented-out scratch code that previously followed has been removed.)
gr.ChatInterface(predict).launch(share=True)
|
requirements.txt
ADDED
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file may be used to create an environment using:
|
2 |
+
# $ conda create --name <env> --file <this file>
|
3 |
+
# platform: linux-64
|
4 |
+
_libgcc_mutex=0.1=main
|
5 |
+
_openmp_mutex=5.1=1_gnu
|
6 |
+
aiofiles=23.2.1=pypi_0
|
7 |
+
aiohttp=3.9.1=pypi_0
|
8 |
+
aiosignal=1.3.1=pypi_0
|
9 |
+
altair=5.2.0=pypi_0
|
10 |
+
annotated-types=0.6.0=pypi_0
|
11 |
+
anyio=3.7.1=pypi_0
|
12 |
+
argcomplete=1.12.3=pypi_0
|
13 |
+
async-timeout=4.0.3=pypi_0
|
14 |
+
attrs=23.1.0=pypi_0
|
15 |
+
bzip2=1.0.8=h7b6447c_0
|
16 |
+
ca-certificates=2023.08.22=h06a4308_0
|
17 |
+
certifi=2023.11.17=pypi_0
|
18 |
+
charset-normalizer=3.3.2=pypi_0
|
19 |
+
click=8.1.7=pypi_0
|
20 |
+
colorama=0.4.6=pypi_0
|
21 |
+
contourpy=1.2.0=pypi_0
|
22 |
+
cycler=0.12.1=pypi_0
|
23 |
+
dataclasses-json=0.6.3=pypi_0
|
24 |
+
distro=1.8.0=pypi_0
|
25 |
+
exceptiongroup=1.2.0=pypi_0
|
26 |
+
fastapi=0.104.1=pypi_0
|
27 |
+
ffmpy=0.3.1=pypi_0
|
28 |
+
filelock=3.13.1=pypi_0
|
29 |
+
fonttools=4.45.1=pypi_0
|
30 |
+
frozenlist=1.4.0=pypi_0
|
31 |
+
fsspec=2023.10.0=pypi_0
|
32 |
+
future=0.18.3=pypi_0
|
33 |
+
geographiclib=2.0=pypi_0
|
34 |
+
geopy=2.4.1=pypi_0
|
35 |
+
gradio=4.7.1=pypi_0
|
36 |
+
gradio-client=0.7.0=pypi_0
|
37 |
+
greenlet=3.0.1=pypi_0
|
38 |
+
h11=0.14.0=pypi_0
|
39 |
+
httpcore=1.0.2=pypi_0
|
40 |
+
httpx=0.25.2=pypi_0
|
41 |
+
huggingface-hub=0.19.4=pypi_0
|
42 |
+
idna=3.6=pypi_0
|
43 |
+
importlib-resources=6.1.1=pypi_0
|
44 |
+
jinja2=3.1.2=pypi_0
|
45 |
+
jsonpatch=1.33=pypi_0
|
46 |
+
jsonpointer=2.4=pypi_0
|
47 |
+
jsonschema=4.20.0=pypi_0
|
48 |
+
jsonschema-specifications=2023.11.1=pypi_0
|
49 |
+
kiwisolver=1.4.5=pypi_0
|
50 |
+
langchain=0.0.342=pypi_0
|
51 |
+
langchain-core=0.0.7=pypi_0
|
52 |
+
langsmith=0.0.67=pypi_0
|
53 |
+
ld_impl_linux-64=2.38=h1181459_1
|
54 |
+
libffi=3.4.4=h6a678d5_0
|
55 |
+
libgcc-ng=11.2.0=h1234567_1
|
56 |
+
libgomp=11.2.0=h1234567_1
|
57 |
+
libstdcxx-ng=11.2.0=h1234567_1
|
58 |
+
libuuid=1.41.5=h5eee18b_0
|
59 |
+
markdown-it-py=3.0.0=pypi_0
|
60 |
+
markupsafe=2.1.3=pypi_0
|
61 |
+
marshmallow=3.20.1=pypi_0
|
62 |
+
matplotlib=3.8.2=pypi_0
|
63 |
+
mdurl=0.1.2=pypi_0
|
64 |
+
multidict=6.0.4=pypi_0
|
65 |
+
mypy-extensions=1.0.0=pypi_0
|
66 |
+
ncurses=6.4=h6a678d5_0
|
67 |
+
numpy=1.26.2=pypi_0
|
68 |
+
openai=1.3.6=pypi_0
|
69 |
+
openssl=3.0.12=h7f8727e_0
|
70 |
+
orjson=3.9.10=pypi_0
|
71 |
+
packaging=23.2=pypi_0
|
72 |
+
pandas=2.1.3=pypi_0
|
73 |
+
pillow=10.1.0=pypi_0
|
74 |
+
pip=23.3.1=py310h06a4308_0
|
75 |
+
pydantic=2.5.2=pypi_0
|
76 |
+
pydantic-core=2.14.5=pypi_0
|
77 |
+
pydub=0.25.1=pypi_0
|
78 |
+
pygments=2.17.2=pypi_0
|
79 |
+
pyparsing=3.1.1=pypi_0
|
80 |
+
python=3.10.13=h955ad1f_0
|
81 |
+
python-dateutil=2.8.2=pypi_0
|
82 |
+
python-dotenv=1.0.0=pypi_0
|
83 |
+
python-multipart=0.0.6=pypi_0
|
84 |
+
pytz=2023.3.post1=pypi_0
|
85 |
+
pyyaml=6.0.1=pypi_0
|
86 |
+
readline=8.2=h5eee18b_0
|
87 |
+
referencing=0.31.0=pypi_0
|
88 |
+
requests=2.31.0=pypi_0
|
89 |
+
requests-toolbelt=0.9.1=pypi_0
|
90 |
+
rich=13.7.0=pypi_0
|
91 |
+
rpds-py=0.13.1=pypi_0
|
92 |
+
semantic-version=2.10.0=pypi_0
|
93 |
+
setuptools=68.0.0=py310h06a4308_0
|
94 |
+
shellingham=1.5.4=pypi_0
|
95 |
+
six=1.16.0=pypi_0
|
96 |
+
sniffio=1.3.0=pypi_0
|
97 |
+
sqlalchemy=2.0.23=pypi_0
|
98 |
+
sqlite=3.41.2=h5eee18b_0
|
99 |
+
starlette=0.27.0=pypi_0
|
100 |
+
tabulate=0.8.10=pypi_0
|
101 |
+
tenacity=8.2.3=pypi_0
|
102 |
+
tk=8.6.12=h1ccaba5_0
|
103 |
+
tomlkit=0.12.0=pypi_0
|
104 |
+
toolz=0.12.0=pypi_0
|
105 |
+
tqdm=4.66.1=pypi_0
|
106 |
+
typer=0.9.0=pypi_0
|
107 |
+
typing-extensions=4.8.0=pypi_0
|
108 |
+
typing-inspect=0.9.0=pypi_0
|
109 |
+
tzdata=2023.3=pypi_0
|
110 |
+
tzlocal=2.1=pypi_0
|
111 |
+
urllib3=2.1.0=pypi_0
|
112 |
+
uvicorn=0.24.0.post1=pypi_0
|
113 |
+
websockets=11.0.3=pypi_0
|
114 |
+
wheel=0.41.2=py310h06a4308_0
|
115 |
+
xz=5.4.2=h5eee18b_0
|
116 |
+
yarl=1.9.3=pypi_0
|
117 |
+
zlib=1.2.13=h5eee18b_0
|
services/__pycache__/location_services.cpython-310.pyc
ADDED
Binary file (3.99 kB). View file
|
|
services/__pycache__/location_services.cpython-311.pyc
ADDED
Binary file (6.53 kB). View file
|
|
services/location_services.py
ADDED
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import pandas as pd
|
3 |
+
from dotenv import load_dotenv
|
4 |
+
import requests
|
5 |
+
from geopy.distance import geodesic
|
6 |
+
|
7 |
+
load_dotenv()
|
8 |
+
|
9 |
+
def find_place(input_text, api_key):
    """Look up a place via the Google Places "Find Place from Text" API.

    Returns the parsed JSON response on HTTP 200; otherwise a human-readable
    error string (callers treat any non-dict result as a failure).
    """
    endpoint = "https://maps.googleapis.com/maps/api/place/findplacefromtext/json"
    query = {
        "fields": "formatted_address,name,rating,opening_hours,geometry",
        "input": input_text,
        "inputtype": "textquery",
        "key": api_key,
    }

    resp = requests.get(endpoint, params=query)

    # Non-200 responses are reported as strings rather than raised — the
    # callers in this module rely on that contract.
    if resp.status_code != 200:
        return f"Request failed with status code: {resp.status_code}"
    return resp.json()
|
29 |
+
|
30 |
+
|
31 |
+
|
32 |
+
def get_zip_code(address):
    """Geocode *address* and return its postal code.

    Returns the postal-code string, None when the geocode result carries no
    postal_code component, or an error-message string on API failure.
    """
    base_url = "https://maps.googleapis.com/maps/api/geocode/json"
    params = {
        "address": address,
        # SECURITY FIX: the key was previously hard-coded here; read it from
        # the environment (loaded from .env above) like the rest of the module.
        "key": os.getenv('GOOGLE_MAPS_API_KEY')
    }
    response = requests.get(base_url, params=params)
    if response.status_code == 200:
        data = response.json()
        if data['status'] == 'OK':
            # Extract the postal code from the address components
            for component in data['results'][0]['address_components']:
                if 'postal_code' in component['types']:
                    return component['long_name']
            return None
        else:
            return f"API request returned status: {data['status']}"
    else:
        return f"Request failed with status code: {response.status_code}"
|
51 |
+
|
52 |
+
|
53 |
+
def get_distances_for_multiple_destinations(origin, destinations, api_key):
    """Query the Google Distance Matrix API for route distances.

    origin is a "lat,lng" string; destinations is a list of "lat,lng"
    strings. Returns one distance text (e.g. "4.2 km") per destination;
    failures are reported as message strings in the same positions.
    """
    base_url = "https://maps.googleapis.com/maps/api/distancematrix/json"
    params = {
        "origins": origin,
        "destinations": '|'.join(destinations),  # API takes '|'-separated list
        "key": api_key,
    }

    response = requests.get(base_url, params=params)
    if response.status_code != 200:
        return [f"Request failed with status code: {response.status_code}"]

    data = response.json()
    if data['status'] != 'OK':
        return ["API request returned error: " + data['status']]

    # One element per destination, in request order.
    return [
        element['distance']['text'] if element['status'] == 'OK'
        else "Distance calculation failed"
        for element in data['rows'][0]['elements']
    ]
|
79 |
+
|
80 |
+
class LocationDataHandler:
    """Filters a locations DataFrame by proximity to a free-text address.

    The DataFrame is expected to carry 'Latitude' and 'Longitude' columns
    (presumably one row per professional/clinic — confirm against caller).
    The Google Maps API key is read from the GOOGLE_MAPS_API_KEY env var.
    """

    def __init__(self, data_frame):
        self.api_key = os.getenv('GOOGLE_MAPS_API_KEY')
        self.data_frame = data_frame

    def get_latlong_from_address(self, address):
        """Geocode *address* to a (lat, lng) tuple via find_place; None on failure."""
        try:
            result = find_place(address, self.api_key)
            location = result['candidates'][0]['geometry']['location']
            return location['lat'], location['lng']
        except Exception as e:
            # find_place returns an error string on HTTP failure; indexing it
            # raises here, so every failure mode funnels into this branch.
            print(f"Error retrieving location for {address}: {e}")
            return None

    def calculate_distance(self, ref_point, latitude, longitude):
        """Great-circle distance (km) from ref_point to (latitude, longitude); None on error."""
        try:
            return geodesic(ref_point, (latitude, longitude)).kilometers
        except Exception as e:
            print(f"Error calculating distance: {e}")
            return None

    def filter_by_address(self, address, max_distance_km=30):
        """Return up to 10 rows within *max_distance_km* of *address*, nearest first.

        Returns None when the address cannot be geocoded. The 'Distance'
        column of the result holds Google-route distance texts (e.g. "4.2 km").
        """
        ref_point = self.get_latlong_from_address(address)
        if not ref_point:
            return None

        # BUG FIX: work on a copy. The original wrote the 'Distance' column
        # onto self.data_frame, leaking it into every other consumer of the
        # shared frame (e.g. the /data endpoint) and across requests.
        frame = self.data_frame.copy()
        frame['Distance'] = frame.apply(
            lambda row: self.calculate_distance(ref_point, row['Latitude'], row['Longitude']),
            axis=1,
        )

        # Top 10 candidates by straight-line distance.
        df = (
            frame[frame['Distance'] <= max_distance_km]
            .sort_values(by='Distance')
            .head(10)
            .reset_index(drop=True)
        )
        if df.empty:
            # Nothing in range: skip the Distance Matrix call (assigning its
            # 1-element error list to an empty frame would raise) and return
            # the empty result so the caller can answer 404 cleanly.
            return df

        # Refine with actual route distances from the Distance Matrix API.
        destinations = df.apply(lambda row: f"{row['Latitude']},{row['Longitude']}", axis=1).tolist()
        distances = get_distances_for_multiple_destinations(
            f"{ref_point[0]},{ref_point[1]}", destinations, self.api_key
        )

        # Replace straight-line values with the API's route-distance texts.
        df['Distance'] = distances
        return df
|