fastapi / main.py
import os
from langchain.memory import ConversationBufferMemory
from langchain.utilities import GoogleSearchAPIWrapper
from langchain.agents import initialize_agent, Tool
from lang import G4F
from fastapi import FastAPI
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

# Allow cross-origin requests from any origin.
app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Credentials for the Google Custom Search tool and the default LLM model name.
google_api_key = os.environ["GOOGLE_API_KEY"]
cse_id = os.environ["GOOGLE_CSE_ID"]
model = os.environ["default_model"]

search = GoogleSearchAPIWrapper(google_api_key=google_api_key, google_cse_id=cse_id)
tools = [
    Tool(
        name="Search",
        func=search.run,
        description="Useful when you need to answer questions about current events.",
    ),
]

# Conversation memory shared by the agent across requests.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

llm = G4F(model=model)
agent_chain = initialize_agent(
    tools,
    llm,
    agent="chat-conversational-react-description",
    verbose=True,
    memory=memory,
)


class PromptRequest(BaseModel):
    prompt: str


@app.get("/")
def hello():
    return "Hello! My name is Linlada."


@app.post("/linlada")
async def hello_post(body: PromptRequest):
    # Create a fresh LLM client for each request and answer the prompt directly.
    llm = G4F(model=model)
    chat = llm(body.prompt)
    return chat


@app.post("/search")
async def searches(body: PromptRequest):
    # Let the agent decide when to call the Google Search tool.
    response = agent_chain.run(input=body.prompt)
    return response
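

# Illustrative usage sketch (not part of the app): assuming the app is served
# locally with uvicorn, e.g. `uvicorn main:app --host 0.0.0.0 --port 8000`,
# the endpoints can be exercised like this:
#
#   curl http://localhost:8000/
#   curl -X POST http://localhost:8000/linlada \
#        -H "Content-Type: application/json" -d '{"prompt": "Hi!"}'
#   curl -X POST http://localhost:8000/search \
#        -H "Content-Type: application/json" -d '{"prompt": "What happened in the news today?"}'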