import datetime
import os
import openai
import requests
import uvicorn
from dotenv import load_dotenv
# from flask import Flask, redirect, render_template, request, session, url_for
from fastapi import (Cookie, Depends, FastAPI, Form, HTTPException, Request,
                     Response)
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.security import APIKeyHeader
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from models.bloom import initialize_index
load_dotenv()
app = FastAPI()
app.secret_key = os.environ.get('SECRET_KEY', 'my-secret-key')
# openai.api_key = os.environ.get("OPENAI_API_KEY")
openai_api_key_header = APIKeyHeader(name="token")
index = None
# index = initialize_index("index.json")
OPENAI_API_KEY = ""
# mounts the static folder that contains the css file
app.mount("/static", StaticFiles(directory="static", html=True), name="static")
# locates the template files that will be modified at run time
# with the dialog from the user and bot
templates = Jinja2Templates(directory="templates")
# user data
user = {}
@app.get("/", response_class=HTMLResponse)
async def home(request: Request):
    return templates.TemplateResponse('login.html', {"request": request})
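# Checks whether the supplied OpenAI API key is valid by sending a minimal
# request to the Chat Completions endpoint and returning the raw
# requests.Response so callers can inspect errors. Note that this is a
# blocking HTTP call.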
def validate(token: str):
    api_endpoint = "https://api.openai.com/v1/chat/completions"
    api_key = token
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}"
    }
    messages = [
        {"role": "user", "content": "Say this is a test!"}
    ]
    data = {
        "model": "gpt-3.5-turbo",
        "messages": messages
    }
    response = requests.post(api_endpoint, json=data, headers=headers)
    return response
# Beware that most browsers cache GET requests
# (i.e., they are saved in the browser's history),
# making them less secure than POST, since the data
# sent is part of the URL and visible to anyone
# with access to the device. The GET method should
# therefore not be used to send passwords or other
# sensitive information.
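# A minimal sketch (hypothetical route, not wired to the current front-end) of
# accepting the key in the POST body as a form field instead of the query
# string, which keeps it out of the URL (requires the python-multipart package):
#
# @app.post("/login-form")
# async def login_form(token: str = Form(...)):
#     return {"ok": validate(token).ok}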
# @app.post("/login", response_class=HTMLResponse)
# async def login(token: str, request: Request):
# # form = await request.form()
# # print("login form data:", form.values())
# # api_key = form.get("access_token")
# print("apikey from login main.py: ", token)
# try:
# response = validate(token)
# print("login response: ", response.json())
# if ("error" in response.json()):
# error = response.json()["error"]
# raise HTTPException(status_code=400, detail=error["message"])
# except Exception as e:
# context = {"request": request, "error_message": str(e)}
# print("login error: ", context)
# return templates.TemplateResponse('login.html', context=context)
# return templates.TemplateResponse("chatbot.html", {"request": request})
# @app.post("/login")
# async def login(token: str, response: Response):
# print("apikey from login main.py: ", token)
# response = validate(token)
# print("login response: ", response.json())
# if ("error" in response.json()):
# error = response.json()["error"]
# # Redirect back to the login page with an error message
# response.set_cookie(key="message", value=error.message)
# return RedirectResponse(url="/login")
# # Store the session token or other identifying information in a cookie
# # response.set_cookie(key="username", value=username)
# # Redirect to the chatbot page upon successful login
# return RedirectResponse(url="/chatbot")
@app.post("/login")
async def login(token: str, response: Response):
result = validate(token)
print("login response: ", result.json())
if ("error" in result.json()):
error = result.json()["error"]
response.set_cookie(key="message", value=error["message"])
return {"error": error["message"]}
else:
return {"redirect": "/chatbot"}
@app.post("/initLlamaIndex")
async def initLlamaIndex(token: str):
openai.api_key = token
global index
index = initialize_index("index.json")
return {"success": True}
# Chatbot endpoint
@app.get("/chatbot", response_class=HTMLResponse)
async def chatbot(request: Request):
startTime = ""
context = {"request": request, "startTime": startTime}
return templates.TemplateResponse("chatbot.html", context=context)
# @app.get("/chatbot", response_class=HTMLResponse)
# async def chatbot(request: Request, username: str = Cookie(None)):
# if not username:
# # Redirect back to the login page if the user is not authenticated
# return RedirectResponse(url="/login")
# else:
# # Render the chatbot page with the username passed to the template
# return templates.TemplateResponse("chatbot.html", {"request": request, "username": username})
@app.get("/reply")
def reply(input: str):
bot_reply = index.query(input)
return {"bot_reply": bot_reply}
if __name__ == "__main__":
uvicorn.run("main:app", reload=True)