taldemir committed
Commit c92f9f4 · verified · Parent(s): 3273f38

Update app.py

Files changed (1):
  app.py  +19 -15
app.py CHANGED
@@ -2,11 +2,13 @@ import os
 import gradio as gr
 from dotenv import load_dotenv
 from openai import OpenAI
+from fastapi import FastAPI
+import threading
+import uvicorn
 from prompts.initial_prompt import INITIAL_PROMPT
 from prompts.main_prompt import MAIN_PROMPT
-from fastapi import FastAPI
 
-# Load OpenAI API Key from .env file
+# ✅ Load OpenAI API Key
 if os.path.exists(".env"):
     load_dotenv(".env")
 
@@ -14,19 +16,18 @@ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 
 client = OpenAI(api_key=OPENAI_API_KEY)
 
-# ✅ Create FastAPI app
-app = FastAPI()
+# ✅ FastAPI App for Serving Prompts
+fastapi_app = FastAPI()
 
-# ✅ API Endpoints to Serve Prompts
-@app.get("/initial_prompt")
+@fastapi_app.get("/initial_prompt")
 def get_initial_prompt():
     return {"prompt": INITIAL_PROMPT}
 
-@app.get("/main_prompt")
+@fastapi_app.get("/main_prompt")
 def get_main_prompt():
     return {"prompt": MAIN_PROMPT}
 
-# ✅ Chatbot function
+# ✅ Chatbot Function
 def gpt_call(history, user_message,
              model="gpt-4o-mini",
              max_tokens=512,
@@ -64,12 +65,14 @@ def respond(user_message, history):
 
     return "", history
 
-with gr.Blocks() as demo:
+# ✅ Gradio UI
+with gr.Blocks() as gradio_app:
     gr.Markdown("## Simple Chat Interface")
 
     chatbot = gr.Chatbot(
-        value=[("", INITIAL_PROMPT)],
-        height=500
+        value=[{"role": "assistant", "content": INITIAL_PROMPT}],
+        height=500,
+        type="messages"
     )
 
     state_history = gr.State([("", INITIAL_PROMPT)])
@@ -89,10 +92,11 @@ with gr.Blocks() as demo:
         outputs=[state_history]
     )
 
-# ✅ Launch Gradio and FastAPI together
-import threading
-
+# ✅ Run FastAPI and Gradio Together
 def run_gradio():
-    demo.launch(server_name="0.0.0.0", server_port=7860, share=True)
+    gradio_app.launch(server_name="0.0.0.0", server_port=7860)
 
 threading.Thread(target=run_gradio).start()
+
+if __name__ == "__main__":
+    uvicorn.run(fastapi_app, host="0.0.0.0", port=8000)
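
After this change the app runs two servers in one process: the Gradio UI on port 7860 (launched from a background thread) and the FastAPI app via uvicorn on port 8000, which serves the two prompt endpoints. As a quick sanity check of those endpoints, here is a minimal client sketch; it assumes the app is running locally and uses the third-party requests library, neither of which is part of this commit.

import requests  # assumed dependency, not added by this commit

# Fetch both prompts from the FastAPI app started with uvicorn on port 8000
for endpoint in ("initial_prompt", "main_prompt"):
    resp = requests.get(f"http://localhost:8000/{endpoint}", timeout=10)
    resp.raise_for_status()
    print(endpoint, "->", resp.json()["prompt"][:80])  # each endpoint returns {"prompt": ...}

Note that uvicorn.run() blocks, which is why the Gradio interface is started from a separate thread beforehand; the two servers share the process but listen on different ports.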