Spaces: Runtime error
Update app.py
app.py
CHANGED
@@ -12,10 +12,7 @@ from dotenv import load_dotenv
 import openai
 
 load_dotenv()
-
-# api_key = os.getenv('OPENAI_API_KEY') ## no good: the .env file is not hidden once uploaded
-# api_key = os.environ['my_secret'] ## does not get loaded
-# api_key = os.getenv('my_secret') ## attempt 3: load the secret key instead of the .env file
+
 os.environ["OPENAI_API_KEY"] = os.environ['my_secret']
 
 loader = PyPDFLoader("/home/user/app/docs.pdf")
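The commented-out attempts removed above show the author iterating toward reading the key from a Space secret; the line that survives indexes os.environ directly, which raises KeyError at startup when the secret is missing. A minimal defensive sketch of the same idea, assuming the Space secret really is named my_secret as in the hunk:

import os

secret = os.getenv("my_secret")  # returns None instead of raising KeyError when unset
if secret is None:
    raise RuntimeError("Space secret 'my_secret' is not set; add it in the Space settings.")
os.environ["OPENAI_API_KEY"] = secret  # openai/langchain pick the key up from this variable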
@@ -32,7 +29,7 @@ retriever = vector_store.as_retriever(search_kwargs={"k": 2})
 from langchain.chat_models import ChatOpenAI
 from langchain.chains import RetrievalQAWithSourcesChain
 
-llm = ChatOpenAI(model_name="gpt-
+llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0) # Modify model_name if you have access to GPT-4
 
 chain = RetrievalQAWithSourcesChain.from_chain_type(
     llm=llm,
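The hunk is cut off after llm=llm, so the remaining arguments to from_chain_type are not visible. A sketch of how such a chain is typically built and queried with the classic langchain API these imports come from, assuming chain_type="stuff" and reusing the k=2 retriever named in the hunk header (retriever refers to that variable from app.py):

from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQAWithSourcesChain

llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
chain = RetrievalQAWithSourcesChain.from_chain_type(
    llm=llm,
    chain_type="stuff",   # concatenate the retrieved chunks into a single prompt
    retriever=retriever,  # assumed: the k=2 retriever defined earlier in app.py
)

result = chain({"question": "What does the document cover?"})
print(result["answer"], result["sources"])  # the chain returns an answer plus its sources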
@@ -66,7 +63,7 @@ from langchain.chains import RetrievalQAWithSourcesChain
 
 chain_type_kwargs = {"prompt": prompt}
 
-llm = ChatOpenAI(model_name="gpt-
+llm = ChatOpenAI(model_name="gpt-3.5", temperature=0) # Modify model_name if you have access to GPT-4
 
 chain = RetrievalQAWithSourcesChain.from_chain_type(
     llm=llm,
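Note that this hunk sets model_name="gpt-3.5" while the earlier hunk uses "gpt-3.5-turbo". "gpt-3.5" is not a model identifier the OpenAI API publishes, so requests through this second chain are likely to be rejected at call time. A one-line sketch assuming "gpt-3.5-turbo" was intended:

llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)  # "gpt-3.5" on its own is not accepted by the API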
@@ -100,7 +97,7 @@ def respond(message, chat_history): # handles the chatbot's response
     return "", chat_history # return the updated chat history
 
 with gr.Blocks(theme='gstaff/sketch') as demo: # build the interface with gr.Blocks()
-    gr.Markdown("# Hello! Chat with the bot.")
+    gr.Markdown("# Hello! Chat with the bot. \n Answers may take a little while to generate.")
     chatbot = gr.Chatbot(label="Chat window") # chatbot component labeled "Chat window"
     msg = gr.Textbox(label="Input") # textbox labeled "Input"
     clear = gr.Button("Reset") # button labeled "Reset"
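The diff ends before the components are wired to the respond function. A typical Gradio wiring for this layout is sketched below; the submit/click calls are standard Gradio, but their use here is an assumption since the rest of app.py is not shown (respond refers to the function defined in the file):

import gradio as gr

with gr.Blocks(theme='gstaff/sketch') as demo:
    gr.Markdown("# Hello! Chat with the bot. \n Answers may take a little while to generate.")
    chatbot = gr.Chatbot(label="Chat window")
    msg = gr.Textbox(label="Input")
    clear = gr.Button("Reset")
    # Assumed wiring: pressing Enter in the textbox calls respond(message, chat_history),
    # and the button empties the chat history.
    msg.submit(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()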