Update app.py
app.py
CHANGED
@@ -6,42 +6,63 @@ from retriever import *
 from retrieve_docs import *
 from make_chain_model import make_chain_llm
 from make_answer import *
+from transformers import TextStreamer
 
-
-
-
-
-    max_tokens,
-    temperature,
-    top_p,
-):
-    messages = [{"role": "system", "content": system_message}]
+llm = load_Auto()
+pinecone,bm25 = all_files('files')
+retriever=retriever(pinecone,bm25)
+rag_chain = make_chain_llm(retriever,llm)
 
-
-
-        messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
+def response(message, history):
+    return rag_chain.invoke(message)
 
-
+# def talk(,history):
+#     # k = 1 # number of retrieved documents
+#     # scores , retrieved_documents = search(prompt, k)
+#     # formatted_prompt = format_prompt(prompt,retrieved_documents,k)
+#     # formatted_prompt = formatted_prompt[:2000] # to avoid GPU OOM
+
+#     messages =[
+#         {"role": "system", "content": f"{PROMPT}"},
+#         {}
+#         {"role": "user", "content": f"{instruction}"}
+#     ]
+
+#     # tell the model to generate
+#     input_ids = tokenizer.apply_chat_template(
+#         messages,
+#         add_generation_prompt=True,
+#         return_tensors="pt"
+#     ).to(model.device)
 
-
+#     terminators = [
+#         tokenizer.eos_token_id,
+#         tokenizer.convert_tokens_to_ids("<|eot_id|>")
+#     ]
 
-
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
-        temperature=temperature,
-        top_p=top_p,
-    ):
-        token = message.choices[0].delta.content
+#     text_streamer = TextStreamer(tokenizer)
 
-
-
+#     outputs = model.generate(
+#         input_ids,
+#         max_new_tokens=4096,
+#         eos_token_id=terminators,
+#         do_sample=True,
+#         streamer = text_streamer,
+#         temperature=0.6,
+#         top_p=0.9,
+#         repetition_penalty = 1.1
+#     )
+#     response = []
+#     for text in streamer:
+#         response.append(outputs)
+#         print(response)
+#     yield "".join(response)
+
 
 """
 For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
 """
+
 TITLE = "FUT FUT Chatbot"
 
 DESCRIPTION = """

@@ -57,7 +78,7 @@ Resources used to build this project :
 Examples = [['시흥 풋살 구장 추천해줘'],['풋살 경기 규칙 설명해줘'], ['풋살 경기 시간 알려줘']]
 
 demo = gr.ChatInterface(
-    fn=
+    fn=response,
     chatbot=gr.Chatbot(
         show_label=True,
         show_share_button=True,

@@ -67,11 +88,9 @@ demo = gr.ChatInterface(
         bubble_full_width=False,
     ),
     theme="Soft",
-    examples=
+    examples=Examples,
     title=TITLE,
     description=DESCRIPTION,
-    examples=Examples
-
 )
 demo.launch(debug=True)
 
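The new response callback returns the whole rag_chain answer in one shot. If make_chain_llm hands back a LangChain-style runnable whose streamed chunks are strings (an assumption suggested by the rag_chain.invoke(message) call, not confirmed by this diff), the same callback could stream partial answers instead, since gr.ChatInterface treats a generator function as a streaming response. A minimal sketch:

def response(message, history):
    # Sketch only: assumes rag_chain exposes the Runnable .stream() interface
    # and yields plain string chunks.
    partial = ""
    for chunk in rag_chain.stream(message):
        partial += chunk
        yield partial  # ChatInterface re-renders the growing answer on each yield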
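The commented-out talk() draft builds a chat prompt with tokenizer.apply_chat_template and passes a TextStreamer to model.generate, but TextStreamer only prints to stdout and is not iterable, so the for text in streamer: loop would not work as written. The usual way to consume tokens from generate as they are produced is a TextIteratorStreamer driven from a background thread. A self-contained sketch of that pattern follows; the model id and the messages argument are placeholders, not taken from this repo:

from threading import Thread

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

MODEL_NAME = "meta-llama/Meta-Llama-3-8B-Instruct"  # placeholder model id

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME, torch_dtype=torch.bfloat16, device_map="auto"
)

def stream_chat(messages):
    # Tokenize the chat history into model inputs.
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)

    # The iterator streamer yields decoded text as generate() produces it.
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(
        input_ids=input_ids,
        max_new_tokens=4096,
        do_sample=True,
        temperature=0.6,
        top_p=0.9,
        repetition_penalty=1.1,
        streamer=streamer,
    )

    # Run generation in the background so the streamer can be consumed here.
    Thread(target=model.generate, kwargs=generation_kwargs).start()

    partial = ""
    for text in streamer:
        partial += text
        yield partial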
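For reference, the gr.ChatInterface wiring this commit settles on (a single fn= and a single examples= keyword) can be exercised on its own with a stub callback standing in for rag_chain.invoke; the description string below is a placeholder because the DESCRIPTION body sits outside this diff:

import gradio as gr

def response(message, history):
    return f"echo: {message}"  # stand-in for rag_chain.invoke(message)

demo = gr.ChatInterface(
    fn=response,
    chatbot=gr.Chatbot(show_label=True, show_share_button=True),
    examples=[["시흥 풋살 구장 추천해줘"], ["풋살 경기 규칙 설명해줘"], ["풋살 경기 시간 알려줘"]],
    title="FUT FUT Chatbot",
    description="Placeholder description",
)

if __name__ == "__main__":
    demo.launch(debug=True)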