Update app.py
Browse files
app.py
CHANGED
@@ -8,7 +8,7 @@ model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
|
|
8 |
client = InferenceClient(model)
|
9 |
|
10 |
# Embedded system prompt
|
11 |
-
system_prompt_text = "You are a smart and helpful co-worker of Thailand based multi-national company PTT, and PTTEP. You help with any kind of request and provide a detailed answer to the question. But if you are asked about something unethical or dangerous, you must refuse and provide a safe and respectful way to handle that"
|
12 |
|
13 |
# Read the content of the info.md file
|
14 |
with open("info.md", "r") as file:
|
@@ -18,14 +18,13 @@ with open("info.md", "r") as file:
|
|
18 |
chunk_size = 2000 # Adjust this size as needed
|
19 |
info_md_chunks = textwrap.wrap(info_md_content, chunk_size)
|
20 |
|
21 |
-
def
|
22 |
-
|
23 |
-
return chunks[0]
|
24 |
|
25 |
-
def format_prompt_mixtral(message, history,
|
26 |
prompt = "<s>"
|
27 |
-
|
28 |
-
prompt += f"{
|
29 |
prompt += f"{system_prompt_text}\n\n" # Add the system prompt
|
30 |
|
31 |
if history:
|
@@ -66,7 +65,7 @@ def check_rand(inp, val):
|
|
66 |
return gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, value=int(val))
|
67 |
|
68 |
with gr.Blocks() as app: # Add auth here
|
69 |
-
gr.HTML("""<center><h1 style='font-size:xx-large;'>PTT Chatbot</h1><br><h3>running on Huggingface Inference </h3><br><h7>EXPERIMENTAL""")
|
70 |
with gr.Row():
|
71 |
chat = gr.Chatbot(height=500)
|
72 |
with gr.Group():
|
|
|
8 |
client = InferenceClient(model)
|
9 |
|
10 |
# Embedded system prompt
|
11 |
+
system_prompt_text = "You are a smart and helpful co-worker of Thailand based multi-national company PTT, and PTTEP. You help with any kind of request and provide a detailed answer to the question. But if you are asked about something unethical or dangerous, you must refuse and provide a safe and respectful way to handle that."
|
12 |
|
13 |
# Read the content of the info.md file
|
14 |
with open("info.md", "r") as file:
|
|
|
18 |
chunk_size = 2000 # Adjust this size as needed
|
19 |
info_md_chunks = textwrap.wrap(info_md_content, chunk_size)
|
20 |
|
21 |
+
def get_all_chunks(chunks):
    """Merge all text chunks into a single string.

    Each chunk is separated from the next by a blank line so the
    combined context stays readable when prepended to a prompt.
    """
    separator = "\n\n"
    return separator.join(chunks)
|
|
|
23 |
|
24 |
+
def format_prompt_mixtral(message, history, info_md_chunks):
|
25 |
prompt = "<s>"
|
26 |
+
all_chunks = get_all_chunks(info_md_chunks)
|
27 |
+
prompt += f"{all_chunks}\n\n" # Add all chunks of info.md at the beginning
|
28 |
prompt += f"{system_prompt_text}\n\n" # Add the system prompt
|
29 |
|
30 |
if history:
|
|
|
65 |
return gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, value=int(val))
|
66 |
|
67 |
with gr.Blocks() as app: # Add auth here
|
68 |
+
gr.HTML("""<center><h1 style='font-size:xx-large;'>PTT Chatbot</h1><br><h3>running on Huggingface Inference </h3><br><h7>EXPERIMENTAL</center>""")
|
69 |
with gr.Row():
|
70 |
chat = gr.Chatbot(height=500)
|
71 |
with gr.Group():
|