Spaces:
Sleeping
Sleeping
pragneshbarik
committed on
Commit
•
1ae5964
1
Parent(s):
5dafb15
added nous hermes 2
Browse files- config.yaml +1 -0
- middlewares/chat_client.py +28 -7
config.yaml
CHANGED
@@ -12,6 +12,7 @@ PROMPT_ENGINEERING_DICT:
|
|
12 |
POST_PROMPT: PRIORITIZE DATA, FACTS AND STATISTICS OVER PERSONAL EXPERIENCES AND OPINIONS, FOCUS MORE ON STATISTICS AND DATA.
|
13 |
|
14 |
CHAT_BOTS:
|
|
|
15 |
Mixtral 8x7B v0.1: mistralai/Mixtral-8x7B-Instruct-v0.1
|
16 |
Mistral 7B v0.1: mistralai/Mistral-7B-Instruct-v0.1
|
17 |
Mistral 7B v0.2: mistralai/Mistral-7B-Instruct-v0.2
|
|
|
12 |
POST_PROMPT: PRIORITIZE DATA, FACTS AND STATISTICS OVER PERSONAL EXPERIENCES AND OPINIONS, FOCUS MORE ON STATISTICS AND DATA.
|
13 |
|
14 |
CHAT_BOTS:
|
15 |
+
Nous Hermes 2: NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
|
16 |
Mixtral 8x7B v0.1: mistralai/Mixtral-8x7B-Instruct-v0.1
|
17 |
Mistral 7B v0.1: mistralai/Mistral-7B-Instruct-v0.1
|
18 |
Mistral 7B v0.2: mistralai/Mistral-7B-Instruct-v0.2
|
middlewares/chat_client.py
CHANGED
@@ -7,13 +7,34 @@ load_dotenv()
|
|
7 |
API_TOKEN = os.getenv("HF_TOKEN")
|
8 |
|
9 |
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
|
18 |
|
19 |
def chat(
|
|
|
7 |
API_TOKEN = os.getenv("HF_TOKEN")
|
8 |
|
9 |
|
10 |
+
|
11 |
+
|
12 |
+
def format_prompt(session_state, query, history, chat_client):
    """Build a single prompt string for the selected chat model.

    Args:
        session_state: object carrying a ``system_instruction`` string
            (presumably the Streamlit session state — confirm against caller).
        query: the current user message to answer.
        history: iterable of ``(user_prompt, bot_response)`` pairs, oldest first.
        chat_client: HF model id; selects the prompt template.

    Returns:
        The fully formatted prompt string for the chosen model.
    """
    if chat_client == "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO":
        # ChatML template. BUGFIX: the system turn must be closed with
        # <|im_end|> just like the user/assistant turns below — the
        # original left it unterminated.
        model_input = f"""<|im_start|>system
{session_state.system_instruction}<|im_end|>
"""
        for user_prompt, bot_response in history:
            model_input += f"""<|im_start|>user
{user_prompt}<|im_end|>
"""
            model_input += f"""<|im_start|>assistant
{bot_response}<|im_end|>
"""
        # Open the assistant turn without a trailing newline so the model
        # continues generation from here.
        model_input += f"""<|im_start|>user
{query}<|im_end|>
<|im_start|>assistant"""

        return model_input
    else:
        # Mistral-style [INST] template used by the Mixtral/Mistral models.
        model_input = "<s>"
        for user_prompt, bot_response in history:
            model_input += f"[INST] {user_prompt} [/INST]"
            model_input += f" {bot_response}</s> "
        model_input += f"[INST] {query} [/INST]"
        return model_input
|
38 |
|
39 |
|
40 |
def chat(
|