convert to ChatHuggingFace client wrapper
app/prompts.py  +4 -2

app/prompts.py  CHANGED

@@ -1,4 +1,4 @@
-from langchain_core.prompts import PromptTemplate
+from langchain_core.prompts import PromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate
 from typing import List
 # import models
 
@@ -36,7 +36,9 @@ rag_prompt: str = None
 
 # TODO: create raw_prompt_formatted by using format_prompt
 # raw_prompt_formatted = format_prompt(raw_prompt)
-raw_prompt =
+raw_prompt = ChatPromptTemplate.from_messages([
+    HumanMessagePromptTemplate.from_template(raw_prompt)
+])
 
 # TODO: use format_prompt to create history_prompt_formatted
 history_prompt_formatted: PromptTemplate = None
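For context, a minimal sketch of how a ChatPromptTemplate built this way could be wired to a ChatHuggingFace client, per the commit title. This is an illustrative assumption, not code from this commit: the {question} input variable, the repo_id, and the HuggingFaceEndpoint settings are placeholders.

# Hypothetical usage sketch -- not part of this commit.
# Assumes raw_prompt has been converted to a ChatPromptTemplate as in the
# diff above, here with a single {question} input variable as a placeholder.
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint

raw_prompt = ChatPromptTemplate.from_messages([
    HumanMessagePromptTemplate.from_template("{question}")
])

# Endpoint settings are assumptions for illustration; any chat-capable
# repo id works. Requires HUGGINGFACEHUB_API_TOKEN in the environment.
llm = HuggingFaceEndpoint(
    repo_id="HuggingFaceH4/zephyr-7b-beta",
    task="text-generation",
)
chat_model = ChatHuggingFace(llm=llm)

# LCEL pipeline: format the chat prompt, then call the chat client.
chain = raw_prompt | chat_model
# response = chain.invoke({"question": "What does this app do?"})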