Prompt Template

#2
by viral1008 - opened

Can anyone help me with the prompt template?

from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate

def get_prompt_template(promptTemplate_type=None, history=False):
    """Build the prompt and conversation memory for a RetrievalQA chain.

    Args:
        promptTemplate_type: reserved selector for model-specific template
            variants (currently unused).
        history: when True, the template carries a ``{history}`` slot so the
            returned memory can inject prior turns.

    Returns:
        A ``(prompt, memory)`` tuple, intended for
        ``RetrievalQA.from_chain_type(..., chain_type_kwargs={"prompt": prompt,
        "memory": memory})``.
    """
    # A "stuff" QA chain fills {context} with the retrieved documents and
    # {question} with the user query.  The original template had neither a
    # {context} slot (so retrieval results were dropped) nor any use of the
    # `history` flag; both are fixed here.
    system_text = (
        "Verwenden Sie die folgenden Kontextelemente, um die Frage am Ende zu "
        "beantworten. Wenn Sie die Antwort nicht kennen, sagen Sie einfach, dass "
        "Sie es nicht wissen, und versuchen Sie nicht, eine Antwort zu erfinden."
    )

    if history:
        prompt_template = (
            system_text
            + "\n\n{context}\n\n{history}\n\nUSER: {question} ASSISTANT:"
        )
        prompt = PromptTemplate(
            template=prompt_template,
            input_variables=["history", "context", "question"],
        )
    else:
        prompt_template = (
            system_text + "\n\n{context}\n\nUSER: {question} ASSISTANT:"
        )
        prompt = PromptTemplate(
            template=prompt_template,
            input_variables=["context", "question"],
        )

    # Memory keys must match the prompt's variable names, otherwise the chain
    # raises a missing-variable error at run time.
    memory = ConversationBufferMemory(input_key="question", memory_key="history")

    return (
        prompt,
        memory,
    )

|
error

3 = 0 | VSX = 0 |
ERROR:app:Exception on /ChatBotAPI/get-response-from-chatbot [POST]
Traceback (most recent call last):
File "D:\Zoo_Chatbot\venv\Lib\site-packages\flask\app.py", line 1455, in wsgi_app
response = self.full_dispatch_request()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\flask\app.py", line 869, in full_dispatch_request
rv = self.handle_user_exception(e)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\flask_cors\extension.py", line 176, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\flask\app.py", line 867, in full_dispatch_request
rv = self.dispatch_request()
^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\flask\app.py", line 852, in dispatch_request
return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\flask_cors\decorator.py", line 130, in wrapped_function
resp = make_response(f(*args, **kwargs))
^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\app.py", line 18, in get_response_from_chatbot
response = main(user_input)
^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\run_localGPT.py", line 119, in main
QA = RetrievalQA.from_chain_type(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\langchain\chains\retrieval_qa\base.py", line 100, in from_chain_type
combine_documents_chain = load_qa_chain(
^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\langchain\chains\question_answering\__init__.py", line 249, in load_qa_chain
return loader_mapping[chain_type](
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\langchain\chains\question_answering\__init__.py", line 73, in _load_stuff_chain
llm_chain = LLMChain(
^^^^^^^^^
File "D:\Zoo_Chatbot\venv\Lib\site-packages\langchain\load\serializable.py", line 74, in __init__
super().__init__(**kwargs)
File "pydantic\main.py", line 341, in pydantic.main.BaseModel.__init__
pydantic.error_wrappers.ValidationError: 1 validation error for LLMChain
prompt
value is not a valid dict (type=type_error.dict)

viral1008 changed discussion title from Prompt Templatee to Prompt Template

Sign up or log in to comment