Evan Lesmez committed on
Commit cc4975d
1 Parent(s): 696f5ad

Save checkpoint of recipe prompt with discord msg


Rename the ingredients prompt to init prompt, which describes its role more aptly.
Move the OpenAI API calls under the if __name__ == "__main__": guard.

Files changed (1)
  1. chatbot/engineer_prompt.py +29 -25
chatbot/engineer_prompt.py CHANGED
@@ -11,8 +11,10 @@ from langchain.prompts.chat import (
 )
 
 # TODO Multiple chains sequenced?
+# I think your way works fine, though you'd probably want to wrap it up in some initializer so you can "initialize" the chain via LLM calls. I'd probably use 2 chains and have a wrapping chain switch from the first to the second after initializing.
+# https://discord.com/channels/1038097195422978059/1038097349660135474/1100533951136800828
 
-ingredients_prompt = ChatPromptTemplate.from_messages(
+init_prompt = ChatPromptTemplate.from_messages(
     [
         SystemMessagePromptTemplate.from_template(
             """
@@ -64,32 +66,34 @@ Steps (detailed):
     ]
 )
 
-# MessagesPlaceholder(variable_name="history"),
-# HumanMessagePromptTemplate.from_template("{input}"),
-chat = PromptLayerChatOpenAI(temperature=1, pl_tags=["langchain"], return_pl_id=True)
-memory = ConversationBufferMemory(return_messages=True)
 
-chat_msgs = ingredients_prompt.format_prompt(
-    ingredients="tofu, pickles, olives, tomatoes, lettuce, bell peppers, carrots, bread",
-    allergies="",
-    recipe_freeform_input="The preparation time should be less than 30 minutes. I really love Thai food!",
-)
-chat_msgs = chat_msgs.to_messages()
-results = chat.generate([chat_msgs])
-chat_msgs.extend(
-    [
-        results.generations[0][0].message,
-        MessagesPlaceholder(variable_name="history"),
-        HumanMessagePromptTemplate.from_template("{input}"),
-    ]
-)
-open_prompt = ChatPromptTemplate.from_messages(chat_msgs)
-conversation = ConversationChain(
-    llm=chat, verbose=True, memory=memory, prompt=open_prompt
-)
+if __name__ == "__main__":
+    chat = PromptLayerChatOpenAI(
+        temperature=1, pl_tags=["langchain"], return_pl_id=True
+    )
+    memory = ConversationBufferMemory(return_messages=True)
+    chat_msgs = init_prompt.format_prompt(
+        ingredients="tofu, pickles, olives, tomatoes, lettuce, bell peppers, carrots, bread",
+        allergies="",
+        recipe_freeform_input="The preparation time should be less than 30 minutes. I really love Thai food!",
+    )
+
+    chat_msgs = chat_msgs.to_messages()
+    results = chat.generate([chat_msgs])
+    chat_msgs.extend(
+        [
+            results.generations[0][0].message,
+            MessagesPlaceholder(variable_name="history"),
+            HumanMessagePromptTemplate.from_template("{input}"),
+        ]
+    )
+    open_prompt = ChatPromptTemplate.from_messages(chat_msgs)
+    conversation = ConversationChain(
+        llm=chat, verbose=True, memory=memory, prompt=open_prompt
+    )
 
-result = conversation.predict(input="Recommend a different recipe please.")
-print(result)
+    result = conversation.predict(input="Recommend a different recipe please.")
+    print(result)
 
 #! PL score example
 # chat_results = chat.generate([[HumanMessage(content=prompt)]])
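
The Discord suggestion quoted in the new comment (wrap the LLM-driven setup in an initializer, then switch to an open-ended conversation chain) is not implemented in this commit. Below is a minimal sketch of that idea for context only: the InitializedConversation class name and its predict signature are hypothetical, it reuses just the classes already used in chatbot/engineer_prompt.py, and the import paths assume the pre-0.1 langchain layout this file already relies on.

# Sketch only: a wrapping object that runs init_prompt once as an "initializer"
# call, then hands every later turn to a ConversationChain, mirroring what the
# __main__ block in this commit does inline.
from langchain.chains import ConversationChain
from langchain.chat_models import PromptLayerChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts.chat import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
)

# Importing no longer triggers an API call now that the calls sit under __main__.
from chatbot.engineer_prompt import init_prompt


class InitializedConversation:
    """Run the init prompt once, then delegate follow-up turns to a ConversationChain."""

    def __init__(self, llm):
        self.llm = llm
        self.chain = None  # built lazily on the first predict() call

    def _initialize(self, **init_kwargs):
        # First step: format and send the init prompt, keeping the model's reply.
        msgs = init_prompt.format_prompt(**init_kwargs).to_messages()
        results = self.llm.generate([msgs])
        msgs.append(results.generations[0][0].message)

        # Second step: extend that exchange into an open-ended prompt with
        # conversation history and a free-form {input} slot.
        msgs.extend(
            [
                MessagesPlaceholder(variable_name="history"),
                HumanMessagePromptTemplate.from_template("{input}"),
            ]
        )
        self.chain = ConversationChain(
            llm=self.llm,
            memory=ConversationBufferMemory(return_messages=True),
            prompt=ChatPromptTemplate.from_messages(msgs),
            verbose=True,
        )

    def predict(self, user_input, **init_kwargs):
        if self.chain is None:
            self._initialize(**init_kwargs)
        return self.chain.predict(input=user_input)


if __name__ == "__main__":
    convo = InitializedConversation(
        PromptLayerChatOpenAI(temperature=1, pl_tags=["langchain"], return_pl_id=True)
    )
    print(
        convo.predict(
            "Recommend a different recipe please.",
            ingredients="tofu, pickles, olives, tomatoes, lettuce",
            allergies="",
            recipe_freeform_input="Under 30 minutes, Thai flavors please.",
        )
    )

Because the initialization messages become part of the ConversationChain's prompt, the wrapper behaves like the two-chain handoff described in the Discord message while keeping a single object for callers to talk to.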