# Adapted from Prompt-aided Language Models [PAL](https://arxiv.org/pdf/2211.10435.pdf).
import minichain
# PAL prompt: ask the LLM to write a Python program that solves the question.
class PalPrompt(minichain.TemplatePrompt):
    # Template file rendered by minichain.TemplatePrompt; wraps the incoming
    # question in the PAL few-shot examples (see the paper linked above).
    template_file = "pal.pmpt.tpl"
# Prompt that executes the generated program and reads back the answer.
class PyPrompt(minichain.Prompt):
    """Append a driver line to the generated code, run it, and parse an int."""

    def prompt(self, inp):
        # `inp` is the Python source produced by the previous prompt; add a
        # final print so the backend emits solution()'s return value on stdout.
        driver = "\nprint(solution())"
        return inp + driver

    def parse(self, response, inp):
        # The backend's captured stdout is the printed number; coerce to int.
        return int(response)
# Chain the two prompts: the LLM writes code, the Python backend runs it.
with minichain.start_chain("pal") as backend:
    prompt = PalPrompt(backend.OpenAI()).chain(PyPrompt(backend.Python()))

# Direct invocation would be: result = prompt({"question": question})

# Example word problem used to seed the demo UI.
question = (
    "Melanie is a door-to-door saleswoman. She sold a third of her "
    "vacuum cleaners at the green house, 2 more to the red house, and half of "
    "what was left at the orange house. If Melanie has 5 vacuum cleaners left, "
    "how many did she start with?"
)

# Expose the chained prompt as a Gradio app with a single "question" field.
gradio = prompt.to_gradio(fields=["question"], examples=[question])

if __name__ == "__main__":
    gradio.launch()
# View prompt examples.
# # + tags=["hide_inp"]
# PalPrompt().show(
# {"question": "Joe has 10 cars and Bobby has 12. How many do they have together?"},
# "def solution():\n\treturn 10 + 12",
# )
# # -
# # + tags=["hide_inp"]
# PyPrompt().show("def solution():\n\treturn 10 + 12", "22")
# # -
# # View the log.
# minichain.show_log("pal.log")