File size: 2,384 Bytes
cd607b2 eac37df f5ec828 cd607b2 eac37df 4e3dc76 cd607b2 7b856a8 69deff6 4e3dc76 69deff6 8200c4e 7b856a8 5b30d27 7b856a8 4e3dc76 8200c4e 4e3dc76 7b856a8 8200c4e 69deff6 4e3dc76 7b856a8 8200c4e 4e3dc76 8200c4e 4e3dc76 7b856a8 5b30d27 7b856a8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 |
# + tags=["hide_inp"]
# Markdown shown at the top of the Gradio demo page (hidden input cell in the
# notebook export).
desc = """
### Chat
A chat-like example for multi-turn chat with state. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/chat.ipynb)
(Adapted from [LangChain](https://langchain.readthedocs.io/en/latest/modules/memory/examples/chatgpt_clone.html)'s version of this [blog post](https://www.engraved.blog/building-a-virtual-machine-inside/).)
"""
# -
# $
from dataclasses import dataclass, replace
from typing import List, Tuple
from minichain import OpenAI, prompt, show, transform, Mock
# Generic stateful Memory
# Maximum number of past (human, assistant) exchanges kept in State.memory;
# State.push evicts the oldest entry once this bound is reached.
MEMORY = 2
@dataclass
class State:
    """Rolling chat memory holding the last MEMORY (human, assistant) pairs."""

    memory: List[Tuple[str, str]]
    human_input: str = ""

    def push(self, response: str) -> "State":
        """Return a new State with the current exchange appended.

        Evicts the oldest exchange when the memory bound is reached; the
        new state's human_input resets to the default empty string.
        """
        kept = self.memory[1:] if len(self.memory) >= MEMORY else self.memory
        return State(kept + [(self.human_input, response)])

    def __str__(self):
        # Rendered as the most recent assistant response.
        return self.memory[-1][-1]
# Chat prompt with memory
@prompt(OpenAI(), template_file="chat.pmpt.tpl")
def chat_response(model, state: State) -> State:
    """Stream a model completion for the chat template rendered from state."""
    streamed = model.stream(state)
    return streamed
@transform()
def update(state, chat_output):
    """Extract the assistant reply from raw model output and fold it into state."""
    # Everything after the last "Assistant:" marker is the reply; if the
    # marker is absent, the whole output is used (rpartition's tail is then
    # the full string, matching split(...)[-1]).
    reply = chat_output.rpartition("Assistant:")[2]
    return state.push(reply)
def chat(command, state):
    """Run one chat turn: record the human command, then fetch and store the reply."""
    with_input = replace(state, human_input=command)
    response = chat_response(with_input)
    return update(with_input, response)
# $
# Demo inputs for the LLM-simulated "virtual machine" chat (from the blog post).
examples = [
    "ls ~",
    "cd ~",
    "{Please make a file jokes.txt inside and put some jokes inside}",
    """echo -e "x=lambda y:y*5+3;print('Result:' + str(x(6)))" > run.py && python3 run.py""",
    """echo -e "print(list(filter(lambda x: all(x%d for d in range(2,x)),range(2,3**10)))[:10])" > run.py && python3 run.py""",
    # NOTE(review): the inner echo below has an unbalanced quote
    # ('Hello from Docker") — kept verbatim since the terminal is
    # LLM-simulated, not actually executed; confirm before "fixing".
    """echo -e "echo 'Hello from Docker" > entrypoint.sh && echo -e "FROM ubuntu:20.04\nCOPY entrypoint.sh entrypoint.sh\nENTRYPOINT [\"/bin/sh\",\"entrypoint.sh\"]">Dockerfile && docker build . -t my_docker_image && docker run -t my_docker_image""",
    "nvidia-smi"
]
# NOTE(review): this makes a live model call at import time, not just under
# __main__ — presumably intentional for the notebook export; confirm.
print(chat("ls", State([])).run())

# Build the Gradio demo; `code=` re-reads this file and shows the source
# between the two "# $" markers, with the marker characters stripped.
gradio = show(chat,
              initial_state=State([]),
              subprompts=[chat_response],
              examples=examples,
              out_type="json",
              description=desc,
              code=open("chat.py", "r").read().split("$")[1].strip().strip("#").strip(),
              )
if __name__ == "__main__":
    gradio.queue().launch()
|