from transformers import AutoTokenizer, AutoModelForCausalLM
import gradio as gr
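# Load the instruction-tuned RWKV "Raven" 1.5B model and its tokenizer from the Hugging Face Hub.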
model_id = "RWKV/rwkv-raven-1b5"
model = AutoModelForCausalLM.from_pretrained(model_id)
tokenizer = AutoTokenizer.from_pretrained(model_id)
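# The model loads on CPU by default; if a GPU is available, model.to("cuda") (and moving the
# tokenized inputs to the same device) would speed up generation.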
def chat(question):
    # Wrap the user's question in the instruction/response template used by the Raven chat models.
    prompt = f"### Instruction: {question}\n### Response:"
    inputs = tokenizer(prompt, return_tensors="pt")
    output = model.generate(inputs["input_ids"], max_new_tokens=100)
    # Return (rather than print) the decoded text so Gradio can display it in the UI.
    return tokenizer.decode(output[0].tolist(), skip_special_tokens=True)
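# Minimal Gradio interface: a single textbox in, the generated answer out as plain text.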
iface = gr.Interface(
    fn=chat,
    inputs=gr.Textbox(label="Enter your text"),
    outputs="text",
    title="Chat with Surrey County Council InfoBot",
)
# index = construct_index("docs")
iface.launch()