File size: 526 Bytes
12e65a2
a4a7438
9ddca05
12e65a2
9ddca05
12e65a2
9ddca05
a4a7438
 
9ddca05
 
 
 
36b0a4e
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
import gradio as gr
from transformers import pipeline
from transformers import AutoModelForCausalLM

# Alternative approach left commented out: a text-generation pipeline over the
# same checkpoint (would also need the unused `pipeline` import above).
# chat = pipeline("text-generation", model="LeoLM/leo-mistral-hessianai-7b-chat")

# Load the causal-LM checkpoint 4-bit quantized, letting accelerate place
# layers across available devices.
# NOTE(review): `model` is never referenced below — the chat handler returns a
# fixed string — so this load only costs startup time and memory; confirm it
# is meant to be wired into chat_with_model.
# NOTE(review): passing load_in_4bit= directly is deprecated in newer
# transformers releases in favor of quantization_config=BitsAndBytesConfig(...)
# — confirm the pinned transformers version.
model = AutoModelForCausalLM.from_pretrained("LeoLM/leo-mistral-hessianai-7b-chat", load_in_4bit=True, device_map="auto")


def chat_with_model(prompt, history):
    """Gradio ChatInterface handler — currently a stub.

    Parameters
    ----------
    prompt : str
        The user's latest message (ignored by this stub).
    history : list
        Prior conversation turns supplied by gradio (ignored by this stub).

    Returns
    -------
    str
        A fixed placeholder reply; the model loaded at module level is not
        consulted yet.
    """
    # TODO(review): generate a reply with the loaded `model` (or at least echo
    # `prompt`, matching the "Echo Bot" title) instead of a fixed placeholder.
    return "works"
# Build the chat UI around the handler and start the local Gradio server
# (blocks until the server is stopped).
# NOTE(review): the title says "Echo Bot" but the handler returns a fixed
# string rather than echoing — confirm which behavior is intended.
demo = gr.ChatInterface(fn=chat_with_model, title="Echo Bot")
demo.launch()