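"""Gradio demo for RajuKandasamy/tamillama_tiny_30m: completions are generated in a
background thread and streamed chunk by chunk into a chat interface."""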
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
from threading import Thread
import gradio as gr

class ChatbotService:
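    """Hold the model, tokenizer, and a per-request TextIteratorStreamer;
    generation runs in a background thread so output can be streamed."""
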
    def __init__(self, model_name="RajuKandasamy/tamillama_tiny_30m"):
        self.model = AutoModelForCausalLM.from_pretrained(model_name)
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.streamer = None

    def call(self, prompt):
        # Fresh streamer per request; the Gradio callback consumes text chunks from it.
        self.streamer = TextIteratorStreamer(self.tokenizer, skip_prompt=True, timeout=5)
        prompt = prompt.replace("<br>", "\n")
        inputs = self.tokenizer(prompt, return_tensors="pt")
        generation_kwargs = dict(
            input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"],
            streamer=self.streamer, max_new_tokens=512, do_sample=True,
            top_p=0.8, top_k=500, temperature=0.001, repetition_penalty=1.4,  # near-greedy sampling
        )
        # Run generation in a background thread so tokens stream to the UI as they arrive.
        thread = Thread(target=self.model.generate, kwargs=generation_kwargs)
        thread.start()
        return ""


# Example prompts ("சொற்கள்" = "Words", "சுருக்கம்" = "Summary"); one prompt uses the English format.
example_questions = [
    """சொற்கள்:
வீழ்ச்சி, சீட்டு, பிடிவாதம்
சுருக்கம்:""",
    """சொற்கள்:
ஓட்டம், பயணம், குழப்பம்
சுருக்கம்:""",
    """Words: prevent, car, broken
Features: Dialogue""",
    """சொற்கள்:
திரும்பு, வாசனை திரவியம், துணிச்சல்
சுருக்கம்:""",
]


# Load the model and tokenizer once at startup.
chatbot_service = ChatbotService()


with gr.Blocks() as demo:
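    # Chat UI: chatbot pane, prompt textbox, Run/Clear buttons, and an example-prompt dropdown.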
    chatbot = gr.Chatbot()
    with gr.Row():
        msg = gr.Textbox(placeholder="Type your message here...", label="Story Prompt:")
        run = gr.Button("Run")
    # Selecting an example copies it into the prompt textbox.
    examples_dropdown = gr.Dropdown(choices=example_questions, label="Select an example prompt")
    examples_dropdown.change(fn=lambda x: x, inputs=examples_dropdown, outputs=msg)

    clear = gr.Button("Clear")

    def user(user_message, history):
        # Append the new prompt to the chat history and clear the textbox.
        if history is None:
            history = []
        return "", history + [[user_message, None]]

    def bot(history):
        # Start threaded generation for the latest prompt, then stream text back into the chat.
        chatbot_service.call(history[-1][0])
        history[-1][1] = ""
        for new_text in chatbot_service.streamer:
            history[-1][1] += new_text
            yield history

    run.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot, chatbot, chatbot)
    clear.click(lambda: None, None, chatbot, queue=False)

demo.queue()
demo.launch()