# Sunny / app.py — "app.py + requirements #1" (commit 3b92a2c, 1.82 kB)
from langchain.llms import CTransformers
from langchain.chains import LLMChain
from langchain import PromptTemplate
import os
import io
import gradio as gr
import time
# System prompt for the Sunny chatbot. `{query}` is the single input variable
# substituted by the PromptTemplate built in set_custom_prompt().
custom_prompt_template = """
You are an AI Chatbot named Sunny, you are created by 'Sic Team' and your task is to provide information to users and chat with them based on given user's query. Below is the user's query.
Query: {query}
You just return the helpful message in English and always try to provide relevant answers to user's query.
"""
def set_custom_prompt():
    """Build Sunny's prompt template, with `query` as its only input variable."""
    return PromptTemplate(
        template=custom_prompt_template,
        input_variables=['query'],
    )
#Loading the model
def load_model():
# Load the locally downloaded model here
llm = CTransformers(
model = "TheBloke/zephyr-7B-beta-GGUF",
model_type="llama",
max_new_tokens = 1096,
temperature = 0.2,
repetition_penalty = 1.13,
gpu=True,
gpu_layers = 3,
)
return llm
# NOTE(review): removed `print(load_model())` — it loaded the full 7B model a
# second time at import only to print its repr; chain_pipeline() below already
# loads the model once for actual use.
def chain_pipeline():
    """Assemble the chat pipeline: load the LLM, build the prompt, return an LLMChain."""
    model = load_model()
    prompt = set_custom_prompt()
    return LLMChain(prompt=prompt, llm=model)
# Build the chain once at import time so every chat turn reuses the loaded model.
llmchain = chain_pipeline()
def bot(query):
    """Run the user's query through the LLM chain and return the reply text."""
    return llmchain.run({"query": query})
# Gradio chat UI: a chatbot pane plus a textbox; submitting the textbox runs
# one chat turn through the LLM chain.
with gr.Blocks(title='Sunny') as main:
    gr.Markdown("# Sunny Chatbot")
    chatbot = gr.Chatbot([], elem_id="chatbot", height=700)
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history):
        """Handle one chat turn: query the LLM, append the (user, bot) pair.

        Returns ("", updated_history) so the textbox is cleared and the
        chatbot pane refreshed.
        """
        bot_message = bot(message)
        chat_history.append((message, bot_message))
        # Fix: dropped the tutorial's `time.sleep(2)` — it added a flat
        # two-second delay after the answer was already computed.
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

main.launch(share=True)