from langchain.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
import os
import gradio as gr
# Model served through the Hugging Face Hub inference API
model_id = "distilgpt2"
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
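# Optional guard (added here for clarity, not part of the original flow): fail fast
# with a readable error if the token is missing instead of a later API failure.
if not HUGGINGFACEHUB_API_TOKEN:
    raise RuntimeError("Set the HUGGINGFACEHUB_API_TOKEN environment variable before starting the app.")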
hf = HuggingFaceHub(
    repo_id=model_id,
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
    model_kwargs={"temperature": 0.7, "max_new_tokens": 50},
)
# Alternative: run the model locally instead of calling the Hub inference API
# (requires `from langchain.llms import HuggingFacePipeline`; no API token needed).
# hf = HuggingFacePipeline.from_model_id(
#     model_id=model_id,
#     task="text-generation",
#     pipeline_kwargs={"max_new_tokens": 100},
# )
from langchain.memory import ConversationBufferMemory
template = """
You are Abi, a helpful AI assistant. Answer the user's questions.
{chat_history}
Question: {question}
Answer:
"""
# prompt = PromptTemplate.from_template(template)
prompt = PromptTemplate(template=template, input_variables=["chat_history", "question"])
memory = ConversationBufferMemory(memory_key="chat_history")
# ConversationBufferMemory tracks the conversation, so the chain fills in {chat_history} itself.
llm_chain = LLMChain(llm=hf, prompt=prompt, memory=memory, verbose=False)

def ask_bot(query, history=None):
    # Gradio's ChatInterface passes (message, history); the chain's own memory is used instead of the Gradio history.
    result = llm_chain({"question": query})
    return result["text"]
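# Quick sanity check (optional sketch, assumes HUGGINGFACEHUB_API_TOKEN is set):
# uncomment to try the chain from a terminal before launching the UI.
# print(ask_bot("Hello, who are you?"))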
demo = gr.ChatInterface(
    ask_bot,
    examples=["How are you doing?", "What can you do?", "Tell me a story", "Tell me a joke", "Who is the prime minister of India?"],
    title="Abi - An AI Assistant",
    description="This is an AI chatbot assistant that uses an LLM hosted on the Hugging Face Hub (distilgpt2 here) to answer user queries.\n You can find out more in my GitHub repo 👉 https://github.com/abishekbabuofficial/AI-Assistant-Chatbot.\n♥️♥️♥️ Application developed by Abishek B ♥️♥️♥️",
)
demo.launch(debug=True)