abishek-official's picture
Update app.py
710054d verified
raw
history blame
1.7 kB
from langchain.llms import HuggingFacePipeline
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
import os
import gradio as gr
# --- Model & chain setup -----------------------------------------------------
# Small local text-generation model, run via a transformers pipeline.
model_id = "distilgpt2"

# NOTE(review): the token is only needed for remote HuggingFaceHub inference;
# HuggingFacePipeline loads the model locally and does not accept a token.
# Kept here in case other deployment code reads it from the environment.
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
# LLM = HuggingFaceHub(
# repo_id=model_id,
# model_kwargs={"temperature":0.7,"max_new_tokens":700})

# Bug fix: `huggingfacehub_api_token` is not a valid parameter of
# HuggingFacePipeline.from_model_id (local pipeline, no remote auth) and
# previously raised a TypeError at startup.
hf = HuggingFacePipeline.from_model_id(
    model_id=model_id,
    task="text-generation",
    pipeline_kwargs={"max_new_tokens": 100},
)

from langchain.memory import ConversationBufferWindowMemory

# Keep only the last 3 conversation turns.
# NOTE(review): the prompt template below has no {history} placeholder, so the
# buffered history is not actually injected into the prompt — confirm whether
# memory is wanted here or the template should include it.
memory = ConversationBufferWindowMemory(k=3)

template = """
You are Abi, a useful AI assistant and answer for user questions.
Question: {question}
Answer:
"""
prompt = PromptTemplate.from_template(template)
# promt = PromptTemplate(template = template, input_variables=['question'])
# Bug fix: this line previously referenced the misspelled, undefined name
# `promt`, which raised NameError when the module was imported.
llm_chain = LLMChain(llm=hf, prompt=prompt, memory=memory)
# llm_chain = promt | hf
def ask(question, history):
    """Gradio ChatInterface callback: answer *question* with the LLM chain.

    Parameters
    ----------
    question : str
        The user's latest message.
    history : list
        Prior chat turns supplied by gr.ChatInterface; unused here because
        the chain keeps its own window memory.

    Returns
    -------
    str
        The generated answer text.
    """
    ans = llm_chain.invoke({"question": question})
    # Bug fix: LLMChain.invoke returns a dict (inputs plus the "text" output
    # key); gr.ChatInterface expects a plain string reply, so returning the
    # raw dict rendered the whole mapping in the chat window.
    return ans["text"]
# --- Gradio UI ---------------------------------------------------------------
demo = gr.ChatInterface(
    ask,
    examples=['How are you doing?', 'What can you do?', 'Tell me a story', 'Tell me a joke', 'Who is prime minister of India?'],
    title="Abi - An AI Assistant",
    # Bug fix: the description previously claimed the app uses Falcon
    # 7b-instruct, but the model actually loaded is distilgpt2 (see model_id).
    description="This is an AI Chatbot Assistant that uses the distilgpt2 LLM to generate the output for the user queries.\n You can further know more from my GitHub Repo👉 https://github.com/abishekbabuofficial/AI-Assistant-Chatbot.\n♥️♥️♥️Application developed by Abishek B♥️♥️♥️",
)
# debug=True surfaces server tracebacks in the Gradio UI while developing.
demo.launch(debug=True)