# MaxMini chat bot — Streamlit app (Hugging Face Space)
import os

import streamlit as st
from streamlit_chat import message
from langchain.prompts import PromptTemplate
from langchain import LLMChain
from langchain_community.llms.huggingface_hub import HuggingFaceHub
# Hosted MaxMini model, reached through the Hugging Face Inference API.
# The access token is read from the HF_TOKEN environment variable; sampling
# is enabled and generation is capped at 250 new tokens.
llm = HuggingFaceHub(
    repo_id="suriya7/MaxMini-Instruct-248M",
    task='text2text-generation',
    huggingfacehub_api_token=os.getenv('HF_TOKEN'),
    model_kwargs={"do_sample": True, "max_new_tokens": 250},
)
# System prompt for the chat model. {previous_history} carries the earlier
# turns so the stateless LLM can follow the conversation; {question} is the
# current user message. The original prompt text was garbled English
# ("who give clear an well having a conversation ..."); it is reworded here
# for clarity while keeping the same persona, placeholders, and structure.
template = """You are a friendly chatbot called "MAXMINI" who gives clear and helpful answers while having a conversation with a human. You were created by Suriya, an AI enthusiast.
previous chat:
{previous_history}
User:
{question}
Chatbot:"""

prompt = PromptTemplate(template=template, input_variables=['question', 'previous_history'])
# Bind the prompt to the model; verbose=True logs each rendered prompt,
# which helps when debugging the history injection.
llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=True)
def conversational_chat(user_query):
    """Run one chat turn.

    Renders the prior exchanges from ``st.session_state['history']`` into the
    prompt's ``previous_history`` slot, queries the LLM chain with the user's
    question, records the new (question, answer) pair, and returns the answer.

    Fixes vs. the original: turns are joined with newlines ("".join ran them
    together with no separator, e.g. "...Chatbot: hiHuman: ..."), and the dead
    module-level ``previous_response`` global — always shadowed by the local —
    is removed.
    """
    # History holds (user, bot) tuples; keep the original None-guard in case
    # a placeholder entry ever slips in.
    previous_history = "\n".join(
        f"Human: {pair[0]}\nChatbot: {pair[1]}"
        for pair in st.session_state['history']
        if pair is not None
    )
    result = llm_chain.predict(
        question=user_query,
        previous_history=previous_history,
    )
    st.session_state['history'].append((user_query, result))
    return result
# Page header.
st.title("Chat Bot MaxMini:")
st.text("I am MaxMini Your Friendly Assistant")
st.markdown("Built by [Suriya❤️](https://github.com/theSuriya)")

# Initialise conversation state exactly once per session. The original code
# reset st.session_state['history'] unconditionally, which wiped the chat
# history on every Streamlit rerun (i.e. after every user interaction).
if 'history' not in st.session_state:
    st.session_state['history'] = []
if 'assistant' not in st.session_state:
    st.session_state['assistant'] = ['Hey There! How Can I Assist You']
    # Blank placeholder keeps the two display lists index-aligned.
    st.session_state['human'] = [" "]
# Page layout: one container for the rendered transcript, one for the
# input-handling section below it.
response_container = st.container()
container = st.container()

# Streamlit pins the chat input to the bottom of the page; its value is the
# user's message for this rerun, or None when nothing was submitted.
user_input = st.chat_input("Ask Your Questions 👉..")
# Handle a submitted message: run the chain and record both sides of the turn.
with container:
    if user_input:
        output = conversational_chat(user_input)
        st.session_state['human'].append(user_input)
        st.session_state['assistant'].append(output)

# Render the transcript. Index 0 pairs the bot's greeting with a blank human
# placeholder, so only the human side is skipped there. The original
# `if i != 0` guard skipped BOTH messages at index 0, which meant the
# greeting ('Hey There! How Can I Assist You') was never displayed.
if st.session_state['assistant']:
    with response_container:
        for i, reply in enumerate(st.session_state['assistant']):
            if i != 0:
                message(st.session_state["human"][i], is_user=True,
                        key=str(i) + '_user', avatar_style="adventurer")
            message(reply, key=str(i), avatar_style="bottts")