# EmailGenerator / app.py
# Streamlit app that downloads a quantized Llama-2 chat model at runtime and
# uses it to draft emails from a topic, sender, recipient, and writing style.
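# Run locally with: streamlit run app.py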

import os

import requests
import streamlit as st
from langchain.llms import CTransformers
from langchain.prompts import PromptTemplate


def download_model() -> None:
    """
    Download the model file from Hugging Face and save it to the current directory.
    """
    url = 'https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/resolve/main/llama-2-7b-chat.ggmlv3.q8_0.bin'
    file_name = url.split('/')[-1]
    response = requests.get(url, stream=True)
    response.raise_for_status()  # abort on a failed request instead of writing an empty file
    # Stream the download to disk in 1 KB chunks so the whole file never sits in memory.
    with open(file_name, 'wb') as file:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:
                file.write(chunk)
    print("File downloaded successfully!")


def getLLMResponse(form_input: str, email_sender: str, email_recipient: str, email_style: str) -> str:
    """
    Generate an email with the LLM.

    :param form_input: Email topic provided by the user.
    :param email_sender: Sender name provided by the user.
    :param email_recipient: Recipient name provided by the user.
    :param email_style: Writing style provided by the user.
    :return: Generated email text.
    """
    # Load the quantized Llama-2 chat model via ctransformers; a near-zero
    # temperature keeps the output focused on the requested topic.
    llm = CTransformers(model='llama-2-7b-chat.ggmlv3.q8_0.bin',
                        model_type='llama',
                        config={'max_new_tokens': 256,
                                'temperature': 0.01})

    template = """
    Write an email in a {style} style that covers the topic: {email_topic}.\n\nSender: {sender}\nRecipient: {recipient}
    \n\nEmail Text:
    """
    prompt = PromptTemplate(
        input_variables=["style", "email_topic", "sender", "recipient"],
        template=template,
    )

    response = llm(prompt.format(email_topic=form_input,
                                 sender=email_sender,
                                 recipient=email_recipient,
                                 style=email_style))
    print(response)
    return response
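

# --- Streamlit UI ---
# Page configuration, a one-time "Load Model" gate, and the email form.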
st.set_page_config(page_title="Generate Emails",
                   page_icon='📧',
                   layout='centered',
                   initial_sidebar_state='collapsed')
st.header("Generate Emails 📧")

# Gate the app on a one-time model download; st.session_state keeps the flag across reruns.
model_loaded = st.session_state.get('model_loaded', False)
if not model_loaded:
    if st.button('Load Model'):
        model_file = 'llama-2-7b-chat.ggmlv3.q8_0.bin'
        if not os.path.isfile(model_file):
            st.info('Loading the model, this could take ~5 minutes')
            download_model()
        st.session_state.model_loaded = True
        st.info('Model loaded successfully')
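
# The email form is only shown once the model file is available locally.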
if st.session_state.get('model_loaded'):
    form_input = st.text_area('Enter the email topic', height=275)

    col1, col2, col3 = st.columns([10, 10, 5])
    with col1:
        email_sender = st.text_input('Sender Name')
    with col2:
        email_recipient = st.text_input('Recipient Name')
    with col3:
        email_style = st.selectbox('Writing Style',
                                   ('Formal', 'Appreciating', 'Not Satisfied', 'Neutral'),
                                   index=0)

    submit = st.button("Generate")
    if submit:
        st.write(getLLMResponse(form_input, email_sender, email_recipient, email_style))
else:
    st.write("Please load the model to proceed.")