# Blog_Generation / app.py
# drkareemkamal's picture
# Update app.py
# e498dfc verified
import streamlit as st
from langchain.prompts import PromptTemplate
from langchain.llms import CTransformers
# functio to get response from LLAMA 2 model
def get_llama_response(input_text, no_words, blog_style):
    """Generate a short blog post with a local LLaMA-2 chat model.

    Parameters
    ----------
    input_text : str
        Topic of the blog post.
    no_words : str
        Target word count (interpolated into the prompt as text).
    blog_style : str
        Audience/job profile the blog should be written for.

    Returns
    -------
    str
        The raw text completion produced by the model.
    """
    # Load the quantized LLaMA-2 7B chat model via ctransformers.
    # NOTE(review): this reloads the model on every call — fine for a demo,
    # but consider caching (e.g. st.cache_resource) for responsiveness.
    llm = CTransformers(model='TheBloke/Llama-2-7B-Chat-GGML',
                        model_type='llama',
                        config={'max_new_tokens': 256,
                                'temperature': 0.01})

    # Prompt template (runtime string preserved verbatim).
    template = """
write a blog for {blog_style} job profile for a topic {input_text}
within {no_words} words
"""
    # BUG FIX: the keyword was misspelled 'input_vairables'; PromptTemplate
    # expects 'input_variables', so the original either failed validation or
    # silently ignored the variable list.
    prompt = PromptTemplate(input_variables=['blog_style', 'input_text', 'no_words'],
                            template=template)

    # Fill the template and run the completion.
    response = llm(prompt.format(blog_style=blog_style,
                                 input_text=input_text,
                                 no_words=no_words))
    print(response)  # echo to server logs for debugging
    return response
# --- Page setup (must run before any other Streamlit call) ---
st.set_page_config(
    page_title='Generate Blogs',
    page_icon='',
    layout='centered',
    initial_sidebar_state='collapsed',
)
st.header('Generate Blogs ')

# --- Inputs: topic on its own row, then two equal-width columns ---
topic = st.text_input('Enter the blog Topic')

left_col, right_col = st.columns([5, 5])
with left_col:
    word_count = st.text_input('No. of words ')
with right_col:
    audience = st.selectbox(
        'Wiriting the blog for ',
        ('Researchers', 'Data Scientist', 'Common People'),
        index=0,
    )

generate_clicked = st.button('Generate')

# --- Render the generated blog once the user clicks the button ---
if generate_clicked:
    st.write(get_llama_response(topic, word_count, audience))