import streamlit as st
from langchain.prompts import PromptTemplate
from langchain.llms import CTransformers


def getmodel(text, no_words, blog_style):
    # Load the locally stored, quantized LLaMA 2 chat model via CTransformers
    llm = CTransformers(
        model="llama-2-7b-chat.ggmlv3.q8_0.bin",
        model_type="llama",
        config={"max_new_tokens": 256, "temperature": 0.01},
    )

    # Prompt template describing the blog-generation task
    template = """Write a blog for a {blog_style} job profile on the topic {text} within {no_words} words."""
    prompt = PromptTemplate(
        input_variables=["blog_style", "text", "no_words"],
        template=template,
    )

    # Fill in the template with the user's inputs and generate the blog text
    response = llm(prompt.format(blog_style=blog_style, text=text, no_words=no_words))
    return response


# Streamlit page configuration and UI
st.set_page_config(
    page_title="Generate Blogs",
    page_icon="@",
    layout="centered",
    initial_sidebar_state="collapsed",
)
st.header("Generate Blogs")

input_txt = st.text_input("Enter blog topic: ")

col1, col2 = st.columns([5, 5])
with col1:
    no_words = st.text_input("No. of words: ")
with col2:
    blog_style = st.selectbox(
        "Writing blog for: ", ("Researchers", "Data Scientist"), index=0
    )

submit = st.button("Generate")

if submit:
    st.write(getmodel(input_txt, no_words, blog_style))
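
# Usage note (a sketch under assumptions: the script is saved as app.py and the
# GGML model file llama-2-7b-chat.ggmlv3.q8_0.bin has been downloaded into the
# working directory; both the filename and paths are taken from the code above,
# not verified here):
#
#   pip install streamlit langchain ctransformers
#   streamlit run app.py
#
# Streamlit serves the form in the browser; clicking "Generate" calls getmodel()
# and renders the model's response on the page.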