# GuvichatGPT / app.py
import streamlit as st
from transformers import GPT2LMHeadModel, GPT2Tokenizer
import torch
import sqlite3
import pandas as pd
import pickle
import datetime
# Set up the SQLite database used to log user logins
conn = sqlite3.connect('database.db', check_same_thread=False)
cursor = conn.cursor()
table_create_sql = 'CREATE TABLE IF NOT EXISTS mytables (Username TEXT, Logintime TIMESTAMP);'
cursor.execute(table_create_sql)
# Commit the table creation (the connection stays open for later inserts and reads)
conn.commit()
# Function to insert a login record into the table
def Insert(Username, Logintime):
    try:
        Logintime = datetime.datetime.now()  # record the time of the actual insert
        cursor.execute("INSERT INTO mytables VALUES (?, ?)", (Username, Logintime))
        conn.commit()
        return {'status': 'Data inserted successfully'}
    except Exception as e:
        return {'Error': str(e)}
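# Usage sketch (hypothetical username): Insert("demo_user", datetime.datetime.now())
# returns {'status': 'Data inserted successfully'} on success, or {'Error': ...} on failure.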
# with open("/content/huggmodel3.pkl", "rb") as file:
#     loaded = pickle.load(file)
# Load the fine-tuned model and tokenizer
model_name_or_path = "/content/drive/MyDrive/GuviProject/fine_tuned_model123"
#model_name_or_path = "gpt2"
model = GPT2LMHeadModel.from_pretrained(model_name_or_path)
tokenizer = GPT2Tokenizer.from_pretrained(model_name_or_path)
# Move the model to GPU if available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
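# Optional: Streamlit reruns this script on every interaction, so the lines above
# reload the model each time. A minimal sketch of caching the load (assuming a
# Streamlit version that provides st.cache_resource, i.e. >= 1.18) would be:
#
# @st.cache_resource
# def load_model(path):
#     mdl = GPT2LMHeadModel.from_pretrained(path)
#     tok = GPT2Tokenizer.from_pretrained(path)
#     return mdl, tok
#
# model, tokenizer = load_model(model_name_or_path)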
# Define the text generation function
def generate_text(model, tokenizer, seed_text, max_length=100, temperature=1.0, num_return_sequences=1):
    input_ids = tokenizer.encode(seed_text, return_tensors='pt').to(device)
    with torch.no_grad():
        output = model.generate(
            input_ids,
            max_length=max_length,
            temperature=temperature,
            num_return_sequences=num_return_sequences,
            do_sample=True,
            top_k=50,
            top_p=0.95,
            pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token; this avoids a generate() warning
        )
    generated_texts = [tokenizer.decode(output[i], skip_special_tokens=True) for i in range(num_return_sequences)]
    return generated_texts
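# Example call (hypothetical prompt): generate_text(model, tokenizer, "GUVI offers", max_length=80)
# returns a list of num_return_sequences decoded continuations.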
# Read all login records back into a DataFrame for display
def show_database():
    new_df = pd.read_sql("SELECT * FROM mytables", con=conn)
    return new_df
# Streamlit app
st.set_page_config(
    page_title="Text Generation with GPT-2",
    page_icon="🖼️",
    layout="wide",
)
# ------------------------------- Tabs used to navigate the application --------------------
home, Testing = st.tabs(['Home', 'Testing'])
with home:
    st.markdown("# :red[Text Generation with GPT-2]")
    st.subheader(':violet[Login details]')
    Username = st.text_input("Enter username")
    Logintime = datetime.datetime.now()
    if st.button('Login'):
        Insert(Username, Logintime)
        df = show_database()
        st.write(df)
    st.markdown('<div style="height: 50px;"></div>', unsafe_allow_html=True)
    st.markdown("### :blue[Technologies :] Deep Learning, Transformers, Hugging Face models, LLM, Streamlit")
    st.markdown("### :blue[Overview :] This project deploys a pre-trained or fine-tuned GPT model on GUVI's company data "
                "using Hugging Face Spaces, making it accessible through a web application built with Streamlit. "
                "As a user-friendly online application, the model can handle initial customer inquiries, provide information "
                "on courses, pricing, and enrollment procedures, and escalate complex issues to human "
                "agents when necessary. The marketing team can input topics or keywords into the web "
                "application, and the model will generate relevant, high-quality content that can be edited "
                "and published. Students can interact with the virtual assistant through the web "
                "application to get immediate answers to their questions, clarifications on course "
                "material, and personalized study recommendations.")
    st.markdown("### :blue[Domain :] AIOps (Artificial Intelligence for IT Operations)")
with Testing:
    st.write("This app generates text using a fine-tuned GPT-2 model. Enter a prompt and the model will generate a continuation.")
    st.info("This app's data is continuously improved, but it may still contain inaccuracies.")
    seed_text = st.text_input("Enter your prompt:", "Google is known for")
    max_length = st.slider("Max Length:", min_value=50, max_value=500, value=100)
    temperature = st.slider("Temperature:", min_value=0.1, max_value=2.0, value=1.0)
    if st.button("Generate"):
        with st.spinner("Generating text..."):
            generated_texts = generate_text(model, tokenizer, seed_text, max_length, temperature)
            for i, generated_text in enumerate(generated_texts):
                st.subheader(f"Generated Text {i + 1}")
                st.write(generated_text)
    st.warning("This data was collected only from the public website, not from GUVI directly.")