# final_bio_mimic / app.py
# (Hugging Face Spaces upload metadata: author Rushi2903, "Update app.py", commit 23ad3f4)
"""BIOMIMICRY Streamlit app: embedding-based retrieval over a sentence corpus,
answered by an OpenAI completion model."""

# --- Imports: stdlib / third-party / local ---------------------------------
import os
import pickle
import textwrap

import nltk
import numpy as np
import openai
import streamlit as st
from nltk.tokenize import sent_tokenize

import operations as op

# --- OpenAI credentials -----------------------------------------------------
# SECURITY: the original source hard-coded a live API key here. Any key
# committed to version control must be treated as compromised and revoked.
# Read the key from the environment instead (set OPENAI_API_KEY before launch).
org = 'org-JUm8VrpZZhXblDWHMVmxnLTF'  # organization id — not applied below; TODO confirm whether it is still needed
openai.api_key = os.getenv("OPENAI_API_KEY")

# Sentence-tokenizer model required by nltk's sent_tokenize (no-op if cached).
nltk.download('punkt')

st.title("BIOMIMICRY")

# --- Pre-computed corpus artifacts shipped with the app ----------------------
# concat_list: list of corpus sentences; content_embeddings: their vectors.
# NOTE(review): pickle.load executes arbitrary code from untrusted files —
# acceptable here only because the artifact is bundled with the app itself.
cl_file_path = 'concat_list.pkl'
with open(cl_file_path, 'rb') as file:
    concat_list = pickle.load(file)

file_path = 'content_embeddings.npy'
content_embeddings = np.load(file_path)
###############################################################################################
# GPT CALL
def final_ask(query, prompt_content, max_tokens=100, temperature=0.7):
    """Answer *query* with the Davinci completion model, grounded in *prompt_content*.

    Parameters
    ----------
    query : str
        The user's natural-language question.
    prompt_content : str
        Retrieved context sentences joined into a single string.
    max_tokens : int, optional
        Upper bound on the length of the generated answer (default 100).
    temperature : float, optional
        Sampling temperature passed to the model (default 0.7).

    Returns
    -------
    str
        The model's answer with surrounding whitespace stripped.
    """
    # Structured prompt: question + retrieved context, with an explicit
    # fallback instruction so the model declines rather than hallucinates.
    prompt = f'''You are an expert in biomimicry, and you are asked to answer the following question:
Question: {query}
Context: {prompt_content}
Please respond to the question as if you were having a natural language conversation, using the given context. If the answer is not contained within the provided text, kindly state "I don't have that information."'''
    # Generate the response using the Davinci model.
    # NOTE(review): text-davinci-003 and the Completion endpoint are deprecated
    # by OpenAI; migrating to the chat-completions API is recommended.
    response = openai.Completion.create(
        engine="text-davinci-003",
        prompt=prompt,
        max_tokens=max_tokens,
        temperature=temperature,
        n=1,
        stop=None
    )
    # The single (n=1) generated choice carries the answer text.
    answer = response.choices[0].text.strip()
    return answer
##################################################################################################
# --- Main interaction: retrieve context, then ask the model ------------------
query = st.text_input('Ask me anything!', placeholder='Type.....')

try:
    if st.button("Confirm!"):
        if not query.strip():
            # Robustness: don't spend an API call on an empty question.
            st.warning("Please type a question first.")
        else:
            # Embed the question and rank corpus sentences by cosine similarity.
            que_embedd = op.create_query_embeddings(query)
            cosine_lis = op.calculate_cosine(que_embedd, content_embeddings, concat_list)
            # Keep the 16 highest-ranked sentences as context for the model.
            indexes_final = op.fetch_top_rank_ans(cosine_lis, 16)
            sentences = [concat_list[i] for i in indexes_final]
            prompt_content = "\n".join(sentences)
            answer = final_ask(query, prompt_content)
            st.write(answer)
except Exception as e:
    # Show the error plus a friendly message.
    # NOTE(review): rendering the raw exception can leak internals to users;
    # consider logging it server-side instead for production deployments.
    st.write(e)
    st.warning("Something went wrong. Please try again.")