Spaces:
Sleeping
Sleeping
Upload 9 files
Browse files- .env +1 -0
- .gitattributes +35 -35
- .gitignore +2 -0
- app.py +129 -0
- create_database.py +40 -0
- icon.png +0 -0
- phones.db +0 -0
- requirements.txt +2 -0
- style.css +3 -0
.env
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
GEMINI_API_KEY="REPLACE_ME"  # SECURITY: a real API key was previously committed here and must be revoked; never commit live credentials — keep .env untracked and load keys from deployment secrets
|
.gitattributes
CHANGED
@@ -1,35 +1,35 @@
|
|
1 |
-
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
-
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
-
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
.env
|
2 |
+
env
|
app.py
ADDED
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
from itertools import tee
|
3 |
+
import google.generativeai as genai
|
4 |
+
import sqlite3
|
5 |
+
|
6 |
+
|
7 |
+
def connect_to_database(db_path):
    """Open a SQLite connection to *db_path* and sanity-check it.

    Lists the tables found (or reports an empty schema) on stdout as a
    quick smoke test. Returns the open connection on success, or None
    when SQLite refuses the connection.
    """
    try:
        connection = sqlite3.connect(db_path)
        # Probe sqlite_master so a missing/corrupt database is caught up front.
        table_rows = connection.cursor().execute(
            "SELECT name FROM sqlite_master WHERE type='table';"
        ).fetchall()
        if not table_rows:
            print("Connection successful but no tables found.")
        else:
            print("Connection successful. Found tables:", table_rows)
        return connection
    except sqlite3.Error as e:
        print(f"Error connecting to database: {e}")
        return None
|
25 |
+
|
26 |
+
# Connect to the database
# NOTE(review): the connection is opened once at import time and shared
# globally; sqlite3 connections are not thread-safe by default — confirm
# Streamlit's rerun model only touches it from a single thread.
db_connection = connect_to_database('phones.db')

st.set_page_config(layout="wide")
MODEL_AVATAR_URL = "./icon.png"  # avatar shown next to assistant messages

# API key comes from Streamlit secrets (not the committed .env file).
GEMINI = st.secrets["GEMINI_API_KEY"]

genai.configure(api_key=GEMINI)
# Sampling parameters for Gemini; top_k=0 disables top-k filtering.
generation_config = {
    "temperature": 0.7,
    "top_p": 0.95,
    "top_k": 0,
    "max_output_tokens": 8192,
}
# Block medium-and-above harmful content in all four harm categories.
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_MEDIUM_AND_ABOVE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_MEDIUM_AND_ABOVE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_MEDIUM_AND_ABOVE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_MEDIUM_AND_ABOVE"},
]
model = genai.GenerativeModel(model_name="gemini-1.0-pro",
                              generation_config=generation_config,
                              safety_settings=safety_settings)

# NOTE(review): title says "DBRX Instruct" but the backend model is Gemini —
# confirm the page title is intentional.
st.title("DBRX Instruct")

# Inject the custom stylesheet into the page.
with open("style.css") as css:
    st.markdown(f'<style>{css.read()}</style>', unsafe_allow_html=True)

# Chat transcript lives in session state so it survives Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state["messages"] = []
|
58 |
+
|
59 |
+
def clear_chat_history():
    """Callback for the Clear Chat button: wipe the stored transcript."""
    st.session_state["messages"] = []

st.button('Clear Chat', on_click=clear_chat_history)
|
63 |
+
|
64 |
+
|
65 |
+
# Function to generate SQL query
|
66 |
+
# Function to generate SQL query
def generate_sql_query(user_input):
    """Ask Gemini to translate a natural-language question into SQL.

    Returns the query text with any surrounding Markdown code fences
    removed. The prompt asks the model not to fence the query, but the
    model does so anyway often enough that the result must be cleaned
    before it is executed verbatim by execute_sql_query().
    """
    prompt = f"Generate an SQL query to find information based on the user's question: '{user_input}'. Note: The table name is 'phones' and the columns are 'ProductName', 'Brand', 'Storage', 'Color', 'Price', 'QuantityInStock', and 'Location'. Note: Give the query without '''sql at the start and end of the query. just give the query text content"
    response = model.generate_content(prompt)
    query = response.text.strip()
    # Defensive cleanup: strip ```sql ... ``` fences if present.
    if query.startswith("```"):
        query = query.strip("`").strip()
        if query.lower().startswith("sql"):
            query = query[3:].strip()
    print(query)
    return query
|
71 |
+
|
72 |
+
def execute_sql_query(query):
    """Run *query* on the shared connection and return all rows.

    On any failure, returns a human-readable error string instead of
    raising (callers surface the message in the chat UI). The cursor is
    now closed on the error path too — the original leaked it when
    execute() raised.
    """
    try:
        cursor = db_connection.cursor()
        try:
            cursor.execute(query)
            results = cursor.fetchall()
        finally:
            # Close the cursor whether or not the query succeeded.
            cursor.close()
        print(results)
        return results
    except Exception as e:
        return f"Error executing query: {str(e)}"
|
82 |
+
|
83 |
+
def format_response(user_input, query_results):
    """Turn raw SQL results into a conversational answer via Gemini.

    *query_results* is normally a list of row tuples, but
    execute_sql_query() returns an error *string* on failure; that
    string is now surfaced to the user unchanged instead of being
    iterated character-by-character by the join below (the original
    bug).
    """
    # Propagate execution errors directly rather than treating the
    # error string as a row set.
    if isinstance(query_results, str):
        return query_results
    if not query_results:
        return "No data found for your query."

    result_text = f"Found {len(query_results)} results: " + ', '.join(
        str(item) for row in query_results for item in row
    )

    prompt = f"Rephrase this in a more conversational and informative way based on the user's question: '{user_input}'. Here are the details: {result_text}. Answer the user's question in a conversational manner. Note: as its a conversational response, give the response in correct manner with correct formatting"
    formatted_response = model.generate_content(prompt)
    print(formatted_response.text)
    return formatted_response.text
|
93 |
+
|
94 |
+
|
95 |
+
def handle_user_input(user_input):
    """Process one chat turn: record/render the user message, run the
    question through the SQL pipeline, then record/render the answer.

    Renders inside the module-level `history` container so new messages
    appear in the scrolling transcript.
    """
    with history:
        # Record and echo the user's message first.
        st.session_state["messages"].append({"role": "user", "content": user_input})
        with st.chat_message("user"):
            st.markdown(user_input)

        # Pipeline: question -> SQL -> rows -> conversational answer.
        sql_query = generate_sql_query(user_input)
        if sql_query:
            query_results = execute_sql_query(sql_query)
            answer = format_response(user_input, query_results)
        else:
            answer = "Failed to generate a valid SQL query."

        # Single render/record path for the assistant reply.
        with st.chat_message("assistant", avatar=MODEL_AVATAR_URL):
            st.markdown(answer)
        st.session_state["messages"].append({"role": "assistant", "content": answer})
|
115 |
+
|
116 |
+
|
117 |
+
# --- Page layout: scrolling transcript plus chat input box ---
main = st.container()
with main:
    # Fixed-height container so the transcript scrolls independently
    # of the rest of the page.
    history = st.container(height=400)
    with history:
        # Replay the stored conversation on every Streamlit rerun.
        for message in st.session_state["messages"]:
            avatar = None
            if message["role"] == "assistant":
                avatar = MODEL_AVATAR_URL
            with st.chat_message(message["role"], avatar=avatar):
                st.markdown(message["content"])

# Walrus assignment: only handle a turn when the user actually
# submitted text on this rerun.
if prompt := st.chat_input("Type your question:", max_chars=1000):
    handle_user_input(prompt)
|
create_database.py
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""One-off setup script: create phones.db and seed it with sample stock.

Safe to re-run: the table is created only if missing, and rows are
upserted by primary key. (The original used a plain INSERT, which
raised an IntegrityError / UNIQUE-constraint failure on ProductID the
second time the script ran.)
"""
import sqlite3

# Connect to the SQLite database (creates a new file if it doesn't exist).
conn = sqlite3.connect('phones.db')

# The connection as a context manager commits on success and rolls back
# on error, so the seed is all-or-nothing.
with conn:
    # Create the table
    conn.execute("""
        CREATE TABLE IF NOT EXISTS phones (
            ProductID INTEGER PRIMARY KEY,
            ProductName TEXT,
            Brand TEXT,
            Storage TEXT,
            Color TEXT,
            Price REAL,
            QuantityInStock INTEGER,
            Location TEXT
        )
    """)

    # Sample data: (id, name, brand, storage, color, price, qty, shelf)
    data = [
        (1, 'iPhone 13', 'Apple', '128GB', 'Blue', 999, 20, 'Shelf 1'),
        (2, 'Galaxy S21', 'Samsung', '256GB', 'Phantom Black', 899, 15, 'Shelf 2'),
        (3, 'Pixel 6', 'Google', '128GB', 'Stormy Black', 799, 18, 'Shelf 3'),
        (4, 'OnePlus 9', 'OnePlus', '256GB', 'Winter Mist', 729, 25, 'Shelf 1'),
        (5, 'Xperia 1 III', 'Sony', '256GB', 'Frosted Black', 1199, 10, 'Shelf 2'),
        (6, 'Mi 11X', 'Xiaomi', '128GB', 'Cosmic Black', 599, 30, 'Shelf 3'),
        (7, 'Find X3 Pro', 'OPPO', '256GB', 'Gloss Black', 1099, 12, 'Shelf 1'),
        (8, 'ROG Phone 5', 'ASUS', '256GB', 'Phantom Black', 999, 20, 'Shelf 2'),
        (9, 'Vivo X60 Pro', 'Vivo', '256GB', 'Midnight Black', 799, 15, 'Shelf 3'),
        (10, 'Moto G Power 2021', 'Motorola', '64GB', 'Aurora Black', 249, 35, 'Shelf 1')
    ]

    # INSERT OR REPLACE keyed on ProductID makes the seed idempotent.
    conn.executemany(
        "INSERT OR REPLACE INTO phones VALUES (?, ?, ?, ?, ?, ?, ?, ?)", data)

# Close the connection
conn.close()
|
icon.png
ADDED
![]() |
phones.db
ADDED
Binary file (8.19 kB). View file
|
|
requirements.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
google-generativeai
|
2 |
+
streamlit
|
style.css
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
.st-emotion-cache-1tpusnk a{
|
2 |
+
color: #FF5F46;
|
3 |
+
}
|