codewithdark committed
Commit fa8e86c · verified · 1 Parent(s): 4cac5ca

Update app.py

Files changed (1)
  app.py  +33 -95
app.py CHANGED
@@ -5,14 +5,8 @@ import sqlite3
 import google.generativeai as genai
 # import pyttsx3
 import pyperclip
-import requests
-from PIL import Image
-import io
 
 
-API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
-headers = {"Authorization": "Bearer Your_huggingface_Api_key"}
-
 def local_css(file_name):
     with open(file_name) as f:
         st.markdown(f"<style>{f.read()}</style>", unsafe_allow_html=True)
@@ -32,25 +26,6 @@ try:
 except Exception as e:
     st.error(f"An error occurred: {e}")
 
-def generate_image_from_model(prompt):
-    try:
-        response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
-        response.raise_for_status()  # Raise an error for bad responses
-        image_bytes = response.content
-        if not image_bytes:
-            raise ValueError("Empty image content received from the API")
-        image = Image.open(io.BytesIO(image_bytes))
-        return image
-    except Exception as e:
-        st.error(f"Error generating image from model: {e}")
-
-        return None
-
-def generate_image(prompt):
-    response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
-    image_bytes = response.content
-    image = Image.open(io.BytesIO(image_bytes))
-    return image
 
 # Streamlit app
 def main():
@@ -63,8 +38,11 @@ def main():
 
     models = {
         "🚀 Airoboros 70B": "airoboros-70b",
+        "👑 Gemini 1.0": "gemini-1.0-pro",
+        "🧨 Gemini 1.0 Pro ": "gemini-1.0-pro-001",
+        "⚡ Gemini 1.0 pro latest": "gemini-1.0-pro-latest",
         "🔮 Gemini Pro": "gemini-pro",
-        "📷 StabilityAI": "stabilityai/stable-diffusion-xl-base-1.0"
+        "🎉 Gemini pro vision": "gemini-pro-vision"
     }
 
     columns = st.columns(3)  # Split the layout into three columns
@@ -106,40 +84,41 @@ def main():
     user_input = st.chat_input("Ask Anything ...")
 
     if user_input:
-        if selected_model == "gemini-pro":
+        if selected_model == "airoboros-70b":
             try:
+                client = Client()
+                response = client.chat.completions.create(
+                    model=models[selected_model_display_name],
+                    messages=[{"role": "user", "content": user_input}],
+                )
+                bot_response = response.choices[0].message.content
 
-                if user_input.startswith("/image"):
-                    prompt = user_input[len("/image"):].strip()  # Extract prompt after "/image"
+                st.session_state.chat_history.append({"role": "user", "content": user_input})
+                st.session_state.chat_history.append({"role": "bot", "content": bot_response})
 
-                    # Use Gemini Pro to generate content based on the prompt
-                    GOOGLE_API_KEY = "AIzaSyC8_gwU5LSVQJk3iIXyj5xJ94ArNK11dXU"
-                    genai.configure(api_key=GOOGLE_API_KEY)
-                    model = genai.GenerativeModel('gemini-1.0-pro')
-                    response = model.generate_content(prompt)
-                    bot_response = response.candidates[0].content.parts[0].text
+                # Store chat in the database
+                for chat in st.session_state.chat_history:
+                    c.execute("INSERT INTO chat_history VALUES (?, ?, ?)",
+                              (st.session_state.conversation_id, chat["role"], chat["content"]))
+                conn.commit()
 
-                    # Generate image based on the generated text prompt
-                    generated_image = generate_image(bot_response)
+                # Display chat history
+                for index, chat in enumerate(st.session_state.chat_history):
+                    with st.chat_message(chat["role"]):
+                        if chat["role"] == "user":
+                            st.markdown(chat["content"])
+                        elif chat["role"] == "bot":
+                            st.markdown(chat["content"])
 
-                    st.session_state.chat_history.append({"role": "user", "content": user_input})
-                    st.session_state.chat_history.append({"role": "bot", "content": generated_image})
-
-                    # Display the generated image
-                    if generated_image is not None:
-                        for index, chat in enumerate(st.session_state.chat_history):
-                            with st.chat_message(chat["role"]):
-                                if chat["role"] == "user":
-                                    st.markdown(user_input)
-                                elif chat["role"] == "bot":
-                                    st.image(generated_image, width=400)
-                    else:
-                        st.error("Failed to generate image. Check logs for details.")
-
-                else:
-                    GOOGLE_API_KEY = "your_gemini_Api_key"
+
+            except Exception as e:
+                st.error(f"An error occurred: {e}")
+
+        else:
+            try:
+                GOOGLE_API_KEY = "your_Gemini_Api_key"
                 genai.configure(api_key=GOOGLE_API_KEY)
-                    model = genai.GenerativeModel('gemini-1.0-pro')
+                model = genai.GenerativeModel(selected_model)
                 prompt = user_input
                 response = model.generate_content(prompt)
                 bot_response = response.candidates[0].content.parts[0].text
@@ -163,48 +142,7 @@ def main():
             except Exception as e:
                 st.error(f"An error occurred: {e}")
 
-        elif selected_model == "stabilityai/stable-diffusion-xl-base-1.0":
-            prompt = user_input
-            generated_image = generate_image_from_model(prompt)
-            if generated_image is not None:
-                for index, chat in enumerate(st.session_state.chat_history):
-                    with st.chat_message(chat["role"]):
-                        if chat["role"] == "user":
-                            st.markdown(user_input)
-                        elif chat["role"] == "bot":
-                            st.image(generated_image, width=400)
-            else:
-                st.error("Failed to generate image. Check logs for details.")
 
-        else:
-            try:
-                client = Client()
-                response = client.chat.completions.create(
-                    model=models[selected_model_display_name],
-                    messages=[{"role": "user", "content": user_input}],
-                )
-                bot_response = response.choices[0].message.content
-
-                st.session_state.chat_history.append({"role": "user", "content": user_input})
-                st.session_state.chat_history.append({"role": "bot", "content": bot_response})
-
-                # Store chat in the database
-                for chat in st.session_state.chat_history:
-                    c.execute("INSERT INTO chat_history VALUES (?, ?, ?)",
-                              (st.session_state.conversation_id, chat["role"], chat["content"]))
-                conn.commit()
-
-                # Display chat history
-                for index, chat in enumerate(st.session_state.chat_history):
-                    with st.chat_message(chat["role"]):
-                        if chat["role"] == "user":
-                            st.markdown(chat["content"])
-                        elif chat["role"] == "bot":
-                            st.markdown(chat["content"])
-
-
-            except Exception as e:
-                st.error(f"An error occurred: {e}")
 
 
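For context, the Gemini path this commit introduces can be exercised on its own, outside Streamlit. The sketch below is a minimal, hypothetical reproduction of the new else-branch, assuming only the `google-generativeai` package; `selected_model` stands in for any id from the `models` dict above, and `GOOGLE_API_KEY` is a placeholder the user must supply, exactly as in the diff.

```python
# Minimal sketch (not the app itself): call one of the Gemini models the commit
# adds to the `models` dict, using the same google.generativeai calls as app.py.
import google.generativeai as genai

GOOGLE_API_KEY = "your_Gemini_Api_key"   # placeholder, as in the diff
selected_model = "gemini-1.0-pro"        # any value from the models dict

genai.configure(api_key=GOOGLE_API_KEY)
model = genai.GenerativeModel(selected_model)

response = model.generate_content("Ask Anything ...")
# Read the reply the same way the diff does:
bot_response = response.candidates[0].content.parts[0].text
print(bot_response)
```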
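The chat-history persistence that survives the refactor (the `c.execute(...)` / `conn.commit()` block) writes one row per chat turn into a three-column `chat_history` table. A standalone sketch of that pattern follows; the table schema and database filename are assumptions, since the CREATE TABLE statement is not part of this diff.

```python
# Sketch of the persistence pattern used in app.py; the column names and the
# filename are assumptions -- the diff only shows a 3-value INSERT, not the schema.
import sqlite3

conn = sqlite3.connect("chat_history.db")   # hypothetical filename
c = conn.cursor()
c.execute(
    "CREATE TABLE IF NOT EXISTS chat_history (conversation_id TEXT, role TEXT, content TEXT)"
)

conversation_id = "demo-1"
chat_history = [
    {"role": "user", "content": "Hello"},
    {"role": "bot", "content": "Hi there!"},
]

# Same insert shape as the diff: one row per chat turn.
for chat in chat_history:
    c.execute(
        "INSERT INTO chat_history VALUES (?, ?, ?)",
        (conversation_id, chat["role"], chat["content"]),
    )
conn.commit()
conn.close()
```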