randydev committed on
Commit 3ea8ec0 · verified · 1 Parent(s): 841dd86

Update chatbot/plugins/chat.py

Files changed (1)
  1. chatbot/plugins/chat.py +208 -164
chatbot/plugins/chat.py CHANGED
@@ -1,17 +1,51 @@
  import asyncio
- import logging
  import os

  from pyrogram import Client, filters
- from pyrogram.types import Message

  from database import db
  from logger import LOGS
- from config import *
- from RyuzakiLib import GeminiLatest
  import google.generativeai as genai
  from google.api_core.exceptions import InvalidArgument


  @Client.on_message(
      filters.incoming
@@ -24,177 +58,187 @@ from google.api_core.exceptions import InvalidArgument
          | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
      )
      & filters.private
      & ~filters.via_bot
      & ~filters.forwarded,
      group=2,
  )
  async def chatbot_talk(client: Client, message: Message):
-     try:
-         genai.configure(api_key=GOOGLE_API_KEY)
-         if message.photo:
-             try:
-                 file_path = await message.download()
-                 caption = message.caption or "What's this?"
-                 x = GeminiLatest(api_keys=GOOGLE_API_KEY)
-                 ai_reply = await message.reply_text("Processing...")
-                 backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-                 backup_chat.append({"role": "user", "parts": [{"text": caption}]})
-                 response_reads = x.get_response_image(caption, file_path)
-                 if len(response_reads) > 4096:
-                     with open("chat.txt", "w+", encoding="utf8") as out_file:
-                         out_file.write(response_reads)
-                     await message.reply_document(
-                         document="chat.txt",
-                         disable_notification=True
-                     )
-                     await ai_reply.delete()
-                     os.remove("chat.txt")
-                 else:
-                     await ai_reply.edit_text(response_reads)
-                 backup_chat.append({"role": "model", "parts": [{"text": response_reads}]})
-                 await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-                 os.remove(file_path)
-             except InvalidArgument as e:
-                 await ai_reply.edit_text(f"Error: {e}")
-             except Exception as e:
-                 await ai_reply.edit_text(f"Error: {e}")
              return

-         if message.audio or message.voice:
-             try:
-                 ai_reply = await message.reply_text("Processing...")
-                 audio_file_name = await message.download()
-                 caption = message.caption or "What's this?"
-                 model = genai.GenerativeModel(
-                     model_name="gemini-1.5-flash",
-                     safety_settings={
-                         genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                         genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                         genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                         genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                     }
-                 )
-                 backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-                 backup_chat.append({"role": "user", "parts": [{"text": caption}]})
-                 await ai_reply.edit_text("Uploading file...")
-                 audio_file = genai.upload_file(path=audio_file_name)
-                 while audio_file.state.name == "PROCESSING":
-                     await asyncio.sleep(10)
-                     audio_file = genai.get_file(audio_file.name)
-                 if audio_file.state.name == "FAILED":
-                     await ai_reply.edit_text(f"Error: {audio_file.state.name}")
-                     return
-                 response = model.generate_content(
-                     [audio_file, caption],
-                     request_options={"timeout": 600}
                  )
-                 if len(response.text) > 4096:
-                     with open("chat.txt", "w+", encoding="utf8") as out_file:
-                         out_file.write(response.text)
-                     await message.reply_document(
-                         document="chat.txt",
-                         disable_notification=True
-                     )
-                     await ai_reply.delete()
-                     os.remove("chat.txt")
-                 else:
-                     await ai_reply.edit_text(response.text)
-
-                 backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
-                 await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-
-                 audio_file.delete()
-                 os.remove(audio_file_name)
-             except InvalidArgument as e:
-                 await ai_reply.edit_text(f"Error: {e}")
-             except Exception as e:
-                 await ai_reply.edit_text(f"Error: {e}")
              return

-         if message.video:
-             try:
-                 ai_reply = await message.reply_text("Processing...")
-                 video_file_name = await message.download(file_name="newvideo.mp4")
-                 caption = message.caption or "What's this?"
-                 model = genai.GenerativeModel(
-                     model_name="gemini-1.5-pro",
-                     safety_settings={
-                         genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                         genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                         genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                         genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
-                     }
-                 )
-                 backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-                 backup_chat.append({"role": "user", "parts": [{"text": caption}]})
-                 await ai_reply.edit_text("Uploading file...")
-                 video_file = genai.upload_file(path=video_file_name)
-                 while video_file.state.name == "PROCESSING":
-                     await asyncio.sleep(10)
-                     video_file = genai.get_file(video_file.name)
-                 if video_file.state.name == "FAILED":
-                     await ai_reply.edit_text(f"Error: {video_file.state.name}")
-                     return
-
-                 response = model.generate_content(
-                     [video_file, caption],
-                     request_options={"timeout": 600}
                  )
-                 if len(response.text) > 4096:
-                     with open("chat.txt", "w+", encoding="utf8") as out_file:
-                         out_file.write(response.text)
-                     await message.reply_document(
-                         document="chat.txt",
-                         disable_notification=True
-                     )
-                     await ai_reply.delete()
-                     os.remove("chat.txt")
-                 else:
-                     await ai_reply.edit_text(response.text)
-
-                 backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
-                 await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-                 video_file.delete()
-                 os.remove(video_file_name)
-             except InvalidArgument as e:
-                 await ai_reply.edit_text(f"Error: {e}")
-             except Exception as e:
-                 await ai_reply.edit_text(f"Error: {e}")
              return

-         if message.text:
-             try:
-                 query = message.text.strip()
-                 match = re.search(r"\b(Randy|Rendi)\b(.*)", query, flags=re.IGNORECASE)
-                 if match:
-                     rest_of_sentence = match.group(2).strip()
-                     query_base = rest_of_sentence if rest_of_sentence else query
-                 else:
-                     query_base = query
-
-                 parts = query.split(maxsplit=1)
-                 command = parts[0].lower()
-                 pic_query = parts[1].strip() if len(parts) > 1 else ""
-
-                 model_flash = genai.GenerativeModel(
-                     model_name="gemini-1.5-flash"
                  )
-                 backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
-                 backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
-                 chat_session = model_flash.start_chat(history=backup_chat)
-                 response_data = chat_session.send_message(query_base)
-                 output = response_data.text
-                 if len(output) > 4096:
-                     with open("chat.txt", "w+", encoding="utf8") as out_file:
-                         out_file.write(output)
-                     await message.reply_document(
-                         document="chat.txt",
-                         disable_notification=True
-                     )
-                     os.remove("chat.txt")
-                 else:
-                     await message.reply_text(output)
-                 backup_chat.append({"role": "model", "parts": [{"text": output}]})
-                 await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
-             except Exception as e:
-                 return await message.reply_text(str(e))
 
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ # Copyright 2020-2024 (c) Randy W @xtdevs, @xtsea
+ #
+ # from : https://github.com/TeamKillerX
+ # Channel : @RendyProjects
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU Affero General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU Affero General Public License for more details.
+ #
+ # You should have received a copy of the GNU Affero General Public License
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+ import requests
+ import time
+ import json
  import asyncio
+ import io
  import os
+ import re
+ from PIL import Image

+ from pyrogram import *
  from pyrogram import Client, filters
+ from pyrogram.types import *
+ from pyrogram.errors import *
+ from RyuzakiLib import FaceAI, FullStackDev, GeminiLatest, RendyDevChat
+ from config import *

  from database import db
  from logger import LOGS
+
  import google.generativeai as genai
  from google.api_core.exceptions import InvalidArgument

+ async def geni_files_delete(name: str):
+     url = f"https://generativelanguage.googleapis.com/v1beta/{name}"
+     params = {"key": GOOGLE_API_KEY}
+     response = requests.delete(url, params=params)
+     if response.status_code != 200:
+         return None
+     return response.text

  @Client.on_message(
      filters.incoming
          | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
      )
      & filters.private
+     & ~filters.bot
      & ~filters.via_bot
      & ~filters.forwarded,
      group=2,
  )
  async def chatbot_talk(client: Client, message: Message):
+     genai.configure(api_key=GOOGLE_API_KEY)
+     if message.photo:
+         file_path = await message.download()
+         caption = message.caption or "What's this?"
+         x = GeminiLatest(api_keys=GOOGLE_API_KEY)
+         if client.me.is_premium:
+             ai_reply = await message.reply_text(f"{custom_loading}Processing...")
+         else:
+             ai_reply = await message.reply_text(f"Processing...")
+         try:
+             backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+             backup_chat.append({"role": "user", "parts": [{"text": caption}]})
+             response_reads = x.get_response_image(caption, file_path)
+             if len(response_reads) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(response_reads)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
+                 )
+                 await ai_reply.delete()
+                 os.remove("chat.txt")
+             else:
+                 await ai_reply.edit_text(response_reads)
+             backup_chat.append({"role": "model", "parts": [{"text": response_reads}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+             os.remove(file_path)
              return
+         except InvalidArgument as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+         except Exception as e:
+             return await ai_reply.edit_text(f"Error: {e}")

+     if message.audio or message.voice:
+         if client.me.is_premium:
+             ai_reply = await message.reply_text(f"{custom_loading}Processing...")
+         else:
+             ai_reply = await message.reply_text(f"Processing...")
+         if message.audio:
+             audio_file_name = await message.download()
+         if message.voice:
+             audio_file_name = await message.download()
+         caption = message.caption or "What's this?"
+         model = genai.GenerativeModel(
+             model_name="gemini-1.5-flash",
+             safety_settings={
+                 genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+             }
+         )
+         backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+         backup_chat.append({"role": "user", "parts": [{"text": caption}]})
+         if client.me.is_premium:
+             await ai_reply.edit_text(f"{custom_loading}Uploading file..")
+         else:
+             await ai_reply.edit_text("Uploading file..")
+         audio_file = genai.upload_file(path=audio_file_name)
+         while audio_file.state.name == "PROCESSING":
+             await asyncio.sleep(10)
+             audio_file = genai.get_file(audio_file.name)
+         if audio_file.state.name == "FAILED":
+             return await ai_reply.edit_text(f"Error: {audio_file.state.name}")
+         try:
+             response = model.generate_content(
+                 [audio_file, caption],
+                 request_options={"timeout": 600}
+             )
+             if len(response.text) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(response.text)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
                  )
+                 await ai_reply.delete()
+                 os.remove("chat.txt")
+             else:
+                 await ai_reply.edit_text(response.text)
+             backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+             audio_file.delete()
+             os.remove(audio_file_name)
              return
+         except InvalidArgument as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+         except Exception as e:
+             return await ai_reply.edit_text(f"Error: {e}")

+     if message.video:
+         if client.me.is_premium:
+             ai_reply = await message.reply_text(f"{custom_loading}Processing...")
+         else:
+             ai_reply = await message.reply_text(f"Processing...")
+         video_file_name = await message.download(file_name="newvideo.mp4")
+         caption = message.caption or "What's this?"
+         model = genai.GenerativeModel(
+             model_name="gemini-1.5-pro",
+             safety_settings={
+                 genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+             }
+         )
+         backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+         backup_chat.append({"role": "user", "parts": [{"text": caption}]})
+         if client.me.is_premium:
+             await ai_reply.edit_text(f"{custom_loading}Uploading file..")
+         else:
+             await ai_reply.edit_text("Uploading file..")
+         video_file = genai.upload_file(path=video_file_name)
+         while video_file.state.name == "PROCESSING":
+             await asyncio.sleep(10)
+             video_file = genai.get_file(video_file.name)
+         if video_file.state.name == "FAILED":
+             return await ai_reply.edit_text(f"Error: {video_file.state.name}")
+         try:
+             response = model.generate_content(
+                 [video_file, caption],
+                 request_options={"timeout": 600}
+             )
+             if len(response.text) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(response.text)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
                  )
+                 await ai_reply.delete()
+                 os.remove("chat.txt")
+             else:
+                 await ai_reply.edit_text(response.text)
+             backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+             video_file.delete()
+             os.remove(video_file_name)
              return
+         except InvalidArgument as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+         except Exception as e:
+             return await ai_reply.edit_text(f"Error: {e}")

+     if message.text:
+         query = message.text.strip()
+         match = re.search(r"\b(Randy|Rendi)\b(.*)", query, flags=re.IGNORECASE)
+         if match:
+             rest_of_sentence = match.group(2).strip()
+             query_base = rest_of_sentence if rest_of_sentence else query
+         else:
+             query_base = query
+         parts = query.split(maxsplit=1)
+         command = parts[0].lower()
+         pic_query = parts[1].strip() if len(parts) > 1 else ""
+         try:
+             model_flash = genai.GenerativeModel(
+                 model_name="gemini-1.5-flash"
+             )
+             backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+             backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
+             chat_session = model_flash.start_chat(history=backup_chat)
+             response_data = chat_session.send_message(query_base)
+             output = response_data.text
+             if len(output) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(output)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
                  )
+                 os.remove("chat.txt")
+             else:
+                 await message.reply_text(output)
+             backup_chat.append({"role": "model", "parts": [{"text": output}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+         except Exception as e:
+             return await message.reply_text(str(e))
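
The new audio and video handlers above share one pattern from the google.generativeai SDK: upload the media through the Files API, poll until the file leaves the PROCESSING state, then pass the file handle to generate_content. A minimal standalone sketch of that pattern follows; the API key, file path, and prompt are placeholders rather than values taken from this repository, and error handling is reduced to the essentials.

    import time
    import google.generativeai as genai

    genai.configure(api_key="YOUR_GOOGLE_API_KEY")  # placeholder key

    # Upload the media; the Files API returns a handle with a state field.
    media = genai.upload_file(path="sample.ogg")  # placeholder file

    # Poll until processing finishes (the plugin sleeps 10 s between checks).
    while media.state.name == "PROCESSING":
        time.sleep(10)
        media = genai.get_file(media.name)
    if media.state.name == "FAILED":
        raise RuntimeError("Gemini could not process the file")

    # Ask the model about the uploaded file, as the audio/video handlers do.
    model = genai.GenerativeModel(model_name="gemini-1.5-flash")
    response = model.generate_content(
        [media, "What's this?"],
        request_options={"timeout": 600},
    )
    print(response.text)

    # Remove the remote copy once the reply has been produced.
    media.delete()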