Update chatbot/plugins/chat.py
chatbot/plugins/chat.py (CHANGED, +43 -2)
@@ -62,6 +62,27 @@ js = AkenoXJs(DifferentAPIDefault()).connect()
 
 message_memory_state = {}
 
+def obfuscate(word):
+    return word.lower()\
+        .replace("a", "[a@4]")\
+        .replace("b", "[b8]")\
+        .replace("e", "[e3]")\
+        .replace("g", "[g69]")\
+        .replace("i", "[i1!|]")\
+        .replace("o", "[o0]")\
+        .replace("s", "[s5$]")\
+        .replace("t", "[t7+]")\
+        .replace("l", "[l1|]")
+
+def regex_all_blacklist(text):
+    with open("blacklist.txt", "r", encoding="utf-8") as f:
+        words = [obfuscate(w.strip()) for w in f if w.strip()]
+    pattern = r"\b(" + "|".join(words) + r")\b"
+    if re.search(pattern, text, re.IGNORECASE):
+        return True
+    else:
+        return False
+
 async def geni_files_delete(name: str):
     url = f"https://generativelanguage.googleapis.com/v1beta/{name}"
     params = {"key": GOOGLE_API_KEY}
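For reference, a minimal sketch of what the two new helpers do (the blacklist entry "bad" and the sample strings below are hypothetical; only the obfuscation rules come from this change). obfuscate() rewrites each letter of a blacklisted word into a character class, so the pattern built by regex_all_blacklist() also catches common leetspeak spellings:

    import re

    # obfuscate("bad") from this change produces "[b8][a@4]d"
    pattern = r"\b(" + "[b8][a@4]d" + r")\b"

    for text in ["this is bad", "this is b@d", "this is B4D", "this is bread"]:
        print(text, "->", bool(re.search(pattern, text, re.IGNORECASE)))
    # True for "bad", "b@d" and "B4D"; False for "bread"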
@@ -274,8 +295,13 @@ async def geminigen_prompt(client, callback):
     get_response = data.get("prompt_image", None)
     if not get_response:
         return await callback.answer("Server busy try again later", True)
-
+    if regex_all_blacklist(get_response):
+        return await callback.answer("You been blocked blacklisted", True)
 
+    await callback.answer("Ok Processed")
+    await callback.message.delete()
+    await client.send_chat_action(callback.message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
+    await asyncio.sleep(2.0)
     backup_chat.append({"role": "user", "parts": [{"text": get_response}]})
     response = await gen.aio.models.generate_content(
         model="gemini-2.0-flash-exp-image-generation",
@@ -330,8 +356,13 @@ async def flux_prompt(client, callback):
     get_response = data.get("prompt_image", None)
     if not get_response:
         return await callback.answer("Server busy try again later", True)
-
+    if regex_all_blacklist(get_response):
+        return await callback.answer("You been blocked blacklisted", True)
 
+    await callback.answer("Ok Processed")
+    await callback.message.delete()
+    await client.send_chat_action(callback.message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
+    await asyncio.sleep(2.0)
     backup_chat.append({"role": "user", "parts": [{"text": get_response}]})
     response_js = await js.image.create(
         "black-forest-labs/flux-1-schnell",
@@ -620,6 +651,9 @@ async def chatbot_talk(client: Client, message: Message):
         await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_PHOTO)
         await asyncio.sleep(1.5)
 
+        if regex_all_blacklist(caption):
+            return await message.reply_text("You been blocked blacklisted")
+
         if re.findall(r"\b(This is a picture of me)\b", caption, re.IGNORECASE):
             try:
                 backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
@@ -737,6 +771,9 @@ async def chatbot_talk(client: Client, message: Message):
         await client.send_chat_action(message.chat.id, enums.ChatAction.UPLOAD_VIDEO)
         await asyncio.sleep(2.0)
         caption = message.caption or "What this?"
+        if regex_all_blacklist(caption):
+            return await message.reply_text("You been blocked blacklisted")
+
         backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
         backup_chat.append({"role": "user", "parts": [{"text": caption}]})
         video_file = await gen.aio.files.upload(file=await message.download())
@@ -839,6 +876,10 @@ async def chatbot_talk(client: Client, message: Message):
         captions = ""
         file_path = "gemini-native-image.png"
         try:
+
+            if regex_all_blacklist(query_base):
+                return await message.reply_text("You been blocked blacklisted")
+
             if query_base in ["/", "/help"]:
                 await client.send_chat_action(message.chat.id, enums.ChatAction.CANCEL)
                 return
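As written, regex_all_blacklist() re-reads blacklist.txt and rebuilds the regex for every message or callback it checks. A minimal sketch of an equivalent check that loads and compiles the pattern once at import time (a hypothetical variant, assuming obfuscate() from this change is in scope and blacklist.txt keeps the one-word-per-line format):

    import re

    def _load_blacklist_pattern(path="blacklist.txt"):
        with open(path, "r", encoding="utf-8") as f:
            words = [obfuscate(w.strip()) for w in f if w.strip()]
        # Compile once; an empty file means there is nothing to block.
        return re.compile(r"\b(" + "|".join(words) + r")\b", re.IGNORECASE) if words else None

    _BLACKLIST_RE = _load_blacklist_pattern()

    def regex_all_blacklist(text):
        return bool(_BLACKLIST_RE and _BLACKLIST_RE.search(text))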