Upload main.py
main.py CHANGED
@@ -117,6 +117,13 @@ trans = SyncTranslator()
 
 app = FastAPI(docs_url=None, redoc_url="/")
 
+contact_support = """
+We are aware that AI is currently offline. This seems to be caused by the API
+We are investigating and doing our best to get things back online as soon as possible. Thank you for your patience
+
+Contact Support @xtdevs
+"""
+
 def get_all_api_keys():
     user = collection.find({})
     api_keys = []
@@ -472,18 +479,12 @@ def ocr_space_url(
 
 @app.get("/ryuzaki/nochatgpt", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
 def no_chatgpt(query: str=None):
-    contact_support = """
-We are aware that AI is currently offline. This seems to be caused by the API
-We are investigating and doing our best to get things back online as soon as possible. Thank you for your patience
-
-Contact Support @xtdevs
-"""
     try:
         url = "http://216.98.10.228:5000"
         api_method = f"{url}/gpt?ask={query}"
         response = requests.get(api_method)
         if response.status_code != 200:
-            return SuccessResponse(status="False", randydev={"message":
+            return SuccessResponse(status="False", randydev={"message": contact_support})
         return SuccessResponse(status="True", randydev={"message": response.text})
     except requests.exceptions.ConnectTimeout:
         return SuccessResponse(status="False", randydev={"message": contact_support})
@@ -494,27 +495,17 @@ Contact Support @xtdevs
 def free_chatgpt4_beta(item: ChatgptCustom):
     try:
         response = RendyDevChat(item.query).get_response_beta(joke=True)
-        return SuccessResponse(
-            status="True",
-            randydev={
-                "message": response
-            }
-        )
+        return SuccessResponse(status="True", randydev={"message": response})
     except:
-        return
+        return SuccessResponse(status="False", randydev={"message": contact_support})
 
 @app.get("/ryuzaki/freechatgpt-bing", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def free_chatgpt4_bing(item: ChatgptCustom):
     try:
         response = RendyDevChat(query).get_response_bing(bing=True)
-        return SuccessResponse(
-            status="True",
-            randydev={
-                "message": response
-            }
-        )
+        return SuccessResponse(status="True", randydev={"message": response})
     except:
-        return
+        return SuccessResponse(status="False", randydev={"message": contact_support})
 
 @app.post("/ryuzaki/ai", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def ryuzaki_ai(
@@ -528,16 +519,10 @@ def ryuzaki_ai(
         first_result = response_data[0]
         if "generated_text" in first_result:
             message = first_result["generated_text"]
-            return SuccessResponse(
-
-                randydev={
-                    "ryuzaki_text": message
-                }
-            )
-        return {"status": "false", "message": "Invalid response format"}
-
+            return SuccessResponse(status="True", randydev={"ryuzaki_text": message})
+        return SuccessResponse(status="False", randydev={"message": contact_support})
     except Exception:
-        return
+        return SuccessResponse(status="False", randydev={"message": contact_support})
 
 @app.post("/ryuzaki/opendalle")
 def open_dalle(
@@ -565,7 +550,7 @@ def open_dalle(
         if encoded_string:
             return SuccessResponse(status="True", randydev={"data": encoded_string})
         else:
-            return SuccessResponse(status="False", randydev={"data":
+            return SuccessResponse(status="False", randydev={"data": contact_support})
 
 @app.post("/ryuzaki/picsart-pro")
 def Picsart_Pro(
@@ -607,7 +592,7 @@ def Picsart_Pro(
         if encoded_string:
             return SuccessResponse(status="True", randydev={"data": encoded_string})
         else:
-            return SuccessResponse(status="False", randydev={"data":
+            return SuccessResponse(status="False", randydev={"data": contact_support})
 
 @app.post("/ryuzaki/anime-styled")
 def Anime_Styled(
@@ -635,7 +620,7 @@ def Anime_Styled(
         if encoded_string:
             return SuccessResponse(status="True", randydev={"data": encoded_string})
         else:
-            return SuccessResponse(status="False", randydev={"data":
+            return SuccessResponse(status="False", randydev={"data": contact_support})
 
 @app.post("/ryuzaki/unsplash")
 def image_unsplash(item: GetImageUnsplash):
@@ -663,12 +648,6 @@ def image_unsplash(item: GetImageUnsplash):
 
 @app.post("/ryuzaki/chatgpt-model", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def chatgpt_model(item: ChatgptModel):
-    contact_support = """
-We are aware that AI is currently offline. This seems to be caused by the API
-We are investigating and doing our best to get things back online as soon as possible. Thank you for your patience
-
-Contact Support @xtdevs
-"""
     url = "https://lexica.qewertyy.me/models"
     if item.is_models:
         params = {"model_id": item.model_id, "prompt": item.query}
@@ -712,19 +691,16 @@ def chatgpt3_turbo(
                 }
             )
         except Exception:
-            return SuccessResponse(status="False", randydev={"message":
+            return SuccessResponse(status="False", randydev={"message": contact_support})
     else:
        url = "https://lexica.qewertyy.me/models"
        params = {"model_id": 5, "prompt": item.query}
        response = requests.post(url, params=params)
        if response.status_code != 200:
-           return
+           return SuccessResponse(status="False", randydev={"message": contact_support})
        check_response = response.json()
        answer = check_response.get("content")
-       return SuccessResponse(
-           status="True",
-           randydev={"message": answer}
-       )
+       return SuccessResponse(status="True", randydev={"message": answer})
 
 @app.post("/ryuzaki/chatgpt4-turbo", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def chatgpt4_turbo(
@@ -748,7 +724,7 @@ def chatgpt4_turbo(
             }
         )
     except Exception:
-        return SuccessResponse(status="False", randydev={"message":
+        return SuccessResponse(status="False", randydev={"message": contact_support})
 
 @app.post("/ryuzaki/google-ai", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def v1beta3_google_ai(
@@ -770,14 +746,9 @@ def v1beta3_google_ai(
         answer = response_str["candidates"]
         for results in answer:
             message = results.get("output")
-        return SuccessResponse(
-            status="True",
-            randydev={
-                "message": message
-            }
-        )
+        return SuccessResponse(status="True", randydev={"message": message})
     except:
-        return
+        return SuccessResponse(status="False", randydev={"message": contact_support})
 
 @app.post("/ryuzaki/gemini-ai-pro", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def gemini_pro(item: GeminiPro):
@@ -787,12 +758,6 @@ def gemini_pro(item: GeminiPro):
 
     Today is {dt.now():%A %d %B %Y %H:%M}
     """
-    contact_support = """
-We are aware that AI is currently offline. This seems to be caused by the API
-We are investigating and doing our best to get things back online as soon as possible. Thank you for your patience
-
-Contact Support @xtdevs
-"""
     if item.is_multi_chat:
         selected_api_key = ASSISTANT_GOOGLE_API_KEYS or item.gemini_api_key
         try:
@@ -849,14 +814,9 @@ def v1beta2_google_ai(
         answer = response_str["candidates"]
         for results in answer:
             message = results.get("content")
-        return SuccessResponse(
-            status="True",
-            randydev={
-                "message": message
-            }
-        )
+        return SuccessResponse(status="True", randydev={"message": message})
     except:
-        return
+        return SuccessResponse(status="False", randydev={"message": contact_support})
 
 @app.post("/ryuzaki/new-monitor", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def new_monitor(
@@ -1008,24 +968,19 @@ def chatbot(item: ChatBots):
     except:
         return {"status": "false", "message": "Error response."}
 
-@app.get("/ryuzaki/llama", response_model=SuccessResponse, responses={422: {"model":
+@app.get("/ryuzaki/llama", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
 def get_llama(item: ChatgptCustom):
     api_url = SOURCE_WHAT_GAY_URL
     params = {"query": item.query}
     x = requests.get(f"{api_url}/llama", params=params)
     if x.status_code != 200:
-        return "
+        return SuccessResponse(status="False", randydev={"message": contact_support})
     try:
         y = x.json()
         response = y["answer"]
-        return {
-            "status": "true",
-            "randydev":{
-                "message": response
-            }
-        }
+        return SuccessResponse(status="True", randydev={"message": response})
    except:
-        return
+        return SuccessResponse(status="False", randydev={"message": contact_support})
 
 @app.get("/ryuzaki/waifu", response_model=SuccessResponse, responses={422: {"model": ErrorStatus}})
 def waifu_pics(item: WaifuPics):
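The recurring change across these hunks is easier to read in isolation: the per-endpoint contact_support strings and bare return statements are replaced by a single module-level notice that every handler returns through SuccessResponse whenever an upstream call fails. Below is a minimal sketch of that pattern, assuming a pydantic SuccessResponse model with status and randydev fields as in main.py; the /example endpoint path and upstream URL are illustrative placeholders, not part of the diff.

# Minimal sketch of the shared fallback pattern (assumed names mirror main.py;
# the endpoint path and upstream URL are illustrative placeholders).
from fastapi import FastAPI
from pydantic import BaseModel
import requests

app = FastAPI()

# Defined once at module level instead of being repeated inside each endpoint.
contact_support = """
We are aware that AI is currently offline. This seems to be caused by the API
We are investigating and doing our best to get things back online as soon as possible. Thank you for your patience

Contact Support @xtdevs
"""

class SuccessResponse(BaseModel):
    status: str
    randydev: dict

@app.get("/example", response_model=SuccessResponse)
def example(query: str = None):
    try:
        r = requests.get("https://upstream.example/api", params={"ask": query}, timeout=10)
        if r.status_code != 200:
            # Upstream error: return the shared notice instead of a bare `return`.
            return SuccessResponse(status="False", randydev={"message": contact_support})
        return SuccessResponse(status="True", randydev={"message": r.text})
    except requests.exceptions.RequestException:
        return SuccessResponse(status="False", randydev={"message": contact_support})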