randydev committed
Commit cfcc4f8
Parent: f1f2ebd

Upload fluxai.py

Files changed (1)
  fluxai.py  +33 -20
fluxai.py CHANGED
@@ -3,17 +3,15 @@ from fastapi.responses import StreamingResponse
 from PIL import Image, ImageEnhance
 from fastapi import HTTPException
 import io
-from io import BytesIO
 import requests
 import os
-import base64
 from dotenv import load_dotenv
 from pydantic import BaseModel
 from pymongo import MongoClient
 from models import *
 from huggingface_hub import InferenceClient
-from fastapi import UploadFile, File
-from fastapi.responses import JSONResponse, FileResponse
+from fastapi import UploadFile
+from fastapi.responses import JSONResponse
 import uuid
 from RyuzakiLib import GeminiLatest
 
@@ -78,6 +76,20 @@ def deduct_tokens_gpt(user_id, amount):
     return False
 
 
+@router.get("/akeno/gettoken")
+async def get_token_with_flux(user_id: int):
+    tokens = get_user_tokens_gpt(user_id)
+    if tokens:
+        return SuccessResponse(
+            status="True",
+            randydev={"tokens": f"Current tokens: {tokens}."}
+        )
+    else:
+        return SuccessResponse(
+            status="False",
+            randydev={"tokens": f"Not enough tokens. Current tokens: {tokens}."}
+        )
+
 @router.post("/akeno/mistralai", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
 async def mistralai_(payload: MistralAI):
     try:
@@ -93,7 +105,7 @@ async def mistralai_(payload: MistralAI):
         )
 
 @router.post("/akeno/fluxai", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
-async def fluxai_image(payload: FluxAI):
+async def fluxai_image(payload: FluxAI, file: UploadFile):
     if deduct_tokens_gpt(payload.user_id, amount=20):
         try:
             image_bytes = await schellwithflux(payload.args)
@@ -102,33 +114,34 @@ async def fluxai_image(payload: FluxAI):
                     status="False",
                     randydev={"error": "Failed to generate an image"}
                 )
-
             if payload.auto_enhancer:
-                with Image.open(BytesIO(image_bytes)) as image:
+                with Image.open(io.BytesIO(image_bytes)) as image:
                     enhancer = ImageEnhance.Sharpness(image)
                     image = enhancer.enhance(1.5)
                     enhancer = ImageEnhance.Contrast(image)
                     image = enhancer.enhance(1.2)
                     enhancer = ImageEnhance.Color(image)
                     image = enhancer.enhance(1.1)
-
-                    enhanced_image_bytes = "akeno.jpg"
+                    enhanced_image_bytes = io.BytesIO()
                     image.save(enhanced_image_bytes, format="JPEG", quality=95)
-                    with open(enhanced_image_bytes, "rb") as image_file:
-                        encoded_string = base64.b64encode(image_file.read())
-
-                    example_test = "Explain how this picture looks like."
+                    enhanced_image_bytes.seek(0)
+                    ext = file.filename.split(".")[-1]
+                    unique_filename = f"{uuid.uuid4().hex}.{ext}"
+                    file_path = os.path.join("uploads", unique_filename)
+                    os.makedirs(os.path.dirname(file_path), exist_ok=True)
+                    with open(file_path, "wb") as f:
+                        f.write(enhanced_image_bytes.getvalue())
+                    example_test = "Accurately identify the baked good in the image and provide an appropriate and recipe consistent with your analysis."
                     x = GeminiLatest(api_keys=GOOGLE_API_KEY)
-                    response = x.get_response_image(example_test, enhanced_image_bytes)
-
+                    response = x.get_response_image(example_test, file_path)
+                    url = f"https://randydev-ryuzaki-api.hf.space/{file_path}"
                     return SuccessResponse(
                         status="True",
-                        randydev={"image_data": encoded_string, "caption": response}
+                        randydev={"url": url, "caption": response}
                     )
-
             else:
-                return StreamingResponse(BytesIO(image_bytes), media_type="image/jpeg")
-
+                return StreamingResponse(io.BytesIO(image_bytes), media_type="image/jpeg")
+
         except Exception as e:
             return SuccessResponse(
                 status="False",
@@ -138,5 +151,5 @@ async def fluxai_image(payload: FluxAI):
         tokens = get_user_tokens_gpt(payload.user_id)
         return SuccessResponse(
            status="False",
-            randydev={"error": f"Not enough tokens. Current tokens: {tokens}. Please support @xtdevs"}
+            randydev={"error": f"Not enough tokens. Current tokens: {tokens}."}
         )
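For quick reference, here is a minimal client-side sketch (not part of the commit) for the new GET /akeno/gettoken route added above. The base URL is the one hard-coded in the diff; the user id is a placeholder, and the route is assumed to be mounted on the app without an additional prefix. Note also that since fluxai_image now declares a file: UploadFile parameter, callers of POST /akeno/fluxai will presumably need to send a multipart/form-data request rather than a plain JSON body.

import requests

# Base URL taken from the URL string built in the diff; adjust if the space moves.
BASE_URL = "https://randydev-ryuzaki-api.hf.space"

def get_tokens(user_id: int) -> dict:
    # user_id: int on the GET route is exposed by FastAPI as a query parameter.
    resp = requests.get(f"{BASE_URL}/akeno/gettoken", params={"user_id": user_id})
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    print(get_tokens(123456))  # 123456 is an illustrative user id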