VikramSingh178 committed
Commit
654965f
1 Parent(s): 51c84d5

update: Add config.env to repository and update SDXL-LoRA inference pipeline and model weights

.gitignore CHANGED
@@ -2,4 +2,4 @@
 /scripts/wandb
 variables.tf
 .terraform
-
+ config.env
config_template.env ADDED
@@ -0,0 +1,4 @@
+ AWS_ACCESS_KEY_ID
+ AWS_SECRET_ACCESS_KEY
+ AWS_REGION
+ AWS_BUCKET_NAME
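
A config.env built from this template pairs each key with a value, for example (placeholder values only; real credentials stay out of version control, which is why config.env is now in .gitignore):

AWS_ACCESS_KEY_ID=<your-access-key-id>
AWS_SECRET_ACCESS_KEY=<your-secret-access-key>
AWS_REGION=<your-aws-region>
AWS_BUCKET_NAME=<your-bucket-name>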
product_diffusion_api/routers/__pycache__/sdxl_text_to_image.cpython-310.pyc CHANGED
Binary files a/product_diffusion_api/routers/__pycache__/sdxl_text_to_image.cpython-310.pyc and b/product_diffusion_api/routers/__pycache__/sdxl_text_to_image.cpython-310.pyc differ
 
product_diffusion_api/routers/sdxl_text_to_image.py CHANGED
@@ -61,6 +61,9 @@ def pil_to_b64_json(image):
     b64_image = base64.b64encode(buffered.getvalue()).decode("utf-8")
     return {"image_id": image_id, "b64_image": b64_image}

+ def upload_pil_to_s3(image):
+     image
+

 @lru_cache(maxsize=1)
 def load_pipeline(model_name, adapter_name):
@@ -203,3 +206,4 @@ async def sdxl_v0_lora_inference_batch(data: BatchInputFormat):
         processed_requests.append(output_json)

     return {"message": "Requests processed successfully", "data": processed_requests}
+
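
The new upload_pil_to_s3 helper lands in this commit as a stub. A minimal sketch of how it could be wired to the ImageService added in scripts/s3_manager.py below (the import path, default file name, and the returned signed URL are assumptions, not part of the commit):

import io

from scripts.s3_manager import ImageService

s3_service = ImageService()  # reads AWS settings from config.env


def upload_pil_to_s3(image, file_name: str = "sdxl_output.png") -> str:
    # Serialize the PIL image into an in-memory PNG buffer.
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")
    buffer.seek(0)
    # Upload the buffer to S3 and hand back a temporary signed URL for it.
    s3_service.upload_file(buffer, file_name)
    return s3_service.generate_signed_url(file_name)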
scripts/s3_manager.py CHANGED
@@ -0,0 +1,53 @@
+ import base64
+ import io
+ import os
+ import boto3
+ from botocore.config import Config
+ import random
+ import string
+ from dotenv import load_dotenv
+
+ # Load AWS credentials and bucket settings from config.env (see config_template.env).
+ load_dotenv("../config.env")
+
+
+ class ImageService:
+     def __init__(self):
+         self.s3 = boto3.client(
+             "s3",
+             config=Config(signature_version="s3v4"),
+             aws_access_key_id=os.getenv("AWS_ACCESS_KEY_ID"),
+             aws_secret_access_key=os.getenv("AWS_SECRET_ACCESS_KEY"),
+             region_name=os.getenv("AWS_REGION"),
+         )
+
+     def generate_signed_url(self, file_name: str, exp: int = 1800) -> str:
+         # Presigned GET URL that expires after `exp` seconds (default 30 minutes).
+         return self.s3.generate_presigned_url(
+             "get_object",
+             Params={"Bucket": os.getenv("AWS_BUCKET_NAME"), "Key": file_name},
+             ExpiresIn=exp,
+         )
+
+     def generate_unique_file_name(self, file) -> str:
+         # Append a random suffix so repeated uploads do not overwrite each other.
+         file_name = file.filename
+         random_string = "".join(
+             random.choices(string.ascii_uppercase + string.digits, k=10)
+         )
+         file_extension = file_name.split(".")[-1]
+         file_real_name = file_name.split(".")[0]
+         return f"{file_real_name}-{random_string}.{file_extension}"
+
+     def upload_file(self, file, file_name) -> str:
+         self.s3.upload_fileobj(file, os.getenv("AWS_BUCKET_NAME"), file_name)
+         return file_name
+
+     def upload_base64_file(self, base64_file: str, file_name: str) -> str:
+         return self.upload_file(io.BytesIO(base64.b64decode(base64_file)), file_name)
+
+     def get_object(self, file_name: str, bucket: str):
+         try:
+             return self.s3.get_object(Bucket=bucket, Key=file_name)
+         except Exception:
+             return None
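
A quick usage sketch for ImageService, assuming the module is importable as scripts.s3_manager and config.env is populated (the placeholder payload and file name below are illustrative, not from the commit):

import base64

from scripts.s3_manager import ImageService

s3_service = ImageService()

# In the API this would be the b64_image returned by pil_to_b64_json();
# a tiny placeholder byte string stands in for a real PNG here.
b64_image = base64.b64encode(b"placeholder image bytes").decode("utf-8")
file_name = "sdxl-output-demo.png"

s3_service.upload_base64_file(b64_image, file_name)
print(s3_service.generate_signed_url(file_name, exp=1800))  # URL valid for 30 minutes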