
setup saving list of images to AWS S3
SzabyV committed Dec 23, 2024
1 parent 40f33b5 commit 144196f
Showing 1 changed file with 82 additions and 7 deletions.
89 changes: 82 additions & 7 deletions src/rp_handler.py
@@ -10,6 +10,9 @@
from io import BytesIO
import shutil
from pathlib import Path
from math import pow, ceil

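# Total base64-encoded size of the uploaded input images; set by upload_images() and
# read later when judging whether the response payload may be too large without AWS S3.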
size_of_input_images = 0

# Time to wait between API check attempts in milliseconds
COMFY_API_AVAILABLE_INTERVAL_MS = 50
@@ -103,6 +106,7 @@ def check_server(url, retries=500, delay=50):


def upload_images(images):
global size_of_input_images
"""
Upload a list of base64 encoded images to the ComfyUI server using the /upload/image endpoint.
@@ -118,14 +122,15 @@ def upload_images(images):

responses = []
upload_errors = []
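# Keep the decoded image bytes so their combined size can be estimated after uploading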
image_list = []

print(f"runpod-worker-comfy - image(s) upload")

for image in images:
name = image["name"]
image_data = image["image"]

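# Decode the base64 payload into raw bytes for the file upload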
blob = base64.b64decode(image_data)

# Prepare the form data
files = {
"image": (name, BytesIO(blob), "image/png"),
@@ -138,6 +143,10 @@
upload_errors.append(f"Error uploading {name}: {response.text}")
else:
responses.append(f"Successfully uploaded {name}")
image_list.append(blob)

# Estimate how large the input images are once base64 encoded; this is used later to
# warn when the response payload may be too large to return without AWS S3.
size_of_input_images = calculate_size_of_images(image_list)
print(f"Size of input images is: {size_of_input_images} bytes")

if upload_errors:
print(f"runpod-worker-comfy - image(s) upload with errors")
@@ -273,7 +282,49 @@ def process_output_images(outputs, job_id):
"status": "error",
"message": f"the image does not exist in the specified output folder: {local_image_path}",
}

def resize_image(image, wished_length):
    """Downscale a PIL image so its longest side is at most wished_length pixels, keeping the aspect ratio."""
    width, height = image.size
    if width <= wished_length and height <= wished_length:
        # Already small enough, return it unchanged
        return image
    proportion = width / height
    if proportion < 1:
        # Portrait: limit the height
        new_height = wished_length
        new_width = round(proportion * new_height)
    else:
        # Landscape or square: limit the width
        new_width = wished_length
        new_height = round(new_width / proportion)
    return image.resize((new_width, new_height))

def optimize_input_images_for_online_rendering(images, wished_length=1024):
    """Resize each PIL image in the list so its longest side is at most wished_length pixels."""
    return [resize_image(image, wished_length) for image in images if image is not None]

def calculate_size_of_images(images):
    """Estimate the total size in bytes of a list of images once base64 encoded.

    Accepts raw image bytes (e.g. from base64.b64decode); anything else is
    treated as a PIL image and serialized to PNG in memory first.
    """
    size_of_images = 0
    for image in images:
        if image is None:
            continue
        if isinstance(image, (bytes, bytearray)):
            size_in_bytes = len(image)
        else:
            # Save the image to an in-memory buffer as PNG
            buffer = BytesIO()
            image.save(buffer, format="PNG")
            size_in_bytes = len(buffer.getvalue())
        # base64 encodes every 3 input bytes as 4 output characters
        size_of_images += ceil(size_in_bytes / 3) * 4
    return size_of_images

def process_output_images_list(outputs, job_id, save_to_storage=None):
"""
@@ -293,7 +344,18 @@ def process_output_images_list(outputs, job_id, save_to_storage=None):

image_list = []
errors = []
# The ComfyUI history only lists metadata for each generated image (e.g. its filename),
# so the base64 size of the returned images is accumulated while they are encoded below
# and checked against the payload limit before returning.
size_of_output_images = 0

for node_id, node_output in outputs.items():
if "images" in node_output:
for image in node_output["images"]:
@@ -308,11 +370,19 @@
if os.environ.get("BUCKET_ENDPOINT_URL", False):
# Upload to AWS S3
image_url = rp_upload.upload_image(job_id, local_image_path)
image_json = {
"name": image["filename"],
"image": image_url
}
image_list.append(image_json)
else:
# Encode in Base64
base64_image = base64_encode(local_image_path)
# Keep track of how large the base64 payload is getting for the size check below
size_of_output_images += len(base64_image)
image_json = {
"name": image["filename"],
"image": base64_image
}
image_list.append(image_json)

# Save to RunPod storage if requested
if save_to_storage:
@@ -330,7 +400,7 @@ def process_output_images_list(outputs, job_id, save_to_storage=None):
print(f"Size of output images is: {size_of_output_images} bytes")
# Warn when the combined base64 size of the input and returned images reaches the
# ~19.5 MB threshold; payloads that large may not be returnable without AWS S3.
if size_of_input_images + size_of_output_images >= 19.5 * pow(10, 6):
    print("Returned images might be too large to be received without AWS S3")

if image_list:
return {
"status": "success",
"message": {"results": image_list},
"errors": errors if errors else None,
}
else:
@@ -403,9 +473,14 @@ def handler(job):
return {"error": "Max retries reached while waiting for image generation"}
except Exception as e:
return {"error": f"Error waiting for image generation: {str(e)}"}

try:
    # Return the generated images as a list of {name, image} entries, where each image
    # is an AWS S3 URL or a base64 string, and also save them to RunPod storage
    images_result = process_output_images_list(history[prompt_id].get("outputs"), job["id"], save_to_storage=True)
except Exception as e:
    print(f"Error while saving list of images: {str(e)}")
    # Fall back to the original single-result handling (AWS S3 URL or base64)
    images_result = process_output_images(history[prompt_id].get("outputs"), job["id"])

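# Attach the refresh_worker flag to the response (controls whether the worker is refreshed after this job)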
result = {**images_result, "refresh_worker": REFRESH_WORKER}

