import gradio as gr
import cv2
import numpy as np
from PIL import Image, ImageEnhance
import logging
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def enhance_brightness_contrast(image, brightness_factor, contrast_factor):
"""Enhance brightness and contrast of image"""
try:
if image is None:
return None, "Please upload an image first."
logger.info(f"Enhancing brightness ({brightness_factor}) and contrast ({contrast_factor})...")
        # Both sliders are multiplicative factors (1.0 = unchanged), which matches the
        # semantics of PIL's ImageEnhance, so apply them directly to the PIL image
        result = ImageEnhance.Brightness(image).enhance(brightness_factor)
        result = ImageEnhance.Contrast(result).enhance(contrast_factor)
        logger.info("Brightness and contrast enhancement completed successfully")
        return result, f"✅ Enhanced! Brightness: {brightness_factor}, Contrast: {contrast_factor}"
except Exception as e:
logger.error(f"Error in brightness/contrast enhancement: {e}")
return None, f"❌ Error: {str(e)}"
def enhance_saturation(image, saturation_factor):
"""Enhance saturation of image"""
try:
if image is None:
return None, "Please upload an image first."
logger.info(f"Enhancing saturation with factor: {saturation_factor}")
# Convert PIL to HSV for saturation adjustment
img_array = np.array(image)
img_hsv = cv2.cvtColor(img_array, cv2.COLOR_RGB2HSV)
# Adjust saturation
        img_hsv[:, :, 1] = np.clip(img_hsv[:, :, 1].astype(np.float32) * saturation_factor, 0, 255).astype(np.uint8)
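        # (In OpenCV's 8-bit HSV representation the S channel ranges 0-255, hence the clip.)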
# Convert back to RGB
enhanced = cv2.cvtColor(img_hsv, cv2.COLOR_HSV2RGB)
# Convert back to PIL
result = Image.fromarray(enhanced)
logger.info("Saturation enhancement completed successfully")
return result, f"βœ… Saturation enhanced with factor: {saturation_factor}"
except Exception as e:
logger.error(f"Error in saturation enhancement: {e}")
return None, f"❌ Error: {str(e)}"
def apply_sharpening(image, sharpness_factor):
"""Apply sharpening filter to image"""
try:
if image is None:
return None, "Please upload an image first."
logger.info(f"Applying sharpening with factor: {sharpness_factor}")
# Convert PIL to array
img_array = np.array(image)
# Create sharpening kernel
kernel = np.array([[-1, -1, -1],
[-1, 9, -1],
[-1, -1, -1]])
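        # This is the standard 3x3 sharpening kernel: the identity kernel plus an
        # 8-neighbour Laplacian-style edge kernel. Its weights sum to 1, so overall
        # brightness is preserved while high-frequency detail is boosted.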
# Apply convolution
sharpened = cv2.filter2D(img_array, -1, kernel)
# Blend with original based on sharpness factor
result_array = cv2.addWeighted(img_array, 1 - sharpness_factor, sharpened, sharpness_factor, 0)
# Convert back to PIL
result = Image.fromarray(result_array)
logger.info("Sharpening completed successfully")
return result, f"βœ… Sharpening applied with factor: {sharpness_factor}"
except Exception as e:
logger.error(f"Error in sharpening: {e}")
return None, f"❌ Error: {str(e)}"
def apply_noise_reduction(image, strength):
"""Reduce noise in image"""
try:
if image is None:
return None, "Please upload an image first."
logger.info(f"Applying noise reduction with strength: {strength}")
# Convert PIL to array
img_array = np.array(image)
# Apply bilateral filter for noise reduction
# Higher strength = more aggressive filtering
d = int(15 * strength) # Diameter of pixel neighborhood
sigma_color = int(75 * strength) # Filter sigma in color space
sigma_space = int(75 * strength) # Filter sigma in coordinate space
denoised = cv2.bilateralFilter(img_array, d, sigma_color, sigma_space)
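        # (Optional alternative, not used here: OpenCV's non-local-means denoiser,
        # e.g. cv2.fastNlMeansDenoisingColored(img_array, None, 10, 10, 7, 21), is
        # slower but often removes photographic noise more thoroughly.)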
# Convert back to PIL
result = Image.fromarray(denoised)
logger.info("Noise reduction completed successfully")
return result, f"βœ… Noise reduction applied with strength: {strength}"
except Exception as e:
logger.error(f"Error in noise reduction: {e}")
return None, f"❌ Error: {str(e)}"
def apply_color_balance(image, temperature, tint):
"""Apply color balance adjustments"""
try:
if image is None:
return None, "Please upload an image first."
logger.info(f"Applying color balance - Temperature: {temperature}, Tint: {tint}")
# Convert PIL to array
img_array = np.array(image)
        # Convert to LAB color space; use a signed dtype so negative shifts don't wrap
        img_lab = cv2.cvtColor(img_array, cv2.COLOR_RGB2LAB).astype(np.int16)
        # Adjust temperature (b channel: blue-yellow balance)
        img_lab[:, :, 2] = np.clip(img_lab[:, :, 2] + temperature, 0, 255)
        # Adjust tint (a channel: green-magenta balance)
        img_lab[:, :, 1] = np.clip(img_lab[:, :, 1] + tint, 0, 255)
        # Cast back to uint8 before converting out of LAB
        img_lab = img_lab.astype(np.uint8)
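        # In OpenCV's 8-bit LAB encoding the a and b channels are stored with a +128
        # offset, so a positive temperature shifts b toward yellow (warmer) and a
        # positive tint shifts a toward magenta; clipping to 0-255 keeps values valid.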
# Convert back to RGB
result_array = cv2.cvtColor(img_lab, cv2.COLOR_LAB2RGB)
# Convert back to PIL
result = Image.fromarray(result_array)
logger.info("Color balance adjustment completed successfully")
return result, f"βœ… Color balance applied! Temperature: {temperature}, Tint: {tint}"
except Exception as e:
logger.error(f"Error in color balance: {e}")
return None, f"❌ Error: {str(e)}"
def apply_vintage_effect(image, intensity):
"""Apply vintage/sepia effect to image"""
try:
if image is None:
return None, "Please upload an image first."
logger.info(f"Applying vintage effect with intensity: {intensity}")
# Convert PIL to array
img_array = np.array(image)
        # Create sepia effect: apply the sepia matrix to the R, G, B channels,
        # working in float so values can be clipped before casting back to uint8
        r = img_array[:, :, 0].astype(np.float32)
        g = img_array[:, :, 1].astype(np.float32)
        b = img_array[:, :, 2].astype(np.float32)
        sepia = np.zeros_like(img_array, dtype=np.float32)
        sepia[:, :, 0] = r * 0.393 + g * 0.769 + b * 0.189  # Red channel
        sepia[:, :, 1] = r * 0.349 + g * 0.686 + b * 0.168  # Green channel
        sepia[:, :, 2] = r * 0.272 + g * 0.534 + b * 0.131  # Blue channel
        # Clip values to valid range
        sepia = np.clip(sepia, 0, 255).astype(np.uint8)
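        # (These coefficients are the widely used sepia-tone matrix; other weightings
        # would simply give a slightly warmer or cooler tint.)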
# Blend with original based on intensity
result_array = cv2.addWeighted(img_array, 1 - intensity, sepia, intensity, 0)
# Convert back to PIL
result = Image.fromarray(result_array)
logger.info("Vintage effect applied successfully")
return result, f"βœ… Vintage effect applied with intensity: {intensity}"
except Exception as e:
logger.error(f"Error in vintage effect: {e}")
return None, f"❌ Error: {str(e)}"
def apply_hdr_effect(image, strength):
"""Apply HDR-like effect to image"""
try:
if image is None:
return None, "Please upload an image first."
logger.info(f"Applying HDR effect with strength: {strength}")
# Convert PIL to array
img_array = np.array(image)
# Apply tone mapping for HDR effect
# Convert to LAB color space
img_lab = cv2.cvtColor(img_array, cv2.COLOR_RGB2LAB)
# Enhance L channel (lightness)
l_channel = img_lab[:, :, 0].astype(np.float32)
# Apply adaptive histogram equalization
clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
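        # clipLimit caps how much local contrast CLAHE may amplify (limiting noise
        # blow-up in flat regions); tileGridSize sets the size of the local regions
        # that are equalized independently.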
enhanced_l = clahe.apply(l_channel.astype(np.uint8))
# Blend enhanced L channel with original
enhanced_l = enhanced_l.astype(np.float32)
final_l = l_channel * (1 - strength) + enhanced_l * strength
# Update LAB image
img_lab[:, :, 0] = np.clip(final_l, 0, 255).astype(np.uint8)
# Convert back to RGB
result_array = cv2.cvtColor(img_lab, cv2.COLOR_LAB2RGB)
# Convert back to PIL
result = Image.fromarray(result_array)
logger.info("HDR effect applied successfully")
return result, f"βœ… HDR effect applied with strength: {strength}"
except Exception as e:
logger.error(f"Error in HDR effect: {e}")
return None, f"❌ Error: {str(e)}"
# Create the main interface with tabs
with gr.Blocks(title="🖼️ Image Enhancement Suite", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🖼️ Image Enhancement Suite")
    gr.Markdown("Professional image enhancement tools built on OpenCV and PIL computer vision algorithms. Transform your images with precision and creativity!")
with gr.Tab("πŸ’‘ Brightness & Contrast"):
gr.Markdown("## πŸ’‘ Brightness & Contrast Enhancement")
gr.Markdown("Adjust image brightness and contrast for better visibility and impact.")
with gr.Row():
with gr.Column():
input_image = gr.Image(type="pil", label="Upload Image")
brightness_slider = gr.Slider(minimum=0.1, maximum=3.0, value=1.0, step=0.1, label="Brightness Factor")
contrast_slider = gr.Slider(minimum=0.1, maximum=3.0, value=1.0, step=0.1, label="Contrast Factor")
process_btn = gr.Button("Enhance Image", variant="primary")
with gr.Column():
output_image = gr.Image(type="pil", label="Enhanced Image")
status_text = gr.Textbox(label="Status", interactive=False)
process_btn.click(
fn=enhance_brightness_contrast,
inputs=[input_image, brightness_slider, contrast_slider],
outputs=[output_image, status_text]
)
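        # Every tab below follows this same wiring pattern: the button's click event
        # sends the tab's inputs to its processing function and routes the returned
        # (image, status) pair back to that tab's output components.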
with gr.Tab("🎨 Saturation Enhancement"):
gr.Markdown("## 🎨 Saturation Enhancement")
gr.Markdown("Enhance or reduce color saturation to make images more vibrant or subtle.")
with gr.Row():
with gr.Column():
input_image_sat = gr.Image(type="pil", label="Upload Image")
saturation_slider = gr.Slider(minimum=0.0, maximum=3.0, value=1.0, step=0.1, label="Saturation Factor")
process_btn_sat = gr.Button("Enhance Saturation", variant="primary")
with gr.Column():
output_image_sat = gr.Image(type="pil", label="Enhanced Image")
status_text_sat = gr.Textbox(label="Status", interactive=False)
process_btn_sat.click(
fn=enhance_saturation,
inputs=[input_image_sat, saturation_slider],
outputs=[output_image_sat, status_text_sat]
)
with gr.Tab("πŸ”ͺ Sharpening"):
gr.Markdown("## πŸ”ͺ Image Sharpening")
gr.Markdown("Apply sharpening filters to enhance image details and clarity.")
with gr.Row():
with gr.Column():
input_image_sharp = gr.Image(type="pil", label="Upload Image")
sharpness_slider = gr.Slider(minimum=0.0, maximum=1.0, value=0.5, step=0.1, label="Sharpness Factor")
process_btn_sharp = gr.Button("Sharpen Image", variant="primary")
with gr.Column():
output_image_sharp = gr.Image(type="pil", label="Enhanced Image")
status_text_sharp = gr.Textbox(label="Status", interactive=False)
process_btn_sharp.click(
fn=apply_sharpening,
inputs=[input_image_sharp, sharpness_slider],
outputs=[output_image_sharp, status_text_sharp]
)
with gr.Tab("πŸ”‡ Noise Reduction"):
gr.Markdown("## πŸ”‡ Noise Reduction")
gr.Markdown("Reduce image noise while preserving important details.")
with gr.Row():
with gr.Column():
input_image_noise = gr.Image(type="pil", label="Upload Image")
noise_slider = gr.Slider(minimum=0.1, maximum=2.0, value=1.0, step=0.1, label="Reduction Strength")
process_btn_noise = gr.Button("Reduce Noise", variant="primary")
with gr.Column():
output_image_noise = gr.Image(type="pil", label="Enhanced Image")
status_text_noise = gr.Textbox(label="Status", interactive=False)
process_btn_noise.click(
fn=apply_noise_reduction,
inputs=[input_image_noise, noise_slider],
outputs=[output_image_noise, status_text_noise]
)
with gr.Tab("🌈 Color Balance"):
gr.Markdown("## 🌈 Color Balance")
gr.Markdown("Fine-tune color temperature and tint for perfect color balance.")
with gr.Row():
with gr.Column():
input_image_color = gr.Image(type="pil", label="Upload Image")
temperature_slider = gr.Slider(minimum=-50, maximum=50, value=0, step=1, label="Temperature (Blue-Yellow)")
tint_slider = gr.Slider(minimum=-50, maximum=50, value=0, step=1, label="Tint (Green-Magenta)")
process_btn_color = gr.Button("Adjust Colors", variant="primary")
with gr.Column():
output_image_color = gr.Image(type="pil", label="Enhanced Image")
status_text_color = gr.Textbox(label="Status", interactive=False)
process_btn_color.click(
fn=apply_color_balance,
inputs=[input_image_color, temperature_slider, tint_slider],
outputs=[output_image_color, status_text_color]
)
with gr.Tab("πŸ“· Vintage Effect"):
gr.Markdown("## πŸ“· Vintage/Sepia Effect")
gr.Markdown("Apply vintage sepia tones for a nostalgic, classic look.")
with gr.Row():
with gr.Column():
input_image_vintage = gr.Image(type="pil", label="Upload Image")
vintage_slider = gr.Slider(minimum=0.0, maximum=1.0, value=0.5, step=0.1, label="Effect Intensity")
process_btn_vintage = gr.Button("Apply Vintage Effect", variant="primary")
with gr.Column():
output_image_vintage = gr.Image(type="pil", label="Enhanced Image")
status_text_vintage = gr.Textbox(label="Status", interactive=False)
process_btn_vintage.click(
fn=apply_vintage_effect,
inputs=[input_image_vintage, vintage_slider],
outputs=[output_image_vintage, status_text_vintage]
)
with gr.Tab("✨ HDR Effect"):
gr.Markdown("## ✨ HDR Effect")
gr.Markdown("Apply HDR-like effects for enhanced dynamic range and detail.")
with gr.Row():
with gr.Column():
input_image_hdr = gr.Image(type="pil", label="Upload Image")
hdr_slider = gr.Slider(minimum=0.0, maximum=1.0, value=0.5, step=0.1, label="Effect Strength")
process_btn_hdr = gr.Button("Apply HDR Effect", variant="primary")
with gr.Column():
output_image_hdr = gr.Image(type="pil", label="Enhanced Image")
status_text_hdr = gr.Textbox(label="Status", interactive=False)
process_btn_hdr.click(
fn=apply_hdr_effect,
inputs=[input_image_hdr, hdr_slider],
outputs=[output_image_hdr, status_text_hdr]
)
# Launch the interface
if __name__ == "__main__":
demo.launch()
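    # Optional deployment tweaks (assumptions, not part of the original app): expose the
    # server on the local network or request a temporary public Gradio link, e.g.
    # demo.launch(server_name="0.0.0.0", server_port=7860, share=True)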