import gradio as gr
import torch
import kornia as K
from kornia.geometry.transform import resize
import numpy as np
from torchvision import transforms
from torchvision.utils import make_grid
from PIL import Image
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def read_image(img):
    """Load an image (numpy array or file path), convert it to a tensor, and resize it to 50x50."""
    image_to_tensor = transforms.ToTensor()
    if isinstance(img, np.ndarray):
        img = Image.fromarray(img)
    elif isinstance(img, str):
        img = Image.open(img).convert('RGB')
    img_tensor = image_to_tensor(img)
    # resize expects a batch dimension, so add it for the call and drop it afterwards.
    resized_image = resize(img_tensor.unsqueeze(0), (50, 50)).squeeze(0)
    return resized_image
def predict(images, eps):
    eps = float(eps)
    # Load and resize every uploaded image, then stack them into a single batch.
    images = [read_image(img) for img in images]
    images = torch.stack(images, dim=0).to(device)
    # Fit the ZCA whitening transform on the batch and apply it.
    zca = K.enhance.ZCAWhitening(eps=eps, compute_inv=True)
    zca.fit(images)
    zca_images = zca(images)
    # Arrange the whitened images into a grid and return it as an HxWx3 array.
    grid_zca = make_grid(zca_images, nrow=3, normalize=True).cpu().numpy()
    return np.transpose(grid_zca, [1, 2, 0])
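# For quick local testing without launching the UI, predict can also be called
# directly on two of the bundled example images (a usage sketch, not part of the app flow):
#     grid = predict(["irises.jpg", "roses.jpg"], eps=0.05)
#     # grid is an HxWx3 float array in [0, 1], ready for display or saving.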
def load_example_images():
    return example_images
title = 'ZCA Whitening with Kornia!'
description = '''[ZCA Whitening](https://paperswithcode.com/method/zca-whitening) is an image preprocessing method that transforms the data so that its covariance matrix becomes the identity matrix, i.e. the features are decorrelated.
*Note that only image files (e.g. jpg, png) can be uploaded, and at least 2 images are required!*
Learn more about [ZCA Whitening and Kornia](https://kornia.readthedocs.io/en/latest/enhance.zca.html)'''
example_images = ['irises.jpg', 'roses.jpg', 'sunflower.jpg', 'violets.jpg', 'chamomile.jpg',
                  'tulips.jpg', 'Alstroemeria.jpg', 'Carnation.jpg', 'Orchid.jpg', 'Peony.jpg']
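# The helper below is not used by the app; it is a minimal sketch of the ZCA transform
# underlying kornia's ZCAWhitening, included to make the math behind the demo explicit.
# The name `_zca_whiten_reference` and its default eps are illustrative assumptions.
def _zca_whiten_reference(x: torch.Tensor, eps: float = 0.01) -> torch.Tensor:
    """ZCA-whiten a batch using W = U diag(1/sqrt(S + eps)) U^T from the data covariance."""
    # Flatten each image to a feature vector: (N, C, H, W) -> (N, D).
    flat = x.reshape(x.shape[0], -1)
    centered = flat - flat.mean(dim=0, keepdim=True)
    # Feature covariance matrix, shape (D, D).
    cov = centered.T @ centered / (flat.shape[0] - 1)
    # Eigendecomposition of the symmetric covariance; eps stabilizes small eigenvalues.
    eigvals, eigvecs = torch.linalg.eigh(cov)
    whitening = eigvecs @ torch.diag(1.0 / torch.sqrt(eigvals + eps)) @ eigvecs.T
    # The whitened data has (approximately) identity covariance, i.e. decorrelated features.
    return (centered @ whitening).reshape_as(x)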
with gr.Blocks(title=title) as demo:
    gr.Markdown(f"# {title}")
    gr.Markdown(description)
    with gr.Row():
        input_images = gr.Files(label="Input Images")
        eps_slider = gr.Slider(minimum=0.01, maximum=1, value=0.01, label="Epsilon")
    output_image = gr.Image(label="ZCA Whitened Images")
    submit_button = gr.Button("Apply ZCA Whitening")
    submit_button.click(fn=predict, inputs=[input_images, eps_slider], outputs=output_image)
    gr.Markdown("## Example Images")
    example_gallery = gr.Gallery(value=example_images, label="Example Images", columns=5, height="auto")
    load_examples_button = gr.Button("Load Example Images")
    load_examples_button.click(fn=load_example_images, inputs=[], outputs=[input_images])
if __name__ == "__main__":
    demo.launch(show_error=True)