RobotJelly committed on
Commit 9b2b534
1 Parent(s): 838dc8b
Files changed (1)
  1. app.py +97 -0
app.py ADDED
@@ -0,0 +1,97 @@
+ import os
+ import random
+ import numpy as np
+ from tqdm import tqdm
+ import matplotlib.pyplot as plt
+
+ import tensorflow as tf
+ import tensorflow_addons as tfa
+ from tensorflow import keras
+ from tensorflow.keras import layers
+
+ from glob import glob
+ from PIL import Image
+
+ import gradio as gr
+ from huggingface_hub import from_pretrained_keras
+
+ # Load the pre-trained GauGAN generator from the Hugging Face Hub.
+ model = from_pretrained_keras("RobotJelly/GauGAN-Image-generation")
+
+
+
+ def predict(image_file):
+     # print(image_file)
+     # img = Image.open(image_file)
+     # image_file = str(img)
+     print("image_file-->", image_file)
+
+     image_list = []
+
+     # Derive the segmentation-map and label paths from the real-image path.
+     segmentation_map = image_file.replace("images", "segmentation_map").replace("jpg", "png")
+
+     labels = image_file.replace("images", "segmentation_labels").replace("jpg", "bmp")
+     print("labels", labels)
+
+     image_list = [segmentation_map, image_file, labels]
+
+     image = tf.image.decode_png(tf.io.read_file(image_list[1]), channels=3)
+     image = tf.cast(image, tf.float32) / 127.5 - 1
+
+     segmentation_file = tf.image.decode_png(tf.io.read_file(image_list[0]), channels=3)
+     segmentation_file = tf.cast(segmentation_file, tf.float32) / 127.5 - 1
+
+     label_file = tf.image.decode_bmp(tf.io.read_file(image_list[2]), channels=0)
+     label_file = tf.squeeze(label_file)
+
+     image_list = [segmentation_file, image, label_file]
+
+     # Take the same random 256x256 crop from the image, the segmentation map and the labels.
+     crop_size = tf.convert_to_tensor((256, 256))
+
+     image_shape = tf.shape(image_list[1])[:2]
+
+     margins = image_shape - crop_size
+
+     y1 = tf.random.uniform(shape=(), maxval=margins[0], dtype=tf.int32)
+     x1 = tf.random.uniform(shape=(), maxval=margins[1], dtype=tf.int32)
+     y2 = y1 + crop_size[0]
+     x2 = x1 + crop_size[1]
+
+     cropped_images = []
+     for img in image_list:
+         cropped_images.append(img[y1:y2, x1:x2])
+
+     # Add a batch dimension and one-hot encode the labels into 12 classes.
+     final_img_list = [tf.expand_dims(cropped_images[0], axis=0), tf.expand_dims(cropped_images[1], axis=0), tf.expand_dims(tf.one_hot(cropped_images[2], 12), axis=0)]
+
+     # print(final_img_list[0].shape)
+     # print(final_img_list[1].shape)
+     # print(final_img_list[2].shape)
+
+     latent_vector = tf.random.normal(shape=(1, 256), mean=0.0, stddev=2.0)
+
+     # Generate a fake image conditioned on the one-hot label map.
+     # fake_image = tf.squeeze(model.predict([latent_vector, final_img_list[2]]), axis=0)
+     fake_image = model.predict([latent_vector, final_img_list[2]])
+
+     real_images = final_img_list
+
+     # Rescale the segmentation map and the generated image from [-1, 1] to [0, 1] for display.
+     # return tf.squeeze(real_images[1], axis=0), fake_image
+     return [(real_images[0][0] + 1) / 2, (fake_image[0] + 1) / 2]
+
+ # Input: the ground-truth facade photo, passed to predict() as a file path.
+ input = [gr.inputs.Image(type="filepath", label="Ground Truth - Real Image")]
+
+ # Use up to six files from examples/ as clickable Gradio examples.
+ facades_data = []
+ data_dir = 'examples/'
+ for idx, images in enumerate(os.listdir(data_dir)):
+     image = os.path.join(data_dir, images)
+     if os.path.isfile(image) and idx < 6:
+         facades_data.append(image)
+
+ # Outputs: the segmentation map used for conditioning and the generated image.
+ output = [gr.outputs.Image(type="numpy", label="Mask/Segmentation used"), gr.outputs.Image(type="numpy", label="Generated - Conditioned Images")]
+
+ title = "GauGAN For Conditional Image Generation"
+ description = "Upload an image or pick one of the examples to generate realistic images conditioned on cue images and segmentation maps"
+
+ gr.Interface(fn=predict, inputs=input, outputs=output, examples=facades_data, allow_flagging=False, analytics_enabled=False,
+              title=title, description=description, article="<center>Space By: <u><a href='https://github.com/robotjellyzone'><b>Kavya Bisht</b></a></u> \n Based on <a href='https://keras.io/examples/generative/gaugan/'><b>this notebook</b></a></center>").launch(enable_queue=True, debug=True)
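
predict() locates its conditioning inputs purely by string replacement on the selected image's path, so it expects the real photo to sit under a path containing "images", with matching "segmentation_map" (PNG) and "segmentation_labels" (BMP) counterparts. A minimal sketch of that assumption; the facades_data/ directory and cmp_b0001 file name below are illustrative only, not files in this repo:

# Layout assumed by the string replacements in predict():
#   facades_data/images/cmp_b0001.jpg                real photo (Gradio input)
#   facades_data/segmentation_map/cmp_b0001.png      colour segmentation map (first output)
#   facades_data/segmentation_labels/cmp_b0001.bmp   per-pixel class labels, one-hot encoded into 12 classes

# Quick local smoke test under that layout:
segmentation_shown, generated = predict("facades_data/images/cmp_b0001.jpg")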