text stringlengths 0 4.99k |
|---|
# Get the images and the rays. |
(images, rays) = inputs |
(rays_flat, t_vals) = rays |
# Get the predictions from the model. |
rgb, _ = render_rgb_depth( |
model=self.nerf_model, rays_flat=rays_flat, t_vals=t_vals, rand=True |
) |
loss = self.loss_fn(images, rgb) |
# Get the PSNR of the reconstructed images and the source images. |
psnr = tf.image.psnr(images, rgb, max_val=1.0) |
# Compute our own metrics |
self.loss_tracker.update_state(loss) |
self.psnr_metric.update_state(psnr) |
return {\"loss\": self.loss_tracker.result(), \"psnr\": self.psnr_metric.result()} |
@property
def metrics(self):
    """Metrics tracked by this model; Keras resets these at each epoch start."""
    tracked = (self.loss_tracker, self.psnr_metric)
    return list(tracked)
# Grab one fixed batch from the training set so reconstruction progress is
# always visualized on the same rays.
test_imgs, test_rays = next(iter(train_ds))
(test_rays_flat, test_t_vals) = test_rays

# Per-epoch training losses, appended by the monitor callback for the loss plot.
loss_list = []
class TrainMonitor(keras.callbacks.Callback):
    """Renders the held-out test rays after every epoch and saves a figure."""

    def on_epoch_end(self, epoch, logs=None):
        """Plot the predicted RGB image, its depth map, and the loss curve."""
        loss_list.append(logs["loss"])

        # Render the fixed test rays with the current model weights.
        test_recons_images, depth_maps = render_rgb_depth(
            model=self.model.nerf_model,
            rays_flat=test_rays_flat,
            t_vals=test_t_vals,
            rand=True,
            train=False,
        )

        # Plot the rgb, depth and the loss plot.
        fig, axes = plt.subplots(nrows=1, ncols=3, figsize=(20, 5))
        rgb_ax, depth_ax, loss_ax = axes

        rgb_ax.imshow(keras.preprocessing.image.array_to_img(test_recons_images[0]))
        rgb_ax.set_title(f"Predicted Image: {epoch:03d}")

        # Depth map is single-channel; add a trailing axis for array_to_img.
        depth_ax.imshow(keras.preprocessing.image.array_to_img(depth_maps[0, ..., None]))
        depth_ax.set_title(f"Depth Map: {epoch:03d}")

        loss_ax.plot(loss_list)
        loss_ax.set_xticks(np.arange(0, EPOCHS + 1, 5.0))
        loss_ax.set_title(f"Loss Plot: {epoch:03d}")

        fig.savefig(f"images/{epoch:03d}.png")
        plt.show()
        plt.close()
# Total number of sample points fed to the MLP: one ray per pixel, each ray
# sampled NUM_SAMPLES times along its length.
num_pos = H * W * NUM_SAMPLES
nerf_model = get_nerf_model(num_layers=8, num_pos=num_pos)

model = NeRF(nerf_model)
model.compile(
    optimizer=keras.optimizers.Adam(), loss_fn=keras.losses.MeanSquaredError()
)

# Create a directory to save the images during training.
# exist_ok=True avoids the check-then-create race of os.path.exists + makedirs.
os.makedirs("images", exist_ok=True)

model.fit(
    train_ds,
    validation_data=val_ds,
    # NOTE(review): with a tf.data.Dataset input, Keras derives batching from
    # the dataset itself and batch_size here is ignored — confirm and drop.
    batch_size=BATCH_SIZE,
    epochs=EPOCHS,
    callbacks=[TrainMonitor()],
    steps_per_epoch=split_index // BATCH_SIZE,
)
def create_gif(path_to_images, name_gif):
    """Assemble every image matching *path_to_images* (glob pattern, sorted
    lexicographically) into an animated GIF written to *name_gif*."""
    frames = [
        imageio.imread(filename)
        for filename in tqdm(sorted(glob.glob(path_to_images)))
    ]
    # 0.25 s per frame, same pacing as the original keyword-dict call.
    imageio.mimsave(name_gif, frames, "GIF", duration=0.25)


create_gif("images/*.png", "training.gif")
Epoch 1/20 |
16/16 [==============================] - 15s 753ms/step - loss: 0.1134 - psnr: 9.7278 - val_loss: 0.0683 - val_psnr: 12.0722 |
png |
Epoch 2/20 |
16/16 [==============================] - 13s 752ms/step - loss: 0.0648 - psnr: 12.4200 - val_loss: 0.0664 - val_psnr: 12.1765 |
png |
Epoch 3/20 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.