```python
import numpy as np
import torch
from tqdm import tqdm

# Some parameters
n_points = 6   #@param
n_steps = 300  #@param

# One random latent per key point (256 = latent dimension expected by the generator)
latents = torch.randn(n_points, 256)

# Loop through, generating the frames
frames = []
for i in tqdm(range(n_steps)):
    # Indices of the two key latents we're interpolating between
    p1 = max(0, int(n_points*i/n_steps))
    p2 = min(n_points, int(n_points*i/n_steps)+1) % n_points  # so it wraps back to 0
    # How far along we are between p1 and p2 (0 -> 1)
    frac = (i - (p1*(n_steps/n_points))) / (n_steps/n_points)
    # Linear interpolation between the two latents
    l = latents[p1]*(1-frac) + latents[p2]*frac
    # Generate an image from the interpolated latent (model.G is the generator defined earlier)
    im = model.G(l.unsqueeze(0)).clamp_(0., 1.)
    # Convert to an HxWx3 uint8 array
    frame = (im[0].permute(1, 2, 0).detach().cpu().numpy()*255).astype(np.uint8)
    frames.append(frame)
```
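
At this point `frames` is just a list of uint8 arrays; to actually watch the interpolation you can stitch them into an animation. Here's a minimal sketch that writes a looping GIF with Pillow (the filename `interpolation.gif` and the 30 fps frame delay are just example choices, not anything from the code above):

```python
from PIL import Image

# Convert the numpy frames to PIL images and save them as a looping GIF.
# duration is the per-frame delay in milliseconds (~33 ms ≈ 30 fps).
pil_frames = [Image.fromarray(f) for f in frames]
pil_frames[0].save(
    "interpolation.gif",
    save_all=True,
    append_images=pil_frames[1:],
    duration=33,
    loop=0,  # loop forever
)
```

A GIF keeps things dependency-light; if you'd rather have an mp4, the same list of frames can be passed to a video writer such as imageio's (with its ffmpeg backend installed) instead.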