# File size: 1,451 Bytes
# commit: 2d7efb8
import wandb
import click
import os
import sys
import pickle
import numpy as np
from PIL import Image
import torch
from configs import paths_config, hyperparameters, global_config
from IPython.display import display
import matplotlib.pyplot as plt
from scripts.latent_editor_wrapper import LatentEditorWrapper
# Directory containing pre-aligned/processed face images to invert.
image_dir_name = '/home/sayantan/processed_images'
# Single-identity tuning; flip to True to fine-tune on multiple identities at once.
use_multi_id_training = False
global_config.device = 'cuda'
# Pretrained e4e encoder used for the initial latent-code estimate.
paths_config.e4e = '/home/sayantan/PTI/pretrained_models/e4e_ffhq_encode.pt'
paths_config.input_data_id = image_dir_name
paths_config.input_data_path = f'{image_dir_name}'
# StyleGAN2-ADA generator pretrained on FFHQ.
paths_config.stylegan2_ada_ffhq = '/home/sayantan/PTI/pretrained_models/ffhq.pkl'
paths_config.checkpoints_dir = '/home/sayantan/PTI/'
paths_config.style_clip_pretrained_mappers = '/home/sayantan/PTI/pretrained_models'
hyperparameters.use_locality_regularization = False
# 'squeeze' = SqueezeNet backbone for the LPIPS perceptual loss (fast variant).
hyperparameters.lpips_type = 'squeeze'
# NOTE: imported *after* the config assignments above on purpose — run_pti
# (and its imports) read paths_config/hyperparameters at import time.
from scripts.run_pti import run_PTI
@click.command()
@click.pass_context
@click.option('--rname', prompt='wandb RUN NAME', help='The name to give for the wandb run')
def tune(ctx: click.Context, rname):
    """Run PTI tuning on the configured image set, logged to wandb as *rname*.

    Relies on the module-level paths_config / hyperparameters set above.
    The wandb run is always closed, even if run_PTI raises.
    """
    run = wandb.init(project='PTI', entity='masc', name=rname)
    try:
        # Use the module-level flag instead of a hard-coded False so the
        # script has a single source of truth for multi-id training.
        model_id = run_PTI(run_name='', use_wandb=True,
                           use_multi_id_training=use_multi_id_training)
        print(f'PTI finished; model id: {model_id}')
    finally:
        # Fix: the original never finished the run, leaving it dangling.
        run.finish()
#----------------------------------------------------------------------------
if __name__ == '__main__':
    # Click parses argv; --rname is prompted for interactively when omitted.
    tune()
#----------------------------------------------------------------------------