import copy
from itertools import count
import os
import pickle

import click
import torch
from torch import optim
from torch.utils.data.dataloader import DataLoader
from torch.utils.tensorboard import SummaryWriter
from torchvision.utils import save_image
from tqdm import tqdm

import legacy
import dnnlib
from segmentation.utils import *

import warnings
warnings.filterwarnings('ignore')

def save_image_ds(gan_dir, seg_dir, save_dir, n_sample, batch_size=8):
    """Generate a paired (image, mask) PNG dataset from a trained GAN + segmenter.

    Samples ``n_sample * 128`` latent codes from the generator, runs the
    segmentation network on the generator's intermediate representations,
    and writes matching PNGs to ``save_dir/images`` and ``save_dir/masks``.

    Args:
        gan_dir: path to the StyleGAN snapshot pickle (must contain 'G_ema').
        seg_dir: path to the pickled segmentation net (must contain 'net').
        save_dir: output root; 'images/' and 'masks/' subdirs are created in it.
        n_sample: number of volumes; total slices written = n_sample * 128.
        batch_size: generation batch size; must evenly divide n_sample * 128.
    """
    n = n_sample * 128
    assert n % batch_size == 0
    if not os.path.exists(save_dir):
        print(f'make output dir: {save_dir}')
    # exist_ok: a partially-created output tree (save_dir present, subdirs
    # missing) no longer crashes the run
    os.makedirs(os.path.join(save_dir, 'images'), exist_ok=True)
    os.makedirs(os.path.join(save_dir, 'masks'), exist_ok=True)
    # init generator ===============================================================
    with dnnlib.util.open_url(gan_dir) as f:
        snapshot_data = legacy.load_network_pkl(f)
        G = snapshot_data['G_ema'].eval().requires_grad_(False).cuda()
        del snapshot_data  # drop the rest of the snapshot to free memory early
    get_representation = init_get_representation(G, [4, 8, 16, 32, 64, 128, 256], 256, 'random')
    print('load generator done!')

    # NOTE(review): pickle.load is only safe on trusted local checkpoints
    with open(seg_dir, 'rb') as f:
        Seg = pickle.load(f)['net'].eval().requires_grad_(False).cuda()
    print('load Seg done!')

    iters = n // batch_size
    print('iters', iters)
    saved = 0  # renamed from `count`, which shadowed itertools.count
    # prepare image mask
    with tqdm(range(iters), initial=0, dynamic_ncols=True, smoothing=0.01, ascii=True) as pbar:
        # fix: iterate pbar itself — the original looped over a separate
        # range(iters), so the progress bar never advanced
        for _ in pbar:
            with torch.no_grad():
                z = torch.randn(batch_size, G.z_dim).cuda()
                w = G.mapping(z, None)  # [N, L, C]
                image, representation = get_representation(w, False)
                # clip away background values above 0
                image = torch.clamp(image, -0.95, 1)
                for s in range(image.shape[0]):
                    lo, hi = image[s, 0].min(), image[s, 0].max()
                    if hi == lo:
                        # constant slice: avoid divide-by-zero, write zeros
                        image[s, 0].fill_(0)
                    else:
                        # min-max normalize each slice to [0, 1]
                        image[s, 0] = (image[s, 0] - lo) / (hi - lo)
                mask = Seg(representation)

                image = F.interpolate(image, (160, 160), mode='bilinear')
                # nearest keeps class logits from blending at label borders
                mask = F.interpolate(mask, (160, 160), mode='nearest')
                mask = mask.argmax(1)
                mask = np.uint8(mask.cpu().numpy())

                for s in range(image.shape[0]):
                    fname = f'{saved:05d}.png'  # same zero-padding as zfill(5)
                    save_image(image[s], os.path.join(save_dir, 'images', fname))
                    Image.fromarray(mask[s]).save(os.path.join(save_dir, 'masks', fname))

                    saved += 1
                    pbar.set_description(f'{saved}/{n}')

if __name__ == '__main__':
    # Each run: (GAN snapshot pickle, segmentation-net pickle, output dir, n_sample).
    runs = [
        (
            'save/00010-images-mirror-low_shot-kimg25000-batch32-color-translation-cutout/network-snapshot-best.pkl',
            'save/00010-images-mirror-low_shot-kimg25000-batch32-color-translation-cutout/handmark_5/Seg_BiFPN_128s_cbTrue_[4, 8, 16, 32, 64, 128, 256].pkl',
            'data/CANDI-128-160-my',
            103,
        ),
        (
            'save/00011-GAN_OASIS-mirror-low_shot-kimg25000-batch32-color-translation-cutout/network-snapshot-best.pkl',
            'save/00011-GAN_OASIS-mirror-low_shot-kimg25000-batch32-color-translation-cutout/handmark_7/Seg_BiFPN_128s_cbTrue_[4, 8, 16, 32, 64, 128, 256].pkl',
            'data/OASIS-128-160-my',
            357,
        ),
    ]
    for gan_pkl, seg_pkl, out_dir, n in runs:
        save_image_ds(
            gan_dir=gan_pkl,
            seg_dir=seg_pkl,
            save_dir=out_dir,
            n_sample=n,
            batch_size=16,
        )
# CUDA_VISIBLE_DEVICES=2 python save_image_ds.py