import os
import argparse
from tqdm import tqdm
import numpy as np
from pprint import pprint
import random

from data.dataset import build_dataset
from data.utils import read_image
from data.preprocess.transform import NearestNeighbor

def parse_args():
    """Parse and return the command-line arguments for this script."""
    parser = argparse.ArgumentParser(description='calculate mean and std of dataset')
    # Table-driven registration keeps the three required options in one place.
    for flags, options in (
        (('--datasets',), dict(type=str, help='datasets name', required=True)),
        (('-n', '--num'), dict(type=int, help='number of images', required=True)),
        (('--centers',), dict(type=str, help='centers path', required=True)),
    ):
        parser.add_argument(*flags, **options)
    return parser.parse_args()

class TinyDataset:
    """A random subset of one or more named datasets, yielding raw images.

    Supports ``len()`` and integer indexing; plain ``for`` iteration works
    through the sequence protocol (indexing past the end raises IndexError
    from the underlying list).
    """

    def __init__(self, dataset_names, max_size):
        # Each record is a dict; __getitem__ relies on its 'file_name' key.
        self.dataset_dicts = build_dataset(*dataset_names)
        # Clamp so requesting more images than available keeps them all
        # instead of raising ValueError from random.sample.
        sample_size = min(max_size, len(self.dataset_dicts))
        self.dataset_dicts = random.sample(self.dataset_dicts, sample_size)

    def __len__(self):
        return len(self.dataset_dicts)

    def __getitem__(self, index):
        filename = self.dataset_dicts[index]['file_name']
        image = read_image(filename)
        return image

if __name__ == '__main__':
    args = parse_args()

    # One "<name>-train" split per comma-separated dataset name.
    train_datasets = ["{}-train".format(name) for name in args.datasets.split(',')]
    dataset = TinyDataset(train_datasets, max_size=args.num)

    transform = NearestNeighbor(args.centers)

    # Running per-channel statistics pooled over every pixel seen so far.
    running_mean = 0
    running_var = 0
    total_pixels = 0
    for image in tqdm(dataset):
        image = transform(image)
        height, width = image.shape[:2]
        n_pixels = height * width
        img_mean = image.mean((0, 1))
        img_var = image.var((0, 1))
        combined = total_pixels + n_pixels
        # Standard formula for merging mean/variance of two populations.
        new_mean = (total_pixels * running_mean + n_pixels * img_mean) / combined
        running_var = (
            total_pixels * (running_var + (new_mean - running_mean) ** 2)
            + n_pixels * (img_var + (new_mean - img_mean) ** 2)
        ) / combined
        running_mean = new_mean
        total_pixels = combined

    print("mean:", running_mean)
    print("std:", running_var ** 0.5)