import os
from copy import deepcopy
from posixpath import dirname
import numpy as np
import argparse
import innvestigate.utils as iutils
from keras.models import Model
from keras.utils import to_categorical
from keras import backend as K
import keras
import tensorflow as tf
import pandas as pd
from nmf import NMF
from functools import partial

from utils import *
from lrp import *
from transfers import *
import pdb

def get_non_negative_features(images_files, model, preprocess_input, image_size):
    """Run every image through ``model`` and return the stacked feature array.

    Parameters
    ----------
    images_files : iterable of image file paths.
    model : K.function mapping ``[input batch]`` -> ``[feature batch]``.
    preprocess_input : network-specific preprocessing callable.
    image_size : spatial size expected by the network input.

    Returns
    -------
    np.ndarray : features for all images, concatenated along axis 0.
    """
    # Load + preprocess each image, adding a leading batch axis, then stack.
    batch = np.concatenate(
        [preprocess_input(process_sample(path, image_size))[None, ...]
         for path in images_files],
        axis=0,
    )
    # Feed the stacked images through the extractor in chunks of 16.
    extracted = [model([chunk])[0] for chunk in data_loader(batch, batch_size=16)]
    return np.concatenate(extracted, axis=0)


def solve_nmf(A, file_stream=None, cp=None):
    """Factor the non-negative matrix ``A`` into ``S`` and ``P`` via NMF.

    Parameters
    ----------
    A : 2-D non-negative array to factor.
    file_stream : optional open text stream; reconstruction diagnostics are
        echoed there in addition to stdout.
    cp : rank (number of components) of the factorization.

    Returns
    -------
    (S, P) : the two factor matrices produced by ``NMF.fit_transform``.
    """
    solver = NMF(max_iter=10000, learning_rate=0.001,
                 display_step=100, optimizer='adam', file_stream=file_stream)
    S, P = solver.fit_transform(A, r_components=cp, initW=False, givenW=0)
    # Print the original matrix next to its reconstruction as a sanity check.
    print(A)
    print(solver.inverse_transform(S, P))
    if file_stream is not None:
        print('-' * 40, file=file_stream)
        print(A, file=file_stream)
        print(solver.inverse_transform(S, P), file=file_stream)
        print('-' * 40, file=file_stream)
    return S, P

class nmf_solver:
    """Extract per-class NMF concept matrices from ImageNet features.

    For each network in ``net_names`` the pipeline is:
      1. extract non-negative intermediate features for every ImageNet class,
      2. factor each class's features with NMF for every rank in ``c_prime``,
      3. cache both per-class matrices and the bundled archive on disk.
    """

    def __init__(self, c_prime, net_names):
        dir_name = "./results_imagenet/train"
        # Path templates; concept_p gets net/rank/class suffixes appended later.
        self.concept_p = os.path.join(dir_name, "concepts", "concept_p")
        self.log_file = os.path.join(dir_name, "log", "{}_{}.log")
        self.feature_path = os.path.join(dir_name, "features", "class_{}_data")
        self.c_prime = c_prime
        self.net_names = net_names

        def check_files(paths):
            # Create the parent directory of every output path up front.
            # exist_ok avoids a race/crash when the directory already exists.
            for path in paths:
                os.makedirs(os.path.dirname(path), exist_ok=True)
        check_files([self.concept_p, self.log_file, self.feature_path])

        train_data = "/data/fangzhengwei/datasets/imagenet/train"
        # warning: label range from 1 to 1000 (not 0 to 999)
        _, self.label_wnid_dict = get_wnid_label_dict(path="./wnid_to_id.txt")
        self.get_trainfiles_by_label = partial(
            get_train_image_files_by_label,
            dir_name=train_data,
            label_wnid_dict=self.label_wnid_dict)

    def get_features(self, get_features, feature_path):
        """Extract and cache features for all 1000 classes.

        Classes whose ``.npy`` file already exists are skipped, so the
        method is safe to re-run after an interruption.
        """
        feature_path = feature_path + "_{}.npy"
        for i in range(1, 1001):
            path = feature_path.format(i)
            if os.path.exists(path):
                continue
            images_files_list = self.get_trainfiles_by_label(label=str(i))
            print("extracting features on class {}, totally {} images".format(i, len(images_files_list)))
            features = get_features(images_files_list)
            print("feature shape ", features.shape)
            print("saving features in {}".format(path))
            np.save(path, features)

    def get_class_concept(self, net_name, cp, feature_path):
        """Solve (or load cached) per-class NMF concepts for one net/rank.

        Returns a mapping from class id (str, "1".."1000") to its concept
        matrix ``P`` — a plain dict when freshly solved, an ``NpzFile``
        when loaded from the bundled cache.
        """
        concept_p = self.concept_p + f"_{net_name}_{cp}"
        log_path = self.log_file.format(net_name, cp)
        # np.savez appends ".npz" to a name without that suffix, so the
        # cache must be probed with the same suffix (the bare-path check
        # previously never hit and the NMF was re-solved every run).
        bundled = concept_p + ".npz"
        if os.path.exists(bundled):
            return np.load(bundled)

        concept_dict_P = {}
        feature_path = feature_path + "_{}.npy"
        # `with` guarantees the log stream is closed even if a solve fails.
        with open(log_path, "w") as file_stream:
            for i in range(1, 1001):
                concept_p_cp = concept_p + f"_class{i}.npy"
                if os.path.exists(concept_p_cp):
                    P = np.load(concept_p_cp)
                else:
                    features = np.load(feature_path.format(i))
                    # Only the first 50 samples keep the NMF tractable.
                    features = features[:50]
                    features = features.reshape(-1, features.shape[-1])
                    print("solving class {} nmf features, features shape ".format(i), features.shape)
                    _, P = solve_nmf(features, file_stream, cp)
                    np.save(concept_p_cp, P)
                # update concept_dict
                concept_dict_P[str(i)] = P
                file_stream.flush()
        np.savez(concept_p, **concept_dict_P)
        return concept_dict_P

    def solve(self):
        """Run the full pipeline for every configured network and rank."""
        layers_dict = parse_json(key="nmf_layers")
        for net_name in self.net_names:
            model, preprocess_input = load_net(net_name)
            image_size = int(model.input.shape[1])
            feature_extractor = K.function(
                [model.input],
                [model.get_layer(layers_dict[net_name]).output]
            )
            # get features for all class in imagenet
            get_features = partial(get_non_negative_features, model=feature_extractor,
                                   preprocess_input=preprocess_input, image_size=image_size)
            feature_path = self.feature_path.format(net_name)
            self.get_features(get_features, feature_path=feature_path)

            for cp in self.c_prime:
                # get_class_concept returns a 1000-entry mapping; the former
                # `_, _ = ...` tried to unpack it into two names and raised
                # ValueError ("too many values to unpack") at runtime.
                self.get_class_concept(net_name, cp, feature_path)

if __name__ == '__main__':
    # CLI: start_id/end_id are accepted for interface compatibility with
    # sibling scripts; only gpu_id is actually consumed here.
    arg_parser = argparse.ArgumentParser(
        description='', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument('start_id', type=int, help='start_id')
    arg_parser.add_argument('end_id', type=int, help='end_id')
    arg_parser.add_argument('gpu_id', help='GPU(s) used')
    cli_args, _ = arg_parser.parse_known_args()
    # Pin TensorFlow to the requested GPU(s) before any model is built.
    os.environ["CUDA_VISIBLE_DEVICES"] = cli_args.gpu_id

    # Factor ResNet50 features at NMF ranks 5, 10, ..., 50.
    ranks = list(range(5, 51, 5))
    nmf_solver(ranks, ['ResNet50']).solve()