# -*- coding: utf-8 -*-
# @Time    : 2018/3/7 11:32
# @Author  : Deyu.Tian
# @Site    :
# @File    : config.py  (stale template header: this module *imports* config.py, it is not config.py)
# @Software: PyCharm Community Edition
from geotiff_tools import *
from config import *
import numpy as np

from sklearn.preprocessing import MinMaxScaler
from sklearn.semi_supervised import LabelSpreading
from sklearn.metrics import  confusion_matrix, classification_report

from scipy.sparse.csgraph import connected_components

np.seterr(divide='ignore', invalid='ignore')



def catgricfeaturesresponse():
    """Export categorical feature column 3 for test-set-3 rows.

    Loads the flattened categorical-feature table, drops nodata rows
    (second column == -999 sentinel), slices rows 20000..99999 of the
    valid samples, and writes their 4th column to a CSV under catgriDir.
    """
    all_points = np.load("{}/catgri_cut.npy".format(catgriDir))
    # Keep only rows whose second column holds a real value.
    valid = all_points[all_points[:, 1] > -999]
    print("samples:", len(valid))
    print(valid.shape)
    # Test set 3: rows 20000..99999 of the valid samples (slice clamps
    # silently if fewer rows exist, matching numpy slicing semantics).
    subset = np.arange(len(valid))[20000:100000]
    col3 = valid[subset, 3]
    print("max and min: ", np.max(col3), np.min(col3))

    np.savetxt("{}/catgrifeature_D_noOnehot_testset3.csv".format(catgriDir), col3, delimiter=',')




def numricfeaturesresponse(arr=None, step_start=-0.43, step_inc=0.03):
    """Print the mean response (column 1) per feature-value bin (column 0).

    Rows are sorted by feature value; each time a value crosses the
    current threshold, the mean of column 1 over the rows accumulated
    since the previous crossing is printed and the threshold advances
    by ``step_inc``.

    Parameters
    ----------
    arr : ndarray of shape (n, 2), optional
        Feature/response pairs.  When None (default), pairs are loaded
        from ``feature8_noscale_testset3.csv`` under ``featureDir``,
        preserving the original behaviour.
    step_start : float
        Initial bin threshold (was hard-coded to -0.43).
    step_inc : float
        Threshold increment per crossing (was hard-coded to 0.03).
    """
    if arr is None:
        arr = np.loadtxt("{}/feature8_noscale_testset3.csv".format(featureDir),
                         skiprows=1, delimiter=',', usecols=[0, 1])
    print(len(arr))
    arr = arr[arr[:, 0].argsort()]
    print(arr)
    step = step_start
    start = 0
    for i in range(len(arr)):
        if arr[i, 0] > step:
            # Mean response of the rows accumulated in the current bin.
            # NOTE(review): the threshold advances one step_inc per
            # crossing, so it can lag behind sparse data — confirm this
            # matches the intended binning.
            print(np.average(arr[start:i, 1]))
            start = i
            step = step + step_inc

def shuffedlabel2geotiff():
    """Re-order shuffled predicted labels to raster order and write a GeoTIFF.

    Recovers each sample's flat pixel index (column 0 of the shuffled
    sample table), pairs it with the shuffled prediction vector, and
    rasterises the labels onto a 528 x 1300 grid (-1 marks pixels with
    no prediction).  Georeferencing is copied from
    ``numric_onehot_buffer.tif``.
    """
    samples = np.load("{}/train_X_Y_1_minmaxscale.npy".format(bufferDir))
    samples = samples[samples[:, -1].argsort()]
    shuffed_n = samples[:, 0]
    shuffed_y = np.load("{}/predictlabel_1_shuffed.npy".format(trainDir))
    arr = np.zeros((len(shuffed_n), 2))
    arr[:, 0] = shuffed_n
    arr[:, 1] = shuffed_y
    print(arr.shape)
    arr = arr[arr[:, 0].argsort()]

    image = np.full((528, 1300), -1)
    # Build an index -> label map in one pass (first occurrence in sorted
    # order wins, matching the original "[0]" lookup) instead of scanning
    # the whole array once per pixel, which was O(pixels * samples).
    # Assumes column 0 holds exactly-integral pixel indices, as written
    # by tiff2features — TODO confirm.
    label_of = {}
    for idx, lab in arr:
        key = int(idx)
        if key not in label_of:
            label_of[key] = int(lab)
    for key, lab in label_of.items():
        i, j = divmod(key, 1300)
        if 0 <= i < 528:
            image[i, j] = lab

    imggt = read_tif_metadata("{}/numric_onehot_buffer.tif".format(bufferDir))
    array2rasterUTM("{}/predictlabel_1.tif".format(trainDir), imggt, image)




def label_spreading():
    """Semi-supervised LabelSpreading experiment over the buffered samples.

    NOTE(review): the fit / evaluation / predict_proba sections below are
    all commented out, so as currently written this function only loads
    the unscaled samples and exports feature column 8 of test set 3 to
    CSV.  The commented code is kept for experiment bookkeeping.
    """

    # Min-max scaled sample table: [pixel index, 40 features, label],
    # sorted here by the label column.
    samples = np.load("{}/train_X_Y_1_minmaxscale.npy".format(bufferDir))
    samples = samples[samples[:, -1].argsort()]
    # print(samples[200:400, 41])
    indices = np.arange(len(samples))

    X = samples[:, 1:41]   # the 40 feature columns
    print("X shape:", X.shape)
    y = samples[:, 41]     # the label column
    print("y shape:" ,y.shape)

    # n_labeled_points_train = 6800
    n_labeled_points = 6914
    n_labeled_points_train = 10000
    n_labeled_points = 10646 #train_buffer_1  (NOTE: overrides the 6914 above)

    # Rows past the training range plus the first 800 rows are treated as
    # unlabeled for the spreading step.
    unlabeled_set = np.concatenate((indices[n_labeled_points_train: ], indices[0:800]))
    test_set1 = indices[n_labeled_points_train : n_labeled_points]
    # test_set2 = indices[0:799]
    test_set3 =  indices[20000: 100000]

    y_train = np.copy(y)
    y_train[unlabeled_set] = -1  # -1 is LabelSpreading's "unlabeled" marker

    #learn with LabelSpreading
    label_prop_model = LabelSpreading(kernel='knn', n_neighbors=18, max_iter=30, n_jobs=8)
    # label_prop_model.fit(X, y_train)

    # judge
    # predicted_labels = label_prop_model.transduction_[test_set1]
    # true_labels = y[test_set1]
    # cm = confusion_matrix(true_labels, predicted_labels, labels=label_prop_model.classes_)
    #
    # print(classification_report(true_labels, predicted_labels))
    # print("Confusion matrix")
    # print(cm)
    #
    # predicted_labels = label_prop_model.transduction_[test_set2]
    # true_labels = y[test_set2]
    # cm = confusion_matrix(true_labels, predicted_labels, labels=label_prop_model.classes_)
    #
    # print(classification_report(true_labels, predicted_labels))
    # print("Confusion matrix")
    # print(cm)


    #predict
    # y_pre_prob = label_prop_model.predict_proba(X[test_set3, :])
    # prob_of_true = y_pre_prob[:, 1]
    # prob_of_false = y_pre_prob[:, 0]

    # Export the UNSCALED feature column 8 of test set 3 (note: reloads
    # the raw table, not the min-max scaled one used above).
    totalpoints = np.load("{}/train_X_Y_1.npy".format(bufferDir))
    samples = totalpoints[totalpoints[:, 1] > -999]
    print("samples:", len(samples))
    features = samples[:, 1:41]
    feature8 = features[test_set3, 8]
    print("max and min: ", np.max(feature8), np.min(feature8))

    # np.savetxt("{}/prob_of_testset3_false.csv".format(featureDir), prob_of_false, delimiter=',')
    # np.savetxt("{}/prob_of_testset3_true.csv".format(featureDir), prob_of_true, delimiter=',')
    np.savetxt("{}/feature8_noscale_testset3.csv".format(featureDir), feature8, delimiter=',')



def standalization():
    """Min-max scale the 40 feature columns (1..40) to [-1, 1].

    Drops nodata rows (second column == -999), fits a MinMaxScaler on
    the feature block, writes the scaled values back in place, and saves
    the result as both .npy and .csv under bufferDir.
    (Function name kept as-is for compatibility with existing callers.)
    """
    raw = np.load("{}/train_X_Y_1.npy".format(bufferDir))
    valid = raw[raw[:, 1] > -999]
    print("samples:", len(valid))
    feature_block = valid[:, 1:41]
    print(feature_block.shape)
    scaler = MinMaxScaler(feature_range=(-1, 1))
    fitted = scaler.fit(feature_block)
    print(fitted)
    print(scaler.data_max_)
    # Write the scaled values back into the sample table in place.
    valid[:, 1:41] = scaler.transform(feature_block)
    print(valid.shape)
    np.save("{}/train_X_Y_1_minmaxscale.npy".format(bufferDir), valid)
    np.savetxt("{}/train_X_Y_1_minmaxscale.csv".format(bufferDir), valid, delimiter=',')


def tiff2features(X, Y):
    """Flatten a (bands, rows, cols) feature stack and a (rows, cols)
    label raster into a sample table and save it under bufferDir.

    Output row n (= i * cols + j) is [n, X[:, i, j]..., Y[i, j]]: flat
    pixel index, the per-band feature values, then the label.  Saved as
    both .npy and .csv.  Shapes are now inferred from X instead of the
    original hard-coded (40, 528, 1300).
    """
    bands, rows, cols = X.shape
    # Vectorised equivalent of the original per-pixel double loop:
    # reshape the band-major stack to (pixels, bands) and ravel the
    # labels in the same row-major order as n = i * cols + j.
    samples = np.zeros((rows * cols, bands + 2))
    samples[:, 0] = np.arange(rows * cols)
    samples[:, 1:bands + 1] = X.reshape(bands, -1).T
    samples[:, bands + 1] = Y.reshape(-1)

    np.save("{}/train_X_Y_1.npy".format(bufferDir), samples)
    np.savetxt("{}/train_X_Y_1.csv".format(bufferDir), samples, delimiter=',')



def catgric2array():
    """Flatten the stacked categorical-feature GeoTIFF into a
    (pixels, bands) sample table and save it under catgriDir.

    Row n (= i * cols + j) holds the band values of pixel (i, j).
    Shapes are inferred from the raster (originally hard-coded
    (4, 528, 1300)).
    """
    X = read_tiff("{}/stacked_categri_features_cut.tif".format(catgriDir))
    bands, rows, cols = X.shape
    # Vectorised replacement for the per-pixel double loop: band-major
    # (bands, rows, cols) -> row-major (rows * cols, bands), cast to
    # float via the zeros buffer exactly as the original assignment did.
    samples = np.zeros((rows * cols, bands))
    samples[:, :] = X.reshape(bands, -1).T

    np.save("{}/catgri_cut.npy".format(catgriDir), samples)
    np.savetxt("{}/catgri_cut.csv".format(catgriDir), samples, delimiter=',')


if __name__ == '__main__':
    # Manual pipeline driver: un-comment the step to run.  Steps are
    # ordered — each consumes files written by the ones above it.
    # X = read_tiff("{}/numric_onehot_buffer.tif".format(bufferDir))
    # print(X.shape)
    # Y = read_tiff("{}/train_label_1_buffer.tif".format(bufferDir))
    # print(Y.shape)
    # tiff2features(X, Y)
    # standalization()
    # label_spreading()
    # shuffedlabel2geotiff()
    # numricfeaturesresponse()
    catgricfeaturesresponse()
    # catgric2array()