from  predigit import DIGITS,showdigit
import numpy as  np

# Load the handwritten-digit data through the project-local predigit API.
digits = DIGITS()
#datatr,datate = digits.getSimpleData()
# NOTE(review): the boolean flag presumably selects the test split — confirm in predigit.
datatr = digits.getImgsSet()
datate = digits.getImgsSet(True)

# Labels for the corresponding image sets (same flag convention as above).
trainlabel = digits.getImgsLabelSet()
testlabel = digits.getImgsLabelSet(True)

# Debug marker left by the author; prints at import time.
print("what")
import matplotlib.pyplot as plt

from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import learning_curve



# MLP with a single hidden layer of 28 units (default Layer_conf=(28,))
class MLP1:
    """Single-hidden-layer MLP over the module-level digit data sets.

    Slices the first ``trainCnt``/``testCnt`` samples out of the
    module-level ``datatr``/``datate`` arrays, trains an sklearn
    ``MLPClassifier`` on them, and reports a test-set score.
    """

    # Class-level defaults; every one is overwritten per-instance or per-call.
    trainCnt = None          # number of training samples used
    testCnt = None           # number of test samples used
    trainSet = None
    trainLabel = None
    testSet = None
    testLabel = None
    M = None                 # fitted MLPClassifier, set by train()
    scaler = None            # unused here; presumably reserved for feature scaling
    learning_result = None   # (train_sizes, train_scores, test_scores) from learning_curve()
    Layer_conf = None        # hidden-layer sizes last passed to train()

    def __init__(self, trainCnt=300, testCnt=200):
        """Take the first ``trainCnt`` training and ``testCnt`` test samples.

        Raises:
            AssertionError: if the training set has fewer than ``trainCnt`` samples.
        """
        # BUG FIX: the original compared against datatr.size, which for a
        # numpy image array is the TOTAL element count (samples x pixels),
        # so the guard effectively never fired.  Compare sample counts.
        assert trainCnt <= len(datatr), "训练集不足"
        self.trainCnt = trainCnt
        self.testCnt = testCnt
        self.trainSet = datatr[0:trainCnt]
        self.trainLabel = trainlabel[0:trainCnt]
        self.testSet = datate[0:testCnt]
        self.testLabel = testlabel[0:testCnt]

    def train(self, Layer_conf=(28,)):
        """Fit an MLPClassifier on the training slice and print a summary.

        Args:
            Layer_conf: tuple of hidden-layer sizes passed to the classifier.
        """
        self.Layer_conf = Layer_conf
        y = self.trainLabel
        X = self.trainSet

        M = MLPClassifier(solver='sgd',
                          activation='relu',
                          learning_rate_init=0.001,
                          power_t=0.5,
                          max_iter=200,
                          verbose=True,
                          shuffle=False,
                          early_stopping=True,
                          hidden_layer_sizes=Layer_conf)
        M.fit(X, y)

        print("layers:", M.n_layers_)
        print("classes:", M.classes_)
        print("output_cnt:", M.n_outputs_)
        print("itercnt:", M.n_iter_)
        print("loss:", M.loss_)
        print("out_fun:", M.out_activation_)
        print("params:", M.get_params())

        self.M = M

    def learning_curve(self):
        """Compute sklearn's learning curve for a fresh, unfitted copy of the model.

        Stores the (train_sizes, train_scores, test_scores) tuple in
        ``self.learning_result`` for later plotting.  Uses ``self.Layer_conf``,
        so call train() first.
        """
        X = self.trainSet
        y = self.trainLabel
        M = MLPClassifier(solver='sgd',
                          activation='relu',
                          learning_rate_init=0.001,
                          power_t=0.5,
                          max_iter=200,
                          verbose=False,
                          shuffle=False,
                          early_stopping=True,
                          hidden_layer_sizes=self.Layer_conf)
        # NOTE: inside this method the bare name `learning_curve` resolves to
        # the module-level sklearn function, not this method — intentional,
        # but easy to misread.
        self.learning_result = learning_curve(M, X, y)

    def test(self):
        """Print the fitted model's mean accuracy on the test slice.

        Raises:
            AssertionError: if train() has not been called yet.
        """
        # Idiom fix: identity comparison with None (`is not`), not `!=`.
        assert self.M is not None, "without model"

        TX = self.testSet
        Ty = self.testLabel
        print("test_score:", self.M.score(TX, Ty))

# MLP with two hidden layers — default Layer_conf=(14, 2)
class MLP2(MLP1):
    """Variant of MLP1 whose default architecture is two hidden layers (14, 2)."""

    def __init__(self, t_c, test_cnt):
        """Forward the train/test sample counts straight to MLP1."""
        super().__init__(t_c, test_cnt)

    def train(self, Layer_conf=(14, 2)):
        """Train with the two-layer default; all work is delegated to MLP1.train."""
        super().train(Layer_conf)
def plot_learning_curve(m, title='learning_curve'):
    """Plot the train/cross-validation score bands stored on a model object.

    Args:
        m: an object (e.g. MLP1) whose ``learning_curve()`` has already been
           called, so ``m.learning_result`` holds
           (train_sizes, train_scores, test_scores) as returned by sklearn.
        title: figure title.

    Raises:
        AssertionError: if ``m.learning_result`` has not been populated.
    """
    # Robustness: fail early with a clear message instead of the opaque
    # "cannot unpack non-iterable NoneType" TypeError the original produced.
    assert m.learning_result is not None, "call m.learning_curve() before plotting"

    plt.figure()
    plt.title(title)
    plt.xlabel("samples")
    plt.ylabel("Score")

    train_sizes, train_scores, test_scores = m.learning_result
    # Mean/std across the CV folds (axis=1) at each training-set size.
    train_scores_mean = np.mean(train_scores, axis=1)
    train_scores_std = np.std(train_scores, axis=1)
    test_scores_mean = np.mean(test_scores, axis=1)
    test_scores_std = np.std(test_scores, axis=1)

    plt.grid()
    # Shade a +/-1 standard-deviation band around each mean curve.
    plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
                     train_scores_mean + train_scores_std, alpha=0.1,
                     color="r")
    plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
                     test_scores_mean + test_scores_std, alpha=0.1, color="g")
    plt.plot(train_sizes, train_scores_mean, 'o-', color="r",
             label="Training score")
    plt.plot(train_sizes, test_scores_mean, 'o-', color="g",
             label="Cross-validation score")

    plt.legend(loc="best")

    plt.show()
