import sklearn as skl

from sklearn.neural_network import MLPClassifier
#
# import numpy as np
# import pandas as pd
# import random
#
# random.seed(2016)
#
# sample_size = 1000
# sample = pd.Series(random.sample(range(-10000, 10000), sample_size))
#
# x = sample / 10000
# y = x ** 2
#
# from sklearn.neural_network import MLPRegressor
#
# mlp = MLPRegressor(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(5, 5), random_state=1)
# mlp.fit(np.array(x).reshape(-1, 1), y)
# print(mlp.n_layers_)
# print(mlp.n_iter_)
# print(mlp.loss_)
# print(mlp.out_activation_)

from sklearn.datasets import load_digits

# Load the 8x8 handwritten-digit dataset bundled with scikit-learn
# (1797 samples, 64 flattened pixel features each).
digits = load_digits()
print(digits.data.shape)

# Use matplotlib.pyplot directly: the `pylab` interface is deprecated and
# its use is discouraged by the matplotlib project.
import matplotlib.pyplot as plt

# Display sample index 1 as a grayscale image to sanity-check the data.
plt.imshow(digits.images[1],
           cmap=plt.cm.gray_r,
           interpolation='nearest')
plt.show()

import numpy as np
import random
import sklearn
from sklearn.model_selection import train_test_split

# Seed NumPy's global RNG for reproducibility of anything that draws from it.
np.random.seed(2019)
# Also pass random_state explicitly: the split then stays deterministic even
# if other code consumes the global NumPy RNG before this line executes.
# 80/20 train/test split of the digits data.
x_train, x_test, y_train, y_test = train_test_split(
    digits.data, digits.target, test_size=0.2, random_state=2019)

from sknn.mlp import Classifier, Layer
from sklearn.metrics import confusion_matrix

# Network topology: two Tanh hidden layers, one Sigmoid hidden layer, and a
# Softmax output layer producing per-class probabilities for the 10 digits.
network_layers = [
    Layer("Tanh", units=20),
    Layer("Tanh", units=30),
    Layer("Sigmoid", units=37),
    Layer("Softmax"),
]

mlp_classifier = Classifier(
    layers=network_layers,
    valid_size=0.25,            # hold out 25% of the training data for validation
    random_state=2018,
    learning_rule=u'adagrad',   # adaptive-gradient update rule
    learning_momentum=0.005,
    dropout_rate=0.05,
    batch_size=34,
    n_iter=100)                 # training epochs

# Fit on the training split, then evaluate on the held-out test split.
mlp_classifier.fit(x_train, y_train)
y_predict = mlp_classifier.predict(x_test)

# Per-class confusion matrix followed by overall accuracy.
print(confusion_matrix(y_test, y_predict))
print(mlp_classifier.score(x_test, y_test))
