#!/usr/bin/python
# -*-coding:utf-8-*-

# from gplearn.genetic import SymbolicRegressor
from gplearn_evolve.genetic import SymbolicRegressor
from sklearn.utils.random import check_random_state
import numpy as np

import graphviz

# # Ground truth
# x0 = np.arange(-1, 1, .1)
# x1 = np.arange(-1, 1, .1)
# x0, x1 = np.meshgrid(x0, x1)
# y_truth = x0**2 - x1**2 + x1 - 1
#
# ax = plt.figure().gca(projection='3d')
# ax.set_xlim(-1, 1)
# ax.set_ylim(-1, 1)
# ax.set_xticks(np.arange(-1, 1.01, .5))
# ax.set_yticks(np.arange(-1, 1.01, .5))
# surf = ax.plot_surface(x0, x1, y_truth, rstride=1, cstride=1, color='green', alpha=0.5)
# plt.show()


# Seeded RNG so the demo is reproducible run-to-run.
rng = check_random_state(0)


def _ground_truth(X):
    """Target function the GP should recover: x0**2 - x1**2 + x1 - 1."""
    return X[:, 0] ** 2 - X[:, 1] ** 2 + X[:, 1] - 1


# 50 samples x 2 features, uniform on [-1, 1), for train and test alike.
X_train = rng.uniform(-1, 1, 100).reshape(50, 2)
y_train = _ground_truth(X_train)

X_test = rng.uniform(-1, 1, 100).reshape(50, 2)
y_test = _ground_truth(X_test)

# Deliberately corrupt feature 0 of the first 10 training rows with NaN.
# NOTE(review): presumably this exercises NaN handling in the gplearn_evolve
# fork (stock gplearn would reject NaN input) — confirm with the fork's docs.
X_train[:10, 0] = np.nan

# GP hyperparameters: crossover + mutation probabilities sum to 0.95, leaving
# 0.05 for plain reproduction; parsimony_coefficient penalizes bloated programs.
gp_config = dict(
    population_size=5000,
    generations=20,
    stopping_criteria=0.01,
    p_crossover=0.7,
    p_subtree_mutation=0.1,
    p_hoist_mutation=0.05,
    p_point_mutation=0.1,
    max_samples=0.9,
    verbose=1,
    parsimony_coefficient=0.01,
    random_state=0,
)
est_gp = SymbolicRegressor(**gp_config)

# Evolve programs against the (partially NaN) training data.
est_gp.fit(X_train, y_train)


# Show the best program found by the evolution.
print(est_gp._program)

# TODO - write the winning program out as an image file
graph_source = est_gp._program.export_graphviz()
diagram = graphviz.Source(graph_source)
diagram.render('ex1_child_test', format='png', cleanup=True)


