import os
import pickle

import numpy as np
from tensorflow import keras

from python_ai.common.xcommon import *
from python_ai.category.NumpyNet.utils import *

# Load the raw MNIST digit images/labels (downloaded on first use).
sep('Load data')
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()

sep('Process data')
n_train = len(x_train)
n_test = len(x_test)

# Shuffle images and labels together so each sample keeps its label.
# NOTE(review): shuffle() comes from a project wildcard import — assumed
# to permute both arrays with the same order; confirm in project utils.
x_train, y_train = shuffle(x_train, y_train)
x_test, y_test = shuffle(x_test, y_test)

# Per-sample shape (everything after the batch axis), stored alongside
# the data so downstream consumers can size their models.
shape_ = x_test.shape[1:]

sep('Split test data into test and val')
# Validation gets the ceil-half of the test set; the remainder stays test.
# BUG FIX: previously n_test was set equal to n_val, which over-reported
# the test count by one whenever the original test size was odd, since
# np.split(..., [n_val]) leaves only (original - n_val) samples in test.
n_val = int(np.ceil(n_test * 0.5))
n_test = n_test - n_val
print('n_train, n_val, n_test', n_train, n_val, n_test)
x_val, x_test = np.split(x_test, [n_val])
y_val, y_test = np.split(y_test, [n_val])
print('x_train', x_train.shape)
print('x_val', x_val.shape)
print('x_test', x_test.shape)
print('y_train', y_train.shape)
print('y_val', y_val.shape)
print('y_test', y_test.shape)

# Bundle every split plus the per-sample shape into a single dict
# so the whole dataset can be pickled as one object.
data_dict = {
    'x_train': x_train,
    'x_val': x_val,
    'x_test': x_test,
    'y_train': y_train,
    'y_val': y_val,
    'y_test': y_test,
    'shape_': shape_,
}

# Persist the processed splits next to this script as _data/<script>.pkl
# (e.g. prep.py -> <script dir>/_data/prep.py.pkl).
BASE_DIR, FILE_NAME = os.path.split(__file__)
save_path = os.path.join(BASE_DIR, '_data', FILE_NAME + '.pkl')
# Previously the path was re-split just to recover the directory, leaving
# an unused filename variable; dirname() expresses the intent directly.
os.makedirs(os.path.dirname(save_path), exist_ok=True)
print(f'Generate pickle file to {save_path} ...')
with open(save_path, 'wb') as f:
    pickle.dump(data_dict, f)
print('Generated.')
