import os.path
import mindspore
from mindspore import nn
from mindspore.train import LossMonitor
from src.deep_learning.dataloader import create_mlp_dataset
from src.deep_learning.configuration import MlpModelConfig
from src.deep_learning.networks import CellSortMlp

#
#                       _oo0oo_
#                      o8888888o
#                      88" . "88
#                      (| -_- |)
#                      0\  =  /0
#                    ___/`---'\___
#                  .' \\|     |# '.
#                 / \\|||  :  |||# \
#                / _||||| -:- |||||- \
#               |   | \\\  -  #/ |   |
#               | \_|  ''\---/''  |_/ |
#               \  .-\__  '-'  ___/-. /
#             ___'. .'  /--.--\  `. .'___
#          ."" '<  `.___\_<|>_/___.' >' "".
#         | | :  `- \`.;`\ _ /`;.`/ - ` : | |
#         \  \ `_.   \_ __\ /__ _/   .-` /  /
#     =====`-.____`.___ \_____/___.-`___.-'=====
#                       `=---='
#
#
#     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
#               佛祖保佑         永无BUG  (Buddha bless — may there never be bugs)

# ---- Data and model setup --------------------------------------------------
# Hyper-parameters (learning rate, sink mode, ...) come from the project-level
# MlpModelConfig.
config = MlpModelConfig()

# CSV files with positive (cancer) and negative (non-cancer) cell samples.
# NOTE(review): paths are relative to the current working directory — confirm
# this script is always launched from the repository root.
cancer_data_path = os.path.join("mlp_datasets", "cancer_cell.csv")
not_cancer_data_path = os.path.join("mlp_datasets", "not_cancer_cell.csv")

train_dataset, eval_dataset = create_mlp_dataset(cancer_data_path,
                                                 not_cancer_data_path)

# sparse=False: labels are expected one-hot encoded; loss is mean-reduced.
loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=False, reduction='mean')
net = CellSortMlp()
opt = nn.Adam(net.trainable_params(), learning_rate=config.learning_rate)
# The metric key "准确率" ("accuracy") is kept verbatim — it appears in the
# printed eval results, so changing it would change runtime output.
model = mindspore.Model(net, loss_fn, opt, metrics={"准确率": nn.Accuracy()})

def train_and_eval():
    """Train the MLP, evaluate it, and persist a checkpoint.

    Uses the module-level ``model``, ``train_dataset``, ``eval_dataset`` and
    ``config``. The trained weights are written to
    ``mlp_checkpoints/mlp_model_checkpoint.ckpt``.
    """
    print("============ Starting Training ============")
    # NOTE(review): the epoch count is hard-coded here while learning_rate and
    # dataset_sink_mode come from ``config`` — consider moving it there too.
    model.train(epoch=10, train_dataset=train_dataset,
                callbacks=LossMonitor(per_print_times=1),
                dataset_sink_mode=config.dataset_sink_mode)
    metrics_result = model.eval(eval_dataset)
    print(metrics_result)

    # exist_ok=True replaces the race-prone exists()-then-makedirs() pattern:
    # creation no longer fails if the directory appears between check and call.
    target_directory = "mlp_checkpoints"
    os.makedirs(target_directory, exist_ok=True)
    mindspore.save_checkpoint(model.train_network,
                              os.path.join(target_directory, "mlp_model_checkpoint.ckpt"))
    print("============== End Training ==============")

train_and_eval()


