# -*- coding: utf-8 -*-
# Codes from Baidu AI, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
# Pretraining of compound encoders with the attribute-masking (Attrmask) task
# Author: MiqroEra Shibo

import paddle.optimizer
import numpy as np

from helixwrapper.models.pretrain_gnns_model import PretrainGNNModel, AttrmaskModel
from helixwrapper.datasets.zinc_dataset import load_zinc_dataset
from helixwrapper.featurizers.pretrain_gnn_featurizer import AttrmaskTransformFn, AttrmaskCollateFn
from helixwrapper.utils import load_json_config

# Build the model: a GNN compound encoder wrapped by the attribute-mask
# pretraining head, optimized with Adam at a fixed learning rate of 1e-3.
compound_encoder_config = load_json_config("model_configs/pregnn_paper.json")
model_config = load_json_config("model_configs/pre_Attrmask.json")
compound_encoder = PretrainGNNModel(compound_encoder_config)
model = AttrmaskModel(model_config, compound_encoder)
opt = paddle.optimizer.Adam(0.001, parameters=model.parameters())

# Load the ZINC molecules and featurize them up front with 4 worker processes.
dataset = load_zinc_dataset("./dataset/chem_dataset_small/zinc_standard_agent/")
dataset.transform(AttrmaskTransformFn(), num_workers=4)

# start training
def train(model, dataset, collate_fn, opt):
    """Run one epoch of attribute-mask pretraining and return the mean loss.

    Args:
        model: AttrmaskModel; called with (graphs, masked_node_indice,
            masked_node_label) and expected to return a scalar loss tensor.
        dataset: dataset exposing ``get_data_loader``.
        collate_fn: callable that collates samples into
            (graphs, masked_node_indice, masked_node_label) batches.
        opt: paddle optimizer bound to ``model.parameters()``.

    Returns:
        Mean per-batch loss over the epoch (numpy scalar).
    """
    loader = dataset.get_data_loader(
        batch_size=128,
        num_workers=4,
        shuffle=True,
        collate_fn=collate_fn)
    model.train()
    epoch_losses = []
    for graphs, masked_indices, masked_labels in loader:
        graphs = graphs.tensor()
        masked_indices = paddle.to_tensor(masked_indices, 'int64')
        masked_labels = paddle.to_tensor(masked_labels, 'int64')
        loss = model(graphs, masked_indices, masked_labels)
        # Standard paddle step: backprop, update, then reset gradients.
        loss.backward()
        opt.step()
        opt.clear_grad()
        epoch_losses.append(loss.numpy())
    return np.mean(epoch_losses)

# Collate batches, masking 15% of atoms per molecule for the Attrmask task.
collate_fn = AttrmaskCollateFn(
        atom_names=compound_encoder_config['atom_names'],
        bond_names=compound_encoder_config['bond_names'],
        mask_ratio=0.15)

num_epochs = 1000
for epoch_id in range(num_epochs):
    train_loss = train(model, dataset, collate_fn, opt)
    print("epoch:%d train/loss:%s" % (epoch_id, train_loss))

# Persist only the encoder — the pretraining head is discarded downstream.
# NOTE(review): the checkpoint is written once, after all epochs complete;
# an interrupted run saves nothing. Also assumes paddle.save creates the
# target directory if missing — confirm for the paddle version in use.
paddle.save(compound_encoder.state_dict(), './model/pretrain_attrmask/compound_encoder.pdparams')