import paddle
import pandas as pd
from tqdm import tqdm
import numpy as np

class CvFoBlock(paddle.nn.Layer):
    """Small embedding + linear-mixing block producing per-step vocab logits.

    Given a token-id sequence and a separate single "start" token, it pools
    the sequence into one vector, mixes it with the start embedding through
    a few bias-free linear layers, and emits logits for two output positions.

    Args:
        voc_size: vocabulary size (also the logit dimension).
        hidden_dim: width of the embedding and hidden linear layers.
    """

    def __init__(self, voc_size, hidden_dim):
        super(CvFoBlock, self).__init__()
        # One embedding table shared by the sequence and the start token.
        self.em = paddle.nn.Embedding(voc_size, hidden_dim, padding_idx=-1)
        # Bias-free projections.  NOTE(review): self.start is created but
        # never called in forward() — confirm whether it is intentional.
        self.start = paddle.nn.Linear(hidden_dim, hidden_dim, bias_attr=False)
        self.end = paddle.nn.Linear(hidden_dim, hidden_dim, bias_attr=False)
        self.map = paddle.nn.Linear(hidden_dim, hidden_dim, bias_attr=False)
        self.llm = paddle.nn.Linear(hidden_dim * 2, hidden_dim, bias_attr=False)
        self.end_flag = paddle.nn.Linear(hidden_dim * 2, hidden_dim, bias_attr=False)
        self.layer_nor = paddle.nn.LayerNorm(hidden_dim)
        self.out_layer = paddle.nn.Linear(hidden_dim, voc_size, bias_attr=False)

    def forward(self, sx, st):
        """Compute logits for the two prediction positions.

        Args:
            sx: int64 token ids — presumably (batch, seq_len); TODO confirm.
            st: int64 start-token ids — presumably (batch, 1); TODO confirm.

        Returns:
            Logits of shape (batch, 2, voc_size) given the shapes above.
        """
        seq_emb = self.em(sx)
        start_emb = self.em(st)
        # Elementwise x / (x**2 + eps) ≈ 1/x, summed over the sequence axis;
        # the singleton axis is restored before layer-norm.
        denom = seq_emb ** 2
        denom += 1e-11
        pooled = self.layer_nor(paddle.sum(seq_emb / denom, 1).unsqueeze(1))

        mapped = self.map(pooled)
        end_vec = self.end(pooled)
        fused = self.llm(paddle.concat([start_emb, mapped], -1))
        flag_vec = self.end_flag(paddle.concat([fused, end_vec], -1))
        # Two positions stacked along axis 1, each projected to vocab logits.
        return self.out_layer(paddle.concat([flag_vec, fused], 1))


def gen_data():
    """Build counting-task samples and pickle vocabulary + encoded data.

    For each step size j and each start i, one sample is produced per
    prefix length ii in [i, i+j): the prompt tokens, the counted numbers
    i..ii, a true/false marker token, and the next number.  Samples are
    padded with "<|p_k|>" tokens to a fixed length before their final
    3 tokens.

    Writes (to the working directory):
        voc_data.pandas_pickle:   vocabulary list, "<|pad|>" first.
        train_data.pandas_pickle: samples encoded as 1-based token ids.
    """
    data_list = []
    for j in range(2, 100):  # distance between start and target number

        for i in range(0, 100, j):  # span starts: 0, j, 2j, ...
            jj = i + j  # target end number for this span

            # NOTE(review): `str(ii) == str(jj)` can never be true — ii
            # iterates over range(i, jj), which excludes jj — so the
            # "<|true|>" branch is dead code; possibly str(ii + 1) was
            # intended.  Left unchanged pending confirmation.
            one_data = [
                ["<|aos|>", str(i), "<|bos|>", str(jj), "<|cos|>"] + [str(jjj) for jjj in range(i, ii+1)] + ["<|true|>",str(ii+1)
                                                                                                           ] if str(
                    ii) == str(jj)
                else
                ["<|aos|>", str(i), "<|bos|>", str(jj), "<|cos|>"] + [str(jjj) for jjj in range(i, ii+1)] + ["<|false|>",str(ii+1)
                                                                                                           ]
                for ii in range(i, jj )]

            data_list += one_data
    # Pad every sample to 107 tokens before its final 3 tokens (110 total).
    data_list = [i[:-3] + ["<|p_{}|>".format(j) for j in range(len(i[:-3] ), 107) ] +i[-3:] for i in data_list]
    # NOTE(review): set() iteration order is nondeterministic across runs,
    # so token ids differ between invocations — confirm this is acceptable.
    voc=["<|pad|>"]+list(set(np.hstack(data_list).tolist()))
    pd.to_pickle(voc, "voc_data.pandas_pickle")
    # Ids start at 1; id 0 is left unused.
    voc_dict={k:v for v,k in enumerate(voc,1)}
    pd.to_pickle([ [voc_dict[j] for  j in i ] for i in data_list], "train_data.pandas_pickle")


def train_data(epochs=700, batch_size=10, learning_rate=0.0001, hidden_dim=128):
    """Train a CvFoBlock on the pickled data produced by gen_data().

    Reads voc_data.pandas_pickle and train_data.pandas_pickle from the
    working directory — run gen_data() first.

    Args:
        epochs: number of passes over the data set (was hard-coded 700).
        batch_size: samples per optimisation step (was hard-coded 10).
        learning_rate: Adam learning rate (was hard-coded 0.0001).
        hidden_dim: model hidden width (was hard-coded 128).
    """
    voc_id = pd.read_pickle("voc_data.pandas_pickle")
    # +1 because gen_data() assigns token ids starting at 1 (0 is unused).
    net = CvFoBlock(len(voc_id) + 1, hidden_dim)
    # NOTE(review): labels produced by gen_data() are never -1, so
    # ignore_index=-1 appears inert here — confirm intent.
    loss_func = paddle.nn.CrossEntropyLoss(ignore_index=-1)
    opt = paddle.optimizer.Adam(learning_rate=learning_rate, parameters=net.parameters())
    data_set = pd.read_pickle("train_data.pandas_pickle")
    bar = tqdm(range(epochs))
    for epoch in bar:
        # Fix: reshuffle every epoch (the original shuffled only once, so
        # every epoch saw the exact same batch composition and order).
        np.random.shuffle(data_set)
        for start in range(0, len(data_set), batch_size):
            data = paddle.to_tensor(data_set[start:start + batch_size]).astype("int64")
            label = data[:, -2:]       # marker token + the next number
            sst = data[:, -3:-2]       # single "start" token fed separately
            input_data = data[:, :-3]  # everything before the last 3 tokens
            out = net(input_data, sst)
            loss = loss_func(out, label)
            # Flatten both prediction positions for token-level accuracy.
            acc = paddle.metric.accuracy(out.reshape([-1, len(voc_id) + 1]),
                                         label.reshape([-1, 1]))
            bar.set_description("epoch___{}_loss___{:.5f}_acc__{:.5f}".format(epoch, loss.item(), acc.item()))
            opt.clear_grad()
            loss.backward()
            opt.step()

if __name__ == '__main__':
    # gen_data()  # uncomment and run once first: it creates the pickle
    #              files (voc_data / train_data) that train_data() reads.
    train_data()
