import json
import os
import random

import torch
import torch.nn as nn
import copy
from sql_options import insert_or_update_client_data
from utils.loaddata import load_client_data
from utils.update import BenignUpdate
import tenseal as ts
from pypbc import *
import hashlib

Hash = hashlib.sha256  # digest used to bind the BFV ciphertext to its first signature (h3 derivation)
from utils.masked_input import Flatten_paremeter, KeyGen, PPGen, Hhash, sign_key_gen, sign, Verify_agg_sign_2, \
    decompose_agg_w_new, \
    Hhash_equa, Verify_agg_sign_1, Restore_values_to_w, Invert_Quality

import base64
import numpy as np

class Net(nn.Module):
    """Two-layer fully connected classifier: 784 -> 10 -> 10.

    Input is flattened from dim 1 onward, so any shape whose trailing
    dimensions multiply to 784 (e.g. 1x28x28 images) is accepted.
    """

    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(784, 10)
        self.fc2 = nn.Linear(10, 10)

    def forward(self, x):
        flat = torch.flatten(x, 1)
        hidden = torch.relu(self.fc1(flat))
        return self.fc2(hidden)


class Client:
    """Federated-learning client for a signed, homomorphically-encrypted
    aggregation protocol.

    Lifecycle:
      1. ``set_params``    -- receive pairing/public parameters and the BFV
                              context from the server; generate signing keys.
      2. ``local_train``   -- train locally, BFV-encrypt and sign the update.
      3. ``client_verify`` -- check the aggregated model's hash commitments
                              and signatures; on success adopt it locally.
    """

    def __init__(self, args):
        """Seed every RNG from args["seed"] (for reproducibility) and build
        the initial local model on args["device"]."""
        random.seed(args["seed"])
        np.random.seed(args["seed"])
        torch.manual_seed(args["seed"])
        torch.cuda.manual_seed_all(args["seed"])
        self.args = args.copy()
        self.device = args["device"]
        self.num_clients = 0
        self.w_locals = []            # history of flattened local updates
        self.dataset_train = None
        # Local training state.
        self.net = Net().to(args["device"])
        self.net.train()
        self.w_glob = self.net.state_dict()
        self.w = self.net.state_dict()

    def set_params(self, data, client_id, connection):
        """Install server-provided parameters and persist fresh signing keys.

        data keys used: params, g2, total_params, len_pp2, pp1, pp2,
        client_num, num_clients, context (base64-encoded TenSEAL context).
        """
        self.params = Parameters(param_string=data['params'])
        self.pairing = Pairing(self.params)
        self.G2 = G2  # pypbc group constant (module level, via star import)
        self.g2 = Element(self.pairing, G2, value=data['g2'])
        self.total_params = data['total_params']
        self.len_pp2 = data['len_pp2']
        self.pp1 = [Element(self.pairing, G2, value=s) for s in data['pp1']]
        self.pp2 = [Element(self.pairing, G2, value=s) for s in data['pp2']]
        self.client_num = data['client_num']
        self.num_clients = data['num_clients']
        self.context = ts.context_from(base64.b64decode(data['context']))
        # Two independent signing key pairs: one for the model-hash
        # commitment (sig1), one for the ciphertext hash (sig2).
        # (The original pre-initialised pk_sign_1/pk_sign_2 to 0; those
        # assignments were dead and have been removed.)
        self.sk_sign_1, self.pk_sign_1 = sign_key_gen(self.G2, self.g2, self.pairing)
        self.sk_sign_2, self.pk_sign_2 = sign_key_gen(self.G2, self.g2, self.pairing)

        insert_or_update_client_data(connection, client_id,
                                     sk_sign_1=str(self.sk_sign_1),
                                     pk_sign_1=str(self.pk_sign_1),
                                     sk_sign_2=str(self.sk_sign_2),
                                     pk_sign_2=str(self.pk_sign_2),
                                     client_num=self.client_num)

    def _fit_local_model(self, local_dataset, client_id):
        """Train one local round, adopt the result into self.net and
        checkpoint it to ./model/model.pth. Returns the trained state dict."""
        # Replace the dataset loading / training below to suit your own task.
        dataset = load_client_data(is_train=True, datafile=local_dataset, client_id=client_id)
        local = BenignUpdate(args=self.args.copy(), dataset=dataset)
        w = local.train(net=copy.deepcopy(self.net).to(self.device))
        self.net.load_state_dict(w)
        model_dir = "./model/"
        os.makedirs(model_dir, exist_ok=True)
        torch.save(self.net.state_dict(), os.path.join(model_dir, "model.pth"))
        return w

    def _embed_hash_ascii(self, hv, h_str, slot):
        """Write the ASCII codes of h_str into hv's 130-wide window reserved
        for client index ``slot`` (in place; hv is also returned).

        NOTE(review): assumes len(h_str) >= 130 -- matches the original's
        implicit assumption; shorter strings raise IndexError.
        """
        codes = [ord(ch) for ch in h_str]
        start = 130 * slot
        for offset in range(130):
            hv[start + offset] = codes[offset]
        return hv

    def local_train(self, local_set, local_dataset, client_id, connection):
        """Run one round of local training and upload the encrypted, signed
        update to the database.

        local_set selects the (attack-simulation) behaviour:
          0 -- honest client;
          1 -- forge the model-hash commitment (hash of negated weights);
          2 -- sign the commitment with a freshly generated (wrong) key;
          3 -- sign the ciphertext hash with a freshly generated (wrong) key.

        Side effects: updates self.net and self.w_locals, checkpoints the
        model, sets the module-level BFV_bytes, writes the row via
        insert_or_update_client_data.
        """
        print("self.device", self.device)
        idx = self.client_num - 1                   # 0-based slot of this client
        hv_ini = [0] * (130 * self.num_clients)     # one 130-char window per client
        print(idx, "执行loacl_train ", 'data', local_dataset)

        w = self._fit_local_model(local_dataset, client_id)
        # The original flattened ``w`` in the attack branches and the reloaded
        # state dict in the honest branch; both orderings are preserved here.
        if local_set != 0:
            w_flat = Flatten_paremeter(copy.deepcopy(w))
        else:
            w_flat = Flatten_paremeter(copy.deepcopy(self.net.state_dict()))

        # h1: homomorphic-hash commitment to the (possibly forged) update.
        if local_set == 1:
            w_flat_forged = [-num for num in w_flat]
            h1_idx = Hhash(self.pp1, w_flat_forged, self.total_params, self.pairing)
        else:
            h1_idx = Hhash(self.pp1, w_flat, self.total_params, self.pairing)

        # Embed the ASCII codes of h1 into this client's window of hv_ini.
        self._embed_hash_ascii(hv_ini, str(h1_idx), idx)

        # Encrypt [hash vector || flattened weights] under BFV.
        enc_w_new_idx = ts.bfv_vector(self.context, hv_ini + w_flat)
        local_BFV_bytes = enc_w_new_idx.serialize()

        # h2 = h1 * Hhash(pp2, hv): binds the commitment to its slot.
        h2_idx = h1_idx * Hhash(self.pp2, hv_ini, self.len_pp2, self.pairing)
        if local_set == 2:
            sk_forged, _pk_forged = sign_key_gen(self.G2, self.g2, self.pairing)
            sig1_idx = sign(sk_forged, self.pairing, h2_idx)
        else:
            sig1_idx = sign(self.sk_sign_1, self.pairing, h2_idx)

        # h3 = H(ciphertext || sig1) mapped into G2, signed with the 2nd key.
        h3_digest = Hash((str(local_BFV_bytes) + str(sig1_idx)).encode()).hexdigest()
        h3_idx = Element.from_hash(self.pairing, self.G2, h3_digest)
        if local_set == 3:
            sk_forged, _pk_forged = sign_key_gen(self.G2, self.g2, self.pairing)
            sig2_idx = sign(sk_forged, self.pairing, h3_idx)
        else:
            sig2_idx = sign(self.sk_sign_2, self.pairing, h3_idx)

        self.w_locals.append(copy.deepcopy(w_flat))

        str_sig1_idx = str(sig1_idx)
        self.str_sig1_idx = str_sig1_idx
        local_model = json.dumps({key: value.cpu().numpy().tolist()
                                  for key, value in w.items()})
        # Kept for backward compatibility: other code may read this module
        # global -- TODO confirm and remove if nothing does.
        global BFV_bytes
        BFV_bytes = local_BFV_bytes
        insert_or_update_client_data(connection, client_id,
                                     local_model=local_model,
                                     local_BFV=local_BFV_bytes,
                                     local_sign1=str_sig1_idx,
                                     local_sign2=str(sig2_idx))
        print("done")

    def client_verify(self, data=None, client_id=None, connection=None):
        """Verify the aggregated ciphertext from the server.

        Checks, in order:
          1. verify1 -- the decrypted aggregate matches the recovered
             homomorphic-hash commitments;
          2. verify2 -- the aggregate signature over the per-client h2 values
             is valid for the published public keys.

        On full success the aggregated weights are adopted into
        self.w / self.net, persisted to the database and checkpointed.

        Returns 1 when both checks pass, 0 otherwise.
        """
        global_BFV_bytes = base64.b64decode(data['global_BFV_bytes'])
        self.pk_sign_1_s = [Element(self.pairing, G2, value=s) for s in data['pk_sign_1_s']]

        enc_agg = ts.bfv_vector_from(self.context, global_BFV_bytes)
        decrypted_agg_w_new = enc_agg.decrypt()
        decrypted_agg_w, h1_idx_list_recover = decompose_agg_w_new(
            decrypted_agg_w_new, self.len_pp2, self.num_clients)

        verify1 = Hhash_equa(self.pp1, decrypted_agg_w, len(decrypted_agg_w),
                             self.pairing, self.G2, h1_idx_list_recover)
        print("verify1 = ", verify1)
        # BUGFIX: the original only failed on verify1 == 0 and silently
        # returned success for any other non-1 value; fail on anything != 1.
        if verify1 != 1:
            return 0

        sig1_s_ele_recover = [Element(self.pairing, self.G2, value=s)
                              for s in data['sig1_s']]

        # Rebuild each client's h2 = h1 * Hhash(pp2, hv) from the recovered
        # h1 strings, reusing the same window layout as local_train.
        h2_idx_rec_list = []
        for i, h1_idx_recover in enumerate(h1_idx_list_recover[:self.num_clients]):
            hv_local = self._embed_hash_ascii([0] * (130 * self.num_clients),
                                              h1_idx_recover, i)
            h2_pre = Hhash(self.pp2, hv_local, self.len_pp2, self.pairing)
            h1_ele = Element(self.pairing, self.G2, value=h1_idx_recover)
            h2_idx_rec_list.append(h1_ele * h2_pre)

        verify2 = Verify_agg_sign_1(self.pairing, h2_idx_rec_list, sig1_s_ele_recover,
                                    self.pk_sign_1_s, self.num_clients, self.g2)
        print("verify2 = ", verify2)
        if verify2 != 1:
            return 0

        # The Invert_Quality result was unused in the original; the call is
        # kept in case it has side effects -- TODO confirm and drop if pure.
        Invert_Quality(decrypted_agg_w, self.num_clients)
        self.w = Restore_values_to_w(copy.deepcopy(self.w), decrypted_agg_w)
        self.net.load_state_dict(self.w)

        local_model = json.dumps({key: value.cpu().numpy().tolist()
                                  for key, value in self.w.items()})
        insert_or_update_client_data(connection, client_id, local_model=local_model)
        model_dir = "./model/"
        os.makedirs(model_dir, exist_ok=True)
        torch.save(self.net.state_dict(), os.path.join(model_dir, "model.pth"))
        return 1
