import json
import  os
import pathlib
import time
from typing import Union,Any
import numpy as np

from nlp_tools.tasks.abs_task_model import ABCTaskModel
from bert4keras.backend import  search_layer
import tensorflow.keras.backend as K
import tensorflow as tf


def convert_to_saved_model(model: ABCTaskModel,
                           model_path: str,
                           version: Union[str, int, None] = None,
                           signatures: Any = None,
                           options: Any = None) -> None:
    """
    Export a model in TensorFlow SavedModel format for TensorFlow Serving.

    Args:
        model: Target model; must be an ``ABCTaskModel`` instance.
        model_path: The directory under which the SavedModel will be stored.
            The actual export goes into ``model_path/<version>/``.
        version: The model version code used as the sub-directory name.
            Defaults to the current Unix timestamp rounded to seconds.
        signatures: Signatures to save with the SavedModel. Applicable to the
            'tf' format only. Please see the `signatures` argument in
            `tf.saved_model.save` for details.
        options: Optional `tf.saved_model.SaveOptions` object that specifies
            options for saving to SavedModel.

    Raises:
        ValueError: If ``model`` is not an ``ABCTaskModel`` instance.
    """
    if not isinstance(model, ABCTaskModel):
        raise ValueError("Only supports the classification model and labeling model")
    if version is None:
        version = round(time.time())
    export_path = os.path.join(model_path, str(version))

    pathlib.Path(export_path).mkdir(exist_ok=True, parents=True)
    model.tf_model.save(export_path, save_format='tf', signatures=signatures, options=options)

    # Persist the task-model configuration next to the SavedModel so the
    # task wrapper can be reconstructed at load time. Explicit encoding
    # avoids platform-dependent defaults; the with-block handles closing.
    with open(os.path.join(export_path, 'model_config.json'), 'w', encoding='utf-8') as f:
        f.write(json.dumps(model.to_dict(), indent=2, ensure_ascii=True))


def adversarial_training(model, embedding_name, epsilon=1):
    """Add adversarial training (FGM-style embedding perturbation) to a model.

    ``model`` is the Keras model to augment with adversarial training;
    ``embedding_name`` is the name of the Embedding layer inside ``model``.
    Must be called after the model has been compiled.

    NOTE(review): this looks like a TF2 port of a Keras-1.x-style
    implementation (bert4keras); several calls below appear invalid under
    TF2 (see inline notes) — verify against the original before relying on it.
    """

    if model.train_function is None:  # if no train function has been built yet
        model.make_train_function()  # build it manually
    old_train_function = model.train_function  # keep a reference to the original train function

    # Locate the Embedding layer by searching backwards from each model output.
    for output in model.outputs:
        embedding_layer = search_layer(output, embedding_name)
        if embedding_layer is not None:
            break
    if embedding_layer is None:
        raise Exception('Embedding layer not found')

    # Compute the gradient of the loss w.r.t. the embedding matrix.
    embeddings = embedding_layer.embeddings  # the Embedding weight matrix
    with tf.GradientTape() as gtape:
        # NOTE(review): nothing is computed inside this tape before
        # gradient() is called, and `model.compiled_loss._user_losses` is
        # the user-supplied loss *spec* (a private attribute), not a
        # computed loss tensor — this likely yields None instead of real
        # gradients; confirm against the original K.gradients-based code.
        gradients = gtape.gradient(model.compiled_loss._user_losses, [embeddings])  # embedding gradients
        gradients = tf.zeros_like(embeddings) + gradients[0]  # densify (e.g. IndexedSlices -> dense tensor)

        # Wrap the gradient computation as a callable over all model feeds.
        inputs = (
            model._feed_inputs + model._feed_targets + model._feed_sample_weights
        )  # all input tensors: inputs + targets + sample weights
        # NOTE(review): tf.function takes a Python callable, not
        # `inputs=`/`outputs=` keyword arguments — this mirrors the old
        # K.function signature and will raise at runtime; confirm intent.
        embedding_gradients = tf.function(
            inputs=inputs,
            outputs=[gradients],
            name='embedding_gradients',
        )  # wrapped as a function

    def train_function(inputs):  # redefined training step with FGM perturbation
        grads = embedding_gradients(inputs)[0]  # embedding gradients for this batch
        delta = epsilon * grads / (np.sqrt((grads**2).sum()) + 1e-8)  # perturbation: epsilon * g / (||g|| + eps)
        K.set_value(embeddings, K.eval(embeddings) + delta)  # inject the perturbation
        outputs = old_train_function(inputs)  # run the normal gradient-descent step
        K.set_value(embeddings, K.eval(embeddings) - delta)  # remove the perturbation
        return outputs

    model.train_function = train_function  # override the original train function