#!/usr/bin/python3
# -*- coding: utf-8 -*-
# File  : kernel.py
# Author: anyongjin
# Date  : 2020/9/22
import tensorflow as tf
import numpy as np
from tensorflow.keras.layers import Layer
from tensorflow.keras import backend as K

def gelu(x):
    """Gaussian Error Linear Unit, tanh approximation (Hendrycks & Gimpel):

        0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))

    :param x: input tensor.
    :return: tensor of the same shape with GELU applied element-wise.
    """
    # 0.7978845608 ~= sqrt(2 / pi); x * (1 + 0.044715 x^2) == x + 0.044715 x^3
    inner = x * 0.7978845608 * (1 + 0.044715 * x * x)
    return 0.5 * x * (1 + tf.tanh(inner))


def pooling(mat, ksize, method='max', pad=False):
    """Non-overlapping pooling on 2D or 3D data.

    :param mat: ndarray; the first two axes are pooled, trailing axes
        (e.g. channels) are kept as-is.
    :param ksize: tuple of 2 ints, kernel size as (ky, kx).
    :param method: 'max' for max-pooling, 'mean' for mean-pooling.
    :param pad: if False, trailing rows/cols that do not fill a whole
        kernel are dropped (output size n // f per axis); if True, the
        input is NaN-padded so every element participates (output size
        ceil(n / f)). NOTE: padding allocates a float64 buffer, so
        integer inputs come back as floats in that case.
    :return: pooled ndarray of shape (ny, nx) + mat.shape[2:].
    :raises ValueError: if *method* is not 'max' or 'mean' (the original
        silently mean-pooled on any unrecognized string).
    """
    if method not in ('max', 'mean'):
        raise ValueError(f"method must be 'max' or 'mean', got {method!r}")

    m, n = mat.shape[:2]
    ky, kx = ksize

    if pad:
        # Ceil-divide to round the pooled grid up, then fill the
        # remainder with NaN so the nan-aware reductions ignore it.
        ny = -(-m // ky)
        nx = -(-n // kx)
        size = (ny * ky, nx * kx) + mat.shape[2:]
        mat_pad = np.full(size, np.nan)
        mat_pad[:m, :n, ...] = mat
    else:
        # Truncate to a whole number of kernels.
        ny = m // ky
        nx = n // kx
        mat_pad = mat[:ny * ky, :nx * kx, ...]

    # Expose each (ky, kx) tile on its own pair of axes, then reduce them.
    tiles = mat_pad.reshape((ny, ky, nx, kx) + mat.shape[2:])
    reducer = np.nanmax if method == 'max' else np.nanmean
    return reducer(tiles, axis=(1, 3))


def get_initializer(initializer_range=0.02):
    """Create a truncated-normal weight initializer (BERT-style).

    :param initializer_range: standard deviation of the distribution.
    :return: a ``tf.keras.initializers.TruncatedNormal`` instance.
    """
    import tensorflow as tf
    initializer_cls = tf.keras.initializers.TruncatedNormal
    return initializer_cls(stddev=initializer_range)


def l2_loss(y_true, y_pred):
    """Mean squared error (L2) loss.

    Bug fix: the original returned ``K.mean(y_true - y_pred) ** 2`` — the
    *square of the mean* residual — so positive and negative errors cancel
    and the loss can be ~0 for a badly wrong prediction. MSE is the mean
    of the *squared* residuals.

    :param y_true: ground-truth tensor.
    :param y_pred: predicted tensor, same shape as ``y_true``.
    :return: scalar tensor, mean of element-wise squared differences.
    """
    import tensorflow.keras.backend as K
    return K.mean(K.square(y_true - y_pred))


def shape_list(x):
    """Return the shape of ``x`` as a list, mixing static and dynamic dims.

    Statically known dimensions are returned as plain ints; unknown
    (``None``) dimensions are replaced by the corresponding entry of the
    runtime ``tf.shape`` tensor.
    """
    static_dims = x.shape.as_list()
    dynamic_dims = tf.shape(x)
    shape = []
    for axis, dim in enumerate(static_dims):
        shape.append(dim if dim is not None else dynamic_dims[axis])
    return shape


def attention_layer(inputs, name=None):
    """Temporal attention over LSTM outputs.

    The upstream LSTM must be built with ``return_sequences=True``.
    A sigmoid-activated Dense layer learns a weight per timestep
    (applied on the transposed tensor), and the input sequence is
    scaled element-wise by those weights.

    :param inputs: LSTM output tensor, shape (None, time_steps, input_dim).
    :param name: optional prefix for the attention Dense layer's name.
    :return: tensor of the same shape as ``inputs``, attention-weighted.
    """
    from tensorflow.keras.layers import Dense, Permute, Multiply
    from tensorflow.keras import backend as K
    time_steps = K.int_shape(inputs)[1]
    # Fix: with the default name=None the original f-string produced the
    # literal layer name 'None_atcore', which collides when this helper
    # is used more than once in a model. Let Keras auto-name instead.
    dense_name = f'{name}_atcore' if name is not None else None
    # (batch, time, dim) -> (batch, dim, time) so Dense acts across time.
    atten = Permute((2, 1))(inputs)
    atten = Dense(time_steps, activation='sigmoid', name=dense_name)(atten)
    # Back to (batch, time, dim) to match the input layout.
    atten = Permute((2, 1))(atten)
    return Multiply()([inputs, atten])
