import keras.backend as K
import tensorflow as tf


# @staticmethod
# def encode(x):
#
#     x_mean = K.mean(x, axis=1, keepdims=False)
#     x_out = K.reshape(x_mean, shape=(-1, 4))
#
#     return x_out
#
# # @staticmethod
# def slice(x, i):
#
#     return x[i, :]


def transpose(x):
    """Swap the last two axes of a batched tensor.

    Applies the permutation (0, 2, 1), i.e. for a rank-3 input of
    shape (batch, a, b) the output has shape (batch, b, a). The
    batch axis (axis 0) is left untouched.
    """
    swapped = K.permute_dimensions(x, (0, 2, 1))
    return swapped

def slice(x, i):
    """Select the i-th entry along axis 1, dropping that axis.

    For an input of shape (batch, n, ...) returns the slice of
    shape (batch, ...). Note: intentionally shadows the builtin
    `slice` within this module.
    """
    picked = x[:, i]
    return picked


def encode(x):
    """Average-pool a tensor over axis 1.

    Returns the mean along axis 1 with that axis removed, e.g. an
    input of shape (batch, steps, dim) yields (batch, dim).
    """
    pooled = K.mean(x, axis=1, keepdims=False)
    return pooled


def softmax(x, axis=1):
    """Softmax activation function.

    # Arguments
        x : Tensor of rank >= 2.
        axis: Integer, axis along which the softmax normalization is applied.
    # Returns
        Tensor, output of softmax transformation.
    # Raises
        ValueError: In case `dim(x) == 1`.
    """
    rank = K.ndim(x)
    if rank < 2:
        # Softmax over a single axis is undefined for 1-D input here.
        raise ValueError('Cannot apply softmax to a tensor that is 1D')
    if rank == 2:
        # The backend primitive handles the common 2-D case directly.
        return K.softmax(x)
    # Higher-rank input: compute a numerically stable softmax manually,
    # subtracting the per-axis max before exponentiating.
    shifted = x - K.max(x, axis=axis, keepdims=True)
    exps = K.exp(shifted)
    return exps / K.sum(exps, axis=axis, keepdims=True)

