import tensorflow as tf
from module import conv3d, maxpool3d, fc


# C3D network; expects 8*112*112*3 inputs (depth*height*width*channels).
def c3d(input_tensor, num_classes, keep_prob=None, regularizer=None):
    """Build a C3D-style 3-D convolutional network and return class logits.

    :param input_tensor: 5-D input tensor shaped
        [batch_size, depth, height, width, channels],
        typically [batch_size, 8, 112, 112, 3].
    :param num_classes: number of output classes.
    :param keep_prob: dropout keep probability for the two fully-connected
        layers; when ``None``, dropout is skipped entirely (inference mode).
    :param regularizer: optional weight regularizer; applied only to the
        final projection layer ``out``.
    :return: 2-D tensor of shape [batch_size, num_classes] containing
        un-normalized class scores (logits, no softmax applied).
        NOTE: an earlier docstring also promised a parameter count as a
        second return value; the function returns only the logits.
    """
    # Layer 1: single conv + spatial-only pooling (kd=1 keeps the temporal
    # depth intact this early, as in the original C3D design). This layer
    # uses VALID padding, unlike the later layers which rely on the helpers'
    # default padding (presumably SAME — confirm in module.conv3d/maxpool3d).
    conv1 = conv3d(input_tensor, 'conv1', output_channel=64, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1, padding='VALID')
    pool1 = maxpool3d(conv1, 'pool1', kd=1, kh=2, kw=2, dd=1, dh=2, dw=2, padding='VALID')
    # Layer 2: conv + pooling that now halves the temporal depth as well.
    conv2 = conv3d(pool1, 'conv2', output_channel=128, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    pool2 = maxpool3d(conv2, 'pool2', kd=2, kh=2, kw=2, dd=2, dh=2, dw=2)
    # Layer 3: two stacked convs, then pool.
    conv3_1 = conv3d(pool2, 'conv3_1', output_channel=256, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    conv3_2 = conv3d(conv3_1, 'conv3_2', output_channel=256, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    pool3 = maxpool3d(conv3_2, 'pool3', kd=2, kh=2, kw=2, dd=2, dh=2, dw=2)
    # Layer 4: two stacked convs, then pool.
    conv4_1 = conv3d(pool3, 'conv4_1', output_channel=512, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    conv4_2 = conv3d(conv4_1, 'conv4_2', output_channel=512, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    pool4 = maxpool3d(conv4_2, 'pool4', kd=2, kh=2, kw=2, dd=2, dh=2, dw=2)
    # Layer 5: two stacked convs, then pool.
    conv5_1 = conv3d(pool4, 'conv5_1', output_channel=512, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    conv5_2 = conv3d(conv5_1, 'conv5_2', output_channel=512, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    pool5 = maxpool3d(conv5_2, 'pool5', kd=2, kh=2, kw=2, dd=2, dh=2, dw=2)
    # Layer 6: two stacked convs, then pool (one group more than canonical
    # C3D, which has five conv groups — intentional here, do not "fix").
    conv6_1 = conv3d(pool5, 'conv6_1', output_channel=512, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    conv6_2 = conv3d(conv6_1, 'conv6_2', output_channel=512, kd=3, kh=3, kw=3, dd=1, dh=1, dw=1)
    pool6 = maxpool3d(conv6_2, 'pool6', kd=2, kh=2, kw=2, dd=2, dh=2, dw=2)
    # Flatten [batch, d, h, w, c] -> [batch, d*h*w*c]. The `.value` access is
    # TF1-style Dimension unwrapping; it requires all non-batch dims static.
    shape = pool6.get_shape()
    nodes = shape[1].value * shape[2].value * shape[3].value * shape[4].value
    reshaped = tf.reshape(pool6, [-1, nodes], name='reshape')
    # Layer 7: fully-connected, with optional dropout.
    fc7 = fc(reshaped, 'fc7', 4096)
    if keep_prob is not None:
        fc7 = tf.nn.dropout(fc7, keep_prob)
    # Layer 8: fully-connected, with optional dropout.
    fc8 = fc(fc7, 'fc8', 4096)
    if keep_prob is not None:
        fc8 = tf.nn.dropout(fc8, keep_prob)
    # Layer 9: linear projection to num_classes; no activation so the output
    # can feed softmax/cross-entropy directly. Regularizer applies here only.
    logit = fc(fc8, 'out', num_classes, activation=False, regularizer=regularizer)
    return logit
