import tensorflow as tf
from module import conv2d, maxpool2d, fc


# AlexNet，224*224*3
def alexnet(input_tensor, num_classes, keep_prob=None, regularizer=None):
    """Build an AlexNet forward graph for 224x224x3 inputs.

    :param input_tensor: 4-D input tensor shaped [batch_size, height, width,
        channels]; typically [batch_size, 224, 224, 3].
    :param num_classes: number of output classes.
    :param keep_prob: dropout keep probability for fc6/fc7; if None, dropout
        is skipped (e.g. at inference time).
    :param regularizer: regularizer applied to the final projection layer only
        (passed through to `fc`); conv and hidden fc layers are unregularized.
    :return: un-normalized logits tensor shaped [batch_size, num_classes]
        (no softmax applied).
    """
    # Layer 1: conv 11x11/4 -> LRN -> 3x3/2 max-pool.
    conv1 = conv2d(input_tensor, 'conv1', output_channel=64, kh=11, kw=11, dh=4, dw=4)
    # Local response normalization, as in the original AlexNet paper,
    # to encourage competition between adjacent channels.
    lrn1 = tf.nn.lrn(conv1, 4, bias=1, alpha=1e-3/9, beta=0.75, name='lrn1')
    pool1 = maxpool2d(lrn1, 'pool1', kh=3, kw=3, dh=2, dw=2, padding='VALID')
    # Layer 2: conv 5x5/1 -> LRN -> 3x3/2 max-pool.
    conv2 = conv2d(pool1, 'conv2', output_channel=192, kh=5, kw=5, dh=1, dw=1)
    lrn2 = tf.nn.lrn(conv2, 4, bias=1, alpha=1e-3/9, beta=0.75, name='lrn2')
    pool2 = maxpool2d(lrn2, 'pool2', kh=3, kw=3, dh=2, dw=2, padding='VALID')
    # Layers 3-5: stacked 3x3 convolutions, final 3x3/2 max-pool.
    conv3 = conv2d(pool2, 'conv3', output_channel=384, kh=3, kw=3, dh=1, dw=1)
    conv4 = conv2d(conv3, 'conv4', output_channel=256, kh=3, kw=3, dh=1, dw=1)
    conv5 = conv2d(conv4, 'conv5', output_channel=256, kh=3, kw=3, dh=1, dw=1)
    pool5 = maxpool2d(conv5, 'pool5', kh=3, kw=3, dh=2, dw=2, padding='VALID')
    # Flatten the feature map to [batch_size, H*W*C] for the fc layers.
    # as_list() yields plain ints and works on both TF1 Dimension objects
    # and TF2 TensorShape (shape[i].value is TF1-only and breaks on TF2).
    shape = pool5.get_shape().as_list()
    nodes = shape[1] * shape[2] * shape[3]
    reshaped = tf.reshape(pool5, [-1, nodes], name='reshape')
    # Layer 6: fully connected, optional dropout during training.
    fc6 = fc(reshaped, 'fc6', 4096)
    if keep_prob is not None:
        fc6 = tf.nn.dropout(fc6, keep_prob)
    # Layer 7: fully connected, optional dropout during training.
    fc7 = fc(fc6, 'fc7', 4096)
    if keep_prob is not None:
        fc7 = tf.nn.dropout(fc7, keep_prob)
    # Layer 8: linear projection to num_classes logits (softmax applied
    # by the caller, e.g. inside the loss function).
    logit = fc(fc7, 'out', num_classes, activation=False, regularizer=regularizer)
    return logit
