# -*- coding:utf-8 -*-

import tensorflow as tf
from utils.log_utils import log_debug

"""
DNN模块
"""

# Alias for the TF-Slim high-level API (tf.contrib exists only in TF 1.x; removed in TF 2.x).
# NOTE(review): unused in this chunk — presumably used by other layers in this module; verify.
slim = tf.contrib.slim


def fc_layer(x, out_size, activation_func=tf.nn.relu):
    """
    Build a fully-connected (dense) layer on top of ``x``.

    Creates fresh weight and bias variables on every call (no variable
    reuse/scoping), computes ``x @ W + b`` and applies the activation.

    :param x: input tensor of shape (batch, in_size); the second static
        dimension must be known at graph-construction time
    :param out_size: number of output units
    :param activation_func: activation applied to the affine result;
        pass ``None`` for a purely linear layer
    :return: output tensor of shape (batch, out_size)
    """
    # Static feature count; x.shape[1] is a tf.Dimension in TF 1.x.
    feature_count = x.shape[1].value
    # Small random init for W, constant 0.1 for b (common ReLU-friendly init).
    w = tf.Variable(tf.truncated_normal([feature_count, out_size], stddev=0.1))
    b = tf.Variable(tf.constant(0.1, shape=[out_size]))
    # (batch, in_size) @ (in_size, out_size) + (out_size,) -> (batch, out_size)
    pre_activation = tf.matmul(x, w) + b
    out = pre_activation if activation_func is None else activation_func(pre_activation)
    log_debug('fc--in:{}, out:{}'.format(x.shape.as_list(), out_size), fore='b')
    return out
