# -*- coding: utf-8 -*-
'''
Created on 2017年5月22日

@author: ZhuJiahui506
'''
import tensorflow as tf
from tensorflow.contrib import rnn


def bidirectional_lstm(inputs, hidden_num, step_size, embedding_size, out_weight, out_bias, layer_num=1, keep_prob=0.5):
    '''
    Build a (possibly stacked) bidirectional LSTM and project the last
    time-step's output through a linear layer.

    :param inputs: input tensor, the vectorized text representation,
                   shape [batch_size, step_size, embedding_size]
    :param hidden_num: number of hidden units in each LSTM cell
    :param step_size: sequence (padding) length of the text
    :param embedding_size: word-embedding dimension
    :param out_weight: output projection weight; must have
                       shape [2 * hidden_num, num_classes] because the
                       fw/bw outputs are concatenated
    :param out_bias: output projection bias, shape [num_classes]
    :param layer_num: number of stacked bidirectional layers
    :param keep_prob: dropout keep probability for each cell's output.
                      Defaults to the previously hard-coded 0.5; pass 1.0
                      to disable dropout (e.g. at inference time).
    :return: logits tensor of shape [batch_size, num_classes]
    '''

    def _make_cell():
        # Each layer must get its OWN cell instance. The original code
        # built the stack as [cell] * layer_num, which makes every layer
        # share a single cell object (and therefore its weights), and
        # fails to build when a layer's input size (embedding_size for
        # layer 0, 2 * hidden_num afterwards) differs from hidden_num.
        cell = rnn.BasicLSTMCell(hidden_num, forget_bias=1.0, state_is_tuple=True)
        return rnn.DropoutWrapper(cell=cell, input_keep_prob=1.0, output_keep_prob=keep_prob)

    # Stacked forward / backward cells, one fresh instance per layer.
    cell_fw = rnn.MultiRNNCell([_make_cell() for _ in range(layer_num)], state_is_tuple=True)
    cell_bw = rnn.MultiRNNCell([_make_cell() for _ in range(layer_num)], state_is_tuple=True)

    # Initial states are intentionally left unset; static_bidirectional_rnn
    # creates zero states from dtype, which keeps batch_size flexible.

    # static_bidirectional_rnn expects a length-step_size list of
    # [batch_size, embedding_size] tensors; unstack along the time axis
    # replaces the original transpose -> reshape -> split sequence.
    inputs = tf.unstack(inputs, step_size, axis=1)

    # outputs[i] is the fw/bw concatenation at step i,
    # shape [batch_size, 2 * hidden_num]; classify from the last step.
    outputs, _, _ = rnn.static_bidirectional_rnn(cell_fw, cell_bw, inputs=inputs, dtype=tf.float32)
    output = outputs[-1]

    return tf.matmul(output, out_weight) + out_bias
    

if __name__ == '__main__':
    # Module is library-only; no demo or self-test is run when executed directly.
    pass