# dynamic RNN
import numpy as np
import tensorflow as tf

# Model dimensions.
n_steps = 2    # time steps (sequence length)
n_inputs = 3   # features per time step
n_neurons = 5  # hidden-state size (number of RNN units)

# Mini-batch of 4 sequences, each 2 steps of 3 features: shape (4, 2, 3).
X_batch = np.array([
    # t = 0      t = 1
    [[0, 1, 2], [9, 8, 7]],  # instance 1
    [[3, 4, 5], [0, 0, 0]],  # instance 2
    [[6, 7, 8], [6, 5, 4]],  # instance 3
    [[9, 0, 1], [3, 2, 1]],  # instance 4
])

# Graph: a placeholder for the 3-D input batch and a basic RNN cell.
X = tf.placeholder(tf.float32, [None, n_steps, n_inputs])
basic_cell = tf.contrib.rnn.BasicRNNCell(num_units=n_neurons)
# dynamic_rnn wraps an internal while-loop, so the graph adapts to the
# number of time steps in the fed input instead of being unrolled by hand.
outputs, states = tf.nn.dynamic_rnn(basic_cell, X, dtype=tf.float32)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    outputs_val, states_val = sess.run(
        [outputs, states], feed_dict={X: X_batch})
    print('outputs_val')
    print(outputs_val)        # per-step outputs, shape (4, 2, 5)
    print('states_val')
    print(states_val)         # final state per instance, shape (4, 5)
    print('outputs_val[:,-1]')
    print(outputs_val[:,-1])  # last-step outputs; matches states_val (4, 5)
    print(outputs_val[-1])    # all step outputs of the last instance, (2, 5)

# Sample output captured from one run. The cell's weights are randomly
# initialized, so exact numbers differ between runs; note that states_val
# equals outputs_val[:, -1] (the last-step output of each instance).
'''
outputs_val
[[[-0.9029721  -0.34042302  0.69175494  0.79234093 -0.9175828 ]
  [-1.         -0.9951505  -0.98185223  0.99999654 -0.99999976]]

 [[-0.9999681  -0.91501606  0.3578671   0.9980062  -0.9998628 ]
  [-0.71020603  0.2167271   0.30062205  0.7996906   0.60600686]]

 [[-1.         -0.9920274  -0.10208116  0.99998283 -0.99999976]
  [-0.99999964 -0.9677378  -0.8702748   0.99989444 -0.99909204]]

 [[-0.9999333   0.83014137 -0.9993158   0.6591327  -0.83511984]
  [-0.99951136 -0.97164816 -0.9379803   0.8951973  -0.86094856]]]
states_val
[[-1.         -0.9951505  -0.98185223  0.99999654 -0.99999976]
 [-0.71020603  0.2167271   0.30062205  0.7996906   0.60600686]
 [-0.99999964 -0.9677378  -0.8702748   0.99989444 -0.99909204]
 [-0.99951136 -0.97164816 -0.9379803   0.8951973  -0.86094856]]
outputs_val[:,-1]
[[-1.         -0.9951505  -0.98185223  0.99999654 -0.99999976]
 [-0.71020603  0.2167271   0.30062205  0.7996906   0.60600686]
 [-0.99999964 -0.9677378  -0.8702748   0.99989444 -0.99909204]
 [-0.99951136 -0.97164816 -0.9379803   0.8951973  -0.86094856]]
'''
