# coding:utf-8

import numpy as np
import pandas as pd
from keras.models import Sequential
from keras.layers import LSTM, Dense, LSTMCell, Dropout
import matplotlib.pyplot as plt
from pprint import pprint
file = r"D:\hiicy\datasets\international-airline-passengers.csv"


def drow(file):
    """Plot the 'passengers' series from the CSV at *file*, indexed by 'time'."""
    frame = pd.read_csv(file).set_index('time')
    frame['passengers'].plot()
    plt.show()
def main(file):
    """Train a stacked-LSTM regressor on the passenger series in *file*.

    Reads the second CSV column (passenger counts), builds sliding
    windows of 8 inputs + 1 target, standardises them, splits 80/20
    into train/test, and fits the model.
    """
    data = pd.read_csv(file, sep=',', usecols=[1])
    data_all = np.array(data).astype(float)  # 2-D, single column: (n, 1)

    def x_seq(data_all, sequence_length=8):
        # Sliding windows of length sequence_length+1: the first
        # `sequence_length` values are inputs, the last is the target.
        windows = []
        for i in range(len(data_all) - sequence_length - 1):
            windows.append(data_all[i:i + sequence_length + 1])
        return np.asarray(windows, dtype='float64')

    data = x_seq(data_all)
    # Standardise to zero mean / unit variance over all windows.
    data = (data - data.mean()) / data.std()

    # Shuffle the windows, then split into train and test sets.
    split_pro = 0.8
    np.random.shuffle(data)
    x = data[:, :-1]
    y = data[:, -1]
    print(x.shape)
    split_boundary = int(data.shape[0] * split_pro)
    train_x = x[:split_boundary]
    train_y = y[:split_boundary]

    test_x = x[split_boundary:]
    test_y = y[split_boundary:]

    # LSTM expects a 3-D tensor: (samples, timesteps, input_dim).
    print('shape', train_x.shape)  # fixed typo 'sahpe'; print the shape, not the whole array
    train_x = np.reshape(train_x, (train_x.shape[0], train_x.shape[1], 1))
    test_x = np.reshape(test_x, (test_x.shape[0], test_x.shape[1], 1))

    model = Sequential()
    # return_sequences=True: emit the hidden state at every timestep so
    # the next LSTM layer receives a full sequence; the second LSTM
    # returns only the final state for the dense regression head.
    model.add(LSTM(50,
                   input_shape=(None, 1),
                   return_sequences=True))
    model.add(LSTM(100,
                   return_sequences=False))
    model.add(Dropout(0.2))
    model.add(Dense(1, activation="linear"))
    model.compile(loss='mse', optimizer='rmsprop')
    # BUG FIX: model.fit() was called with no arguments, which raises at
    # runtime — the network was never actually trained on the data above.
    model.fit(train_x, train_y,
              batch_size=32,
              epochs=50,
              validation_data=(test_x, test_y))


main(file)


"""
import keras
from keras.layers import Input,Embedding,LSTM,Dense
from keras.models import Model
def twitter():
	# 主要的输入接收新闻本身，即一个整数的序列（每个整数编码了一个词）。这些整数位于1到10，000
	# 之间（即我们的字典有10，000
	# 个词）。这个序列有100个单词
	main_input=Input(shape=(100,),dtype='int32',name='main_input')
	# into a sequence of dense 512 - dimensional vectors.
	x = Embedding(output_dim=512,input_dim=10000,input_length=100)(main_input)
	lstm_out=LSTM(32)(x)
	# 额外损失
	auxiliary_output = Dense(1, activation='sigmoid', name='aux_output')(lstm_out)
	# 然后，我们将LSTM与额外的输入数据串联起来组成输入，送入模型中
	auxiliary_input = Input(shape=(5,), name='aux_input')
	x = keras.layers.concatenate([lstm_out, auxiliary_input])

	# We stack a deep densely-connected network on top
	x = Dense(64, activation='relu')(x)
	x = Dense(64, activation='relu')(x)
	x = Dense(64, activation='relu')(x)
if __name__ == "__main__":
	pass
"""



