# Train the LSTM temperature model and save it to disk.
import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from sklearn.preprocessing import MinMaxScaler
import pymysql
def create_dataset(dataset, look_back=1):
    """Slice a 2-D series into (window, next-value) supervised pairs.

    Args:
        dataset: 2-D array-like of shape (n_samples, n_features); only
            column 0 is used as the series.
        look_back: number of consecutive readings per input window.

    Returns:
        (dataX, dataY): dataX has shape (n_samples - look_back, look_back);
        dataY[i] is the reading immediately following window dataX[i].
    """
    dataX, dataY = [], []
    # NOTE(review): the original bound was len(dataset) - look_back - 1,
    # which dropped the last valid (window, target) pair — off-by-one.
    for i in range(len(dataset) - look_back):
        dataX.append(dataset[i:(i + look_back), 0])
        dataY.append(dataset[i + look_back, 0])
    return numpy.array(dataX), numpy.array(dataY)


# Connect to the MySQL instance that stores the sensor readings (3306 is the
# MySQL default port).
# NOTE(review): credentials are hard-coded — move them to config/env vars.
db = pymysql.connect(host='117.73.10.122',user='root', password='root',port=3306, db='mqtt')
print(db)
# Obtain a cursor for executing statements.
cursor = db.cursor()
print(cursor)
# Fetch every sensor row; columns used below: 0=id, 1=createdAt, 3=RH, 4=temp
# — presumably matching the `data` table schema; verify against the database.
sql = """select * from data"""
id = []         # row ids (name shadows the builtin; kept for compatibility)
RH = []         # relative humidity readings
temp = []       # temperature readings (the series the model trains on)
createdAt = []  # reading timestamps
try:
    cursor.execute(sql)
    for row in cursor.fetchall():
        RH.append(row[3])
        createdAt.append(row[1])
        id.append(row[0])
        temp.append(row[4])
    # NOTE(review): the original called db.commit() here — a no-op for a
    # SELECT — so it was dropped.
except Exception as exc:
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt
    # and hid the real failure; roll back and report instead of failing silently.
    db.rollback()
    print(exc)
finally:
    # Always release the connection, on success and on failure alike.
    db.close()
# Turn the temperature series into a column vector (one feature per row),
# the shape MinMaxScaler expects.
# NOTE(review): the original loop iterated range(len(temp) - 1) and silently
# dropped the most recent reading — off-by-one; every sample is kept now.
arr = [[t] for t in temp]

# Rescale readings into [0, 1]; LSTMs train poorly on unscaled inputs.
scaler = MinMaxScaler(feature_range=(0, 1))
arr = scaler.fit_transform(arr)

# Chronological 67/33 split — no shuffling, so the test set strictly follows
# the training set in time.
train_size = int(len(arr) * 0.67)
test_size = len(arr) - train_size
train, test = arr[0:train_size, :], arr[train_size:len(arr), :]

look_back = 1  # window length: how many past readings predict the next one
trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)

print(trainX[:2], trainY[:2])
# Keras LSTMs expect input of shape (samples, timesteps, features).
# create_dataset returns (samples, look_back), so each window becomes
# look_back timesteps of a single feature.
# NOTE(review): the original reshaped to (samples, look_back, trainX.shape[1]),
# which only has the right element count when look_back == 1;
# (samples, timesteps, 1) is correct for any look_back and identical here.
trainX = numpy.reshape(trainX, (trainX.shape[0], trainX.shape[1], 1))
testX = numpy.reshape(testX, (testX.shape[0], testX.shape[1], 1))

# Single-layer LSTM regressor: 120 hidden units reading windows of shape
# (timesteps, features), followed by a one-unit linear output head.
model = Sequential([
    LSTM(120, input_shape=(trainX.shape[1], trainX.shape[2])),
    Dense(1),
])
# Mean-squared-error regression trained with Adam.
model.compile(loss='mean_squared_error', optimizer='adam')
# batch_size=1 updates weights after every sample; verbose=2 prints one
# summary line per epoch.
model.fit(trainX, trainY, epochs=50, batch_size=1, verbose=2)
# Persist architecture + weights together for later inference.
model.save('lstm_model.h5')
#----------------------------------------------------------------------------------------------------------
