Roberta2024 committed on
Commit 4eee952
1 Parent(s): 4884347

Create app.py

Files changed (1): app.py +97 -0
app.py ADDED
@@ -0,0 +1,97 @@
+ import pandas as pd
+ import numpy as np
+ import tensorflow as tf
+ from tensorflow.keras.models import Sequential
+ from tensorflow.keras.layers import Dense, LSTM
+ from sklearn.preprocessing import MinMaxScaler
+ from sklearn.metrics import mean_squared_error
+ import matplotlib.pyplot as plt
+ import gradio as gr
+
+ # fix random seed for reproducibility
+ tf.random.set_seed(7)
+
+ def train_and_predict(file, epochs):
+     # Load the dataset (the series is expected in the second column)
+     dataframe = pd.read_csv(file.name, usecols=[1], engine='python', encoding="big5")
+     dataset = dataframe.values.astype('float32')
+
+     # Normalize the dataset to the [0, 1] range
+     scaler = MinMaxScaler(feature_range=(0, 1))
+     dataset = scaler.fit_transform(dataset)
+
+     # Split into train and test sets
+     train_size = int(len(dataset) * 0.8)
+     train, test = dataset[0:train_size, :], dataset[train_size:len(dataset), :]
+
+     # Convert an array of values into a dataset matrix
+     def create_dataset(dataset, look_back=1):
+         dataX, dataY = [], []
+         for i in range(len(dataset) - look_back - 1):
+             a = dataset[i:(i + look_back), 0]
+             dataX.append(a)
+             dataY.append(dataset[i + look_back, 0])
+         return np.array(dataX), np.array(dataY)
+
+     # Reshape into X=t and Y=t+1
+     look_back = 1
+     trainX, trainY = create_dataset(train, look_back)
+     testX, testY = create_dataset(test, look_back)
+
+     trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))  # [samples, time steps, features]
+     testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))
+
+     # Create and fit the LSTM network
+     model = Sequential()
+     model.add(LSTM(4, input_shape=(1, look_back)))
+     model.add(Dense(1))
+     model.compile(loss='mean_squared_error', optimizer='adam')
+     model.fit(trainX, trainY, epochs=int(epochs), batch_size=1, verbose=2)
+
+     # Make predictions
+     trainPredict = model.predict(trainX)
+     testPredict = model.predict(testX)
+
+     # Invert predictions back to the original scale
+     trainPredict = scaler.inverse_transform(trainPredict)
+     trainY = scaler.inverse_transform([trainY])
+     testPredict = scaler.inverse_transform(testPredict)
+     testY = scaler.inverse_transform([testY])
+
+     # Calculate root mean squared error
+     trainScore = np.sqrt(mean_squared_error(trainY[0], trainPredict[:, 0]))
+     testScore = np.sqrt(mean_squared_error(testY[0], testPredict[:, 0]))
+
+     # Shift predictions so they line up with the original series when plotted
+     trainPredictPlot = np.empty_like(dataset)
+     trainPredictPlot[:, :] = np.nan
+     trainPredictPlot[look_back:len(trainPredict) + look_back, :] = trainPredict
+
+     testPredictPlot = np.empty_like(dataset)
+     testPredictPlot[:, :] = np.nan
+     testPredictPlot[len(trainPredict) + (look_back * 2) + 1:len(dataset) - 1, :] = testPredict
+
+     fig = plt.figure(figsize=(12, 8))
+     plt.plot(scaler.inverse_transform(dataset), label='Original Data')
+     plt.plot(trainPredictPlot, label='Training Predictions', linestyle='--')
+     plt.plot(testPredictPlot, label='Test Predictions', linestyle='--')
+     plt.xlabel('Time')
+     plt.ylabel('Value')
+     plt.title('Original Data and Predictions')
+     plt.legend()
+
+     return f'Train Score: {trainScore:.2f} RMSE\nTest Score: {testScore:.2f} RMSE', fig
+
+ # Gradio interface
+ file_input = gr.File(label="Upload CSV File")
+ epochs_input = gr.Slider(minimum=1, maximum=100, value=50, step=1, label="Epochs")
+ output_text = gr.Textbox(label="Training and Testing RMSE Scores")
+ output_plot = gr.Plot(label="Original Data and Predictions")
+
+ gr.Interface(
+     fn=train_and_predict,
+     inputs=[file_input, epochs_input],
+     outputs=[output_text, output_plot],
+     title="LSTM Model for Time Series Prediction",
+     description="Upload a CSV file with time series data and specify the number of epochs to train the model."
+ ).launch()