AlcalaDataset committed on
Commit
2a25ae9
1 Parent(s): c292915
Average_Filter.m ADDED
@@ -0,0 +1,42 @@
+ % Recursive weighted average filtering algorithm
+ % RSSI_After: scalar (filtered output)
+ % RSSI_Before: vector with 3 values (most recent raw RSSI readings)
+
+ function [RSSI_After, F1, F2, TimeCount] = Average_Filter(RSSI_Before, F1_Before, F2_Before, n)
+
+     beta1 = 0.2;
+     beta2 = 0.8;
+     beta3 = 0.05;
+     beta4 = 0.15;
+     beta5 = 0.8;
+
+     TimeCount = n;
+     F1 = F1_Before;
+     F2 = F2_Before;
+
+     if n == 1
+         F1(1) = RSSI_Before(1);
+         F2(1) = RSSI_Before(1);
+         RSSI_After = RSSI_Before(1);
+     end
+
+     if n == 2
+         F1(2) = beta1*RSSI_Before(n-1) + beta2*RSSI_Before(n);
+         F2(2) = beta1*F1(n-1) + beta2*F1(n);
+         F3 = beta1*F2(n-1) + beta2*F2(n);
+         RSSI_After = F3;
+     end
+
+     if n >= 3
+         if n > 3   % keep only the last three samples once the buffer is full
+             n = 3;
+             F1(1) = F1(2); F1(2) = F1(3);
+             F2(1) = F2(2); F2(2) = F2(3);
+         end
+         F1(3) = beta3*RSSI_Before(n-2) + beta4*RSSI_Before(n-1) + beta5*RSSI_Before(n);
+         F2(3) = beta3*F1(n-2) + beta4*F1(n-1) + beta5*F1(n);
+         F3 = beta3*F2(n-2) + beta4*F2(n-1) + beta5*F2(n);
+         RSSI_After = F3;
+     end
+     TimeCount = TimeCount + 1;
+ end
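Note: a minimal Python sketch of the same three-sample recursive filter is given below, for readers who want to sanity-check the MATLAB routine outside MATLAB. The port and the sample rssi_stream values are illustrative assumptions, not part of this commit.

# Hypothetical Python port of Average_Filter.m, for illustration only.
import numpy as np

def average_filter(rssi_before, f1, f2, n):
    """Recursive weighted average over the last three RSSI samples."""
    b1, b2 = 0.2, 0.8              # two-sample weights
    b3, b4, b5 = 0.05, 0.15, 0.8   # three-sample weights
    if n == 1:
        f1[0] = f2[0] = rssi_before[0]
        return rssi_before[0], f1, f2
    if n == 2:
        f1[1] = b1*rssi_before[0] + b2*rssi_before[1]
        f2[1] = b1*f1[0] + b2*f1[1]
        return b1*f2[0] + b2*f2[1], f1, f2
    if n > 3:  # slide the two filter buffers once they are full
        f1[0], f1[1] = f1[1], f1[2]
        f2[0], f2[1] = f2[1], f2[2]
    f1[2] = b3*rssi_before[0] + b4*rssi_before[1] + b5*rssi_before[2]
    f2[2] = b3*f1[0] + b4*f1[1] + b5*f1[2]
    return b3*f2[0] + b4*f2[1] + b5*f2[2], f1, f2

# Driving the filter over a raw RSSI trace (illustrative values, in dBm):
rssi_stream = [-72.0, -70.5, -71.2, -69.8, -70.1]
buf, f1, f2 = [], np.zeros(3), np.zeros(3)
for n, r in enumerate(rssi_stream, start=1):
    buf = (buf + [r])[-3:]  # keep only the last three raw readings
    filtered, f1, f2 = average_filter(buf, f1, f2, n)
    print(n, filtered)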
BiLSTM_Model_5_10fold.py ADDED
@@ -0,0 +1,388 @@
+ #############################################################
+ # 10 locations / trajectory
+ # First location is known
+ # Date: 8 June 2018
+ #############################################################
+
+ import time
+ import tensorflow
+ # import lstm
+ import os
+ # import time
+ import warnings
+ import numpy as np
+ from numpy import newaxis
+ from keras.layers import TimeDistributed
+ from keras.layers.core import Dense, Activation, Dropout
+ from keras.layers.recurrent import LSTM
+ from keras.models import Sequential
+ # import matplotlib.pyplot as plt
+ from matplotlib import pyplot
+ import pandas as pd
+ import math
+ from pandas import DataFrame
+ from keras import backend as K
+ from keras.layers import Bidirectional
+
+ def rmse(y_true, y_pred):
+     return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
+
+ def err_absolute(y_true, y_pred):
+     err = K.sqrt(K.square(y_pred - y_true))
+     return err
+
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # Hide messy TensorFlow warnings
+ warnings.filterwarnings("ignore")  # Hide messy NumPy warnings
+
+ # Open file to write results
+ File1 = open('Error_BiLSTM_10_fold.csv', 'w')
+ # File2 = open('Traj_Model_5_DiffSpeed_2ms.csv','w')
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
+
+ NumTotalTraj = 365000
+
+ # Configure the sampling set -----------------
+ epochs_num = 500
+ N_fold = 5
+ NumTrajTraining = 20000
+ NumSam_PerTraj = 1
+ StartingTraj = 0
+ StartingSample = StartingTraj*NumSam_PerTraj  # a multiple of NumSam_PerTraj
+ NumSample = NumTrajTraining*NumSam_PerTraj  # NumTraj = NumSample/NumSam_PerTraj
+
+ # Configure the validation set -----------------
+ NumTrajVal = 10000
+ StartingTrajVal = 100000
+
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
+ NumValidation = NumTrajVal*NumSam_PerTraj
+
+ # Main run thread
+ global_start_time = time.time()
+ input_layer = 13  # X, Y & 11 MAC addresses
+ num_rssi_reading = 11
+ output_layer = 2
+ timestep = 9
+ NumBatch = 512
+ hidden_layer1 = 50
+ hidden_layer2 = 50
+
+ ##########################################################
+ ###### Test Trajectory Splitting #########################
+ ##########################################################
+ # Read training data --------------------------------------------------
+ print('> Loading data... ')
+ df1 = pd.read_csv('Input_Location_RSSI_10points_365k.csv')
+ df1 = np.asarray(df1)
+
+ df2 = pd.read_csv('Output_Location_RSSI_10points_365k.csv')
+ df2 = np.asarray(df2)
+
+ ########################################################################
+ # Read validation data -------------------------------------------------
+ ########################################################################
+ # df3 = df1[StartingValidation:StartingValidation+NumValidation,:]
+ # input_validation = df3.reshape(len(df3),timestep,input_layer)
+ # print(input_validation.shape)
+
+ # df4 = df2[StartingValidation:StartingValidation+NumValidation,:]
+ # output_validation = df4.reshape(len(df4),timestep,output_layer)
+ # print(output_validation.shape)
+
+ ####################################################################
+ # Testing data ------------------------
+ ####################################################################
+ # RSSI array
+ TestData_origin = pd.read_csv('Long_Traj_6July_v1_AverageFilter.csv')
+ print(TestData_origin.shape)
+ TestData_origin = np.asarray(TestData_origin)
+
+ LengthTest = len(TestData_origin)
+ StartTestIdx = 0
+ StopTestIdx = StartTestIdx + LengthTest
+
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
+ # LengthTest = len(TestData)
+
+ # List of locations
+ Location_origin = pd.read_csv('Long_Traj_Location.csv')
+ print(Location_origin.shape)
+ Location_origin = np.asarray(Location_origin)
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
+
+ print('> Data Loaded. Compiling...')
+
+ ####################################################################
+ # Initialize the first 9 locations ################################
+ ####################################################################
+ L1 = Location[0,:]
+ L2 = Location[0,:]
+ L3 = Location[0,:]
+ L4 = Location[0,:]
+ L5 = Location[0,:]
+ L6 = Location[0,:]
+ L7 = Location[0,:]
+ L8 = Location[0,:]
+ L9 = Location[0,:]
+
+ # Take RSSI
+ RSSI_L1 = TestData[0,:]
+ RSSI_L2 = TestData[1,:]
+ RSSI_L3 = TestData[1,:]
+ RSSI_L4 = TestData[1,:]
+ RSSI_L5 = TestData[1,:]
+ RSSI_L6 = TestData[1,:]
+ RSSI_L7 = TestData[1,:]
+ RSSI_L8 = TestData[1,:]
+ RSSI_L9 = TestData[1,:]
+ RSSI_L10 = TestData[1,:]
+
+ # Build the network --------------------------------------------
+ model1 = Sequential()
+ model1.add(Bidirectional(LSTM(hidden_layer1, return_sequences=True), input_shape=(timestep, input_layer), merge_mode='concat'))
+ model1.add(Dropout(0.2))
+ model1.add(Bidirectional(LSTM(hidden_layer2, return_sequences=True), input_shape=(timestep, input_layer), merge_mode='concat'))
+ model1.add(LSTM(hidden_layer2, return_sequences=True))
+ model1.add(Dropout(0.2))
+ model1.add(TimeDistributed(Dense(output_layer)))
+ model1.summary()
+ start = time.time()
+ model1.compile(loss="mse", optimizer="adam", metrics=[rmse])
+
+ # Training --------------------------------------------------------
+ loss = list()
+ ResultPlot = DataFrame()
+
+ #### Create a copy of the training input ######################
+ # input_training_org = input_training
+ # input_validation_org = input_validation
+ ###############################################################
+ # model1.load_weights("lstm_Model_5_DiffSpeed.h5")
+ # StartingValidation = 0
+ for CountFold in xrange(1, N_fold):
+     # Training samples -----------------------
+     StartingTraj = CountFold*NumTrajTraining
+     StartingSample = StartingTraj*NumSam_PerTraj  # a multiple of NumSam_PerTraj
+     df1_split = df1[StartingSample:StartingSample+NumSample,:]
+     input_training = df1_split.reshape(len(df1_split), timestep, input_layer)
+     # print(input_training.shape)
+     input_training_org = input_training
+
+     df2_split = df2[StartingSample:StartingSample+NumSample,:]
+     output_training = df2_split.reshape(len(df2_split), timestep, output_layer)
+     # print(output_training.shape)
+
+     H5_Name = 'Bilstm_Model_5_fold_' + str(CountFold) + '.h5'
+     iTimeStep = 1
+     iTimeStep_val = 1
+     for ep in xrange(epochs_num):
+         print("Iteration {} ----- ".format(ep))
+
+         # Fit model
+         # hist = model1.fit(input_training, output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
+         hist = model1.fit(input_training, output_training, epochs=1, batch_size=NumBatch, verbose=1, shuffle=False)
+         loss.append(hist.history['loss'][0])
+         # File3.write(str(hist.history['loss'][0]) + '\n')  # write to file
+
+         # Reform the training input ---------------------------------
+         print("Training Predict Sample {} ... ".format(iTimeStep))
+         Predicted_L_Training = model1.predict(input_training)
+         # Feed the predicted locations back as inputs of the next step
+         for iSample in xrange(0, len(df1_split)):
+             # First location: known
+             # Update the predicted location starting from the second location
+             input_training[iSample,iTimeStep,0] = Predicted_L_Training[iSample,iTimeStep-1,0]
+             input_training[iSample,iTimeStep,1] = Predicted_L_Training[iSample,iTimeStep-1,1]
+         iTimeStep = iTimeStep + 1  # Update for the next time step
+
+         if iTimeStep == timestep:  # reset after one round -------
+             input_training = input_training_org
+             iTimeStep = 1
+         # ------------------------------------------------------------------
+
+         # Reform the validation set for testing ----------------------------
+         # Input is updated by prediction; output: ideal
+         # print("Validation Predict ...")
+         # Predicted_L_Validation = model1.predict(input_validation)
+         # for iSample in xrange(0, len(df3)):
+         #     input_validation[iSample,iTimeStep_val,0] = Predicted_L_Validation[iSample,iTimeStep_val-1,0]
+         #     input_validation[iSample,iTimeStep_val,1] = Predicted_L_Validation[iSample,iTimeStep_val-1,1]
+         # iTimeStep_val = iTimeStep_val + 1
+         # print("TESTING...........")
+
+         # if iTimeStep_val == timestep:  # reset after one round -------
+         #     input_validation = input_validation_org
+         #     iTimeStep_val = 1
+         # ------------------------------------------------------------------
+         # Calculate the validation error -----------------------------------
+
+     # model1.fit(input_training, output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
+     model1.save_weights(H5_Name)
+     # print('Training duration (s): ', time.time() - global_start_time)
+
+     # Testing ---------------------------------------------------------
+     #########################################################
+     # The first (timestep-1) steps: fill up the buffer
+     #########################################################
+     Acc_Location = np.zeros((timestep, 2))
+     Acc_Location[0,:] = L1
+     L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
+     RSSI_combine = np.concatenate((RSSI_L2,RSSI_L3,RSSI_L4,RSSI_L5,RSSI_L6,RSSI_L7,RSSI_L8,RSSI_L9,RSSI_L10), axis=0)
+
+     for Step in xrange(1, timestep):
+         # Update the locations & RSSIs buffer
+         LocationIdx = 0
+         RSSIdx = 0
+         TestingData = np.zeros(len(L_combine) + len(RSSI_combine))
+         for i in xrange(timestep):
+             TestingData[LocationIdx] = L_combine[i*2]
+             TestingData[LocationIdx+1] = L_combine[i*2+1]
+             for j in xrange(len(RSSI_L2)):
+                 TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx]) + 100)/100
+             LocationIdx = LocationIdx + input_layer
+             RSSIdx = RSSIdx + num_rssi_reading
+         # print(TestingData)
+
+         TestingData = TestingData.reshape(1, timestep, input_layer)
+
+         # Prediction
+         Predicted_L = model1.predict(TestingData)
+         Acc_Location[Step,:] = Predicted_L[0,Step-1,:]
+         print(Acc_Location)
+
+         # Update for the next step
+         IdxTemp = 0
+         # Update locations
+         for j in xrange(Step*2, len(L_combine)):
+             L_combine[j] = Acc_Location[Step,IdxTemp]
+             if IdxTemp == 0:
+                 IdxTemp = 1
+             else:
+                 IdxTemp = 0
+
+         # Take RSSI
+         IdxTemp = 0
+         for j in xrange(Step*num_rssi_reading, len(RSSI_combine)):
+             RSSI_combine[j] = TestData[Step+1,IdxTemp]
+             IdxTemp = IdxTemp + 1
+             if IdxTemp == num_rssi_reading:  # reached the end
+                 IdxTemp = 0
+
+     #########################################################
+     # After the buffer is full - do the test
+     #########################################################
+     CountArray = np.ones(timestep)
+     error = np.zeros(LengthTest-1)
+     # Predicted_array = np.zeros((LengthTest-1, 2))
+     Average_Err = 0
+
+     for CountTest in xrange(LengthTest-timestep):
+         print("Location {} ------------".format(CountTest))
+         # Update the locations & RSSIs buffer
+         LocationIdx = 0
+         RSSIdx = 0
+         TestingData = np.zeros(len(L_combine) + len(RSSI_combine))
+         for i in xrange(timestep):
+             TestingData[LocationIdx] = L_combine[i*2]
+             TestingData[LocationIdx+1] = L_combine[i*2+1]
+             for j in xrange(len(RSSI_L2)):
+                 TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx]) + 100)/100
+             LocationIdx = LocationIdx + input_layer
+             RSSIdx = RSSIdx + num_rssi_reading
+
+         TestingData = TestingData.reshape(1, timestep, input_layer)
+         # print(TestingData)
+
+         # Prediction
+         Predicted_L = model1.predict(TestingData)
+
+         # for t in xrange(timestep):
+         #     File4.write(str(Predicted_L[0,t,0]) + ',')
+         #     File4.write(str(Predicted_L[0,t,1]) + ',')  # write to file
+         # File4.write('\n')
+
+         # Re-arrange the accumulated locations
+         for t in xrange(timestep-1):
+             CountArray[t] = CountArray[t+1]
+             Acc_Location[t,:] = Acc_Location[t+1,:] + Predicted_L[0,t,:]
+             CountArray[t] = CountArray[t] + 1
+
+         Acc_Location[timestep-1,:] = Predicted_L[0,timestep-1,:]  # Update the new location
+         CountArray[timestep-1] = 1  # Update the new count
+         # print(Acc_Location)
+         # print(CountArray)
+         ######################################################################
+         ############# UPDATE LOCATION ########################################
+         ######################################################################
+         Final_L = Acc_Location[0,:]/CountArray[0]
+         CountArray[0] = 1
+         # Take the correct location, compare the result
+         Correct_L = Location[CountTest+1,:]
+         error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]), 2) + np.power((Final_L[1] - Correct_L[1]), 2))
+         print "Predict: {} --- Exact: {}, Error: {}".format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
+         Average_Err = Average_Err + error[CountTest]
+
+         File1.write(str(error[CountTest]) + ' , ')  # write to file
+         # File2.write(str(Final_L[0]) + ',')
+         # File2.write(str(Final_L[1]) + '\n')  # write to file
+
+         # Re-arrange L_combine
+         for t in xrange(timestep-1):
+             L_combine[t*2] = L_combine[(t+1)*2]
+             L_combine[t*2+1] = L_combine[(t+1)*2+1]
+         # Update
+         L_combine[(timestep-1)*2] = Predicted_L[0,timestep-1,0]
+         L_combine[(timestep-1)*2+1] = Predicted_L[0,timestep-1,1]
+
+         # Re-arrange RSSI_combine
+         if CountTest+timestep+1 < LengthTest:
+             for t in xrange(timestep-1):
+                 for k in xrange(num_rssi_reading):
+                     RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
+
+             for k in xrange(num_rssi_reading):
+                 RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+timestep+1,k]
+
+     ###################################################################
+     ############### The last locations ################################
+     ###################################################################
+     for i in xrange(timestep-1):
+         Final_L = Acc_Location[i+1,:]/CountArray[i+1]
+         # Take the correct location, compare the result
+         Correct_L = Location[CountTest+1,:]
+         error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]), 2) + np.power((Final_L[1] - Correct_L[1]), 2))
+         print "Predict: {} --- Exact: {}, Error: {}".format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
+         Average_Err = Average_Err + error[CountTest]
+
+         File1.write(str(error[CountTest]) + ' , ')  # write to file
+         # File2.write(str(Final_L[0]) + ',')
+         # File2.write(str(Final_L[1]) + '\n')  # write to file
+         CountTest = CountTest + 1
+
+     File1.write('\n')
+     Average_Err = Average_Err/(LengthTest-1)
+     print "Average Error: ", Average_Err
+
+     Std_Err = 0
+     for k in xrange(LengthTest-1):
+         Std_Err = Std_Err + np.power((error[k] - Average_Err), 2)
+     Std_Err = Std_Err/(LengthTest-1)
+     Std_Err = np.sqrt(Std_Err)
+     print "Std: ", Std_Err
+
+ #### Show figure ######################
+ # if epochs_num > 0:
+ #     ResultPlot['neurons_500'] = loss
+ #     ResultPlot.plot()
+ #     pyplot.show()
+
+ File1.close()
+ # File2.close()
+ # File3.close()
+ # File4.close()
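Note: throughout the buffer-building loops above, each timestep of the network input is the 13-wide vector [x, y, rssi_1 .. rssi_11], with every raw RSSI reading rescaled by (rssi + 100)/100 so typical dBm values land near [0, 1]. A small sketch of that layout follows; the helper name and sample values are hypothetical, not part of this commit.

# Hypothetical helper mirroring the TestingData buffer-building loops above.
import numpy as np

def build_timestep_vector(xy, rssi_readings):
    """One 13-wide input row: [x, y] followed by 11 rescaled RSSI readings."""
    rssi = (np.asarray(rssi_readings, dtype=float) + 100.0) / 100.0
    return np.concatenate(([xy[0], xy[1]], rssi))

# Example: one known location plus 11 raw dBm readings (illustrative values).
row = build_timestep_vector((3.5, 7.2),
                            [-68, -72, -80, -75, -90, -66, -71, -84, -77, -69, -88])
print(row.shape)  # (13,)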
BiRNN_Model_5_10fold.py ADDED
@@ -0,0 +1,388 @@
+ #############################################################
+ # 10 locations / trajectory
+ # First location is known
+ # Date: 8 June 2018
+ #############################################################
+
+ import time
+ import tensorflow
+ # import lstm
+ import os
+ # import time
+ import warnings
+ import numpy as np
+ from numpy import newaxis
+ from keras.layers import TimeDistributed
+ from keras.layers.core import Dense, Activation, Dropout
+ from keras.layers.recurrent import SimpleRNN
+ from keras.models import Sequential
+ # import matplotlib.pyplot as plt
+ from matplotlib import pyplot
+ import pandas as pd
+ import math
+ from pandas import DataFrame
+ from keras import backend as K
+ from keras.layers import Bidirectional
+
+ def rmse(y_true, y_pred):
+     return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
+
+ def err_absolute(y_true, y_pred):
+     err = K.sqrt(K.square(y_pred - y_true))
+     return err
+
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # Hide messy TensorFlow warnings
+ warnings.filterwarnings("ignore")  # Hide messy NumPy warnings
+
+ # Open file to write results
+ File1 = open('Error_BiSimpleRNN_10_fold.csv', 'w')
+ # File2 = open('Traj_Model_5_DiffSpeed_2ms.csv','w')
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
+
+ NumTotalTraj = 365000
+
+ # Configure the sampling set -----------------
+ epochs_num = 500
+ N_fold = 5
+ NumTrajTraining = 20000
+ NumSam_PerTraj = 1
+ StartingTraj = 0
+ StartingSample = StartingTraj*NumSam_PerTraj  # a multiple of NumSam_PerTraj
+ NumSample = NumTrajTraining*NumSam_PerTraj  # NumTraj = NumSample/NumSam_PerTraj
+
+ # Configure the validation set -----------------
+ NumTrajVal = 10000
+ StartingTrajVal = 100000
+
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
+ NumValidation = NumTrajVal*NumSam_PerTraj
+
+ # Main run thread
+ global_start_time = time.time()
+ input_layer = 13  # X, Y & 11 MAC addresses
+ num_rssi_reading = 11
+ output_layer = 2
+ timestep = 9
+ NumBatch = 512
+ hidden_layer1 = 50
+ hidden_layer2 = 50
+
+ ##########################################################
+ ###### Test Trajectory Splitting #########################
+ ##########################################################
+ # Read training data --------------------------------------------------
+ print('> Loading data... ')
+ df1 = pd.read_csv('Input_Location_RSSI_10points_365k.csv')
+ df1 = np.asarray(df1)
+
+ df2 = pd.read_csv('Output_Location_RSSI_10points_365k.csv')
+ df2 = np.asarray(df2)
+
+ ########################################################################
+ # Read validation data -------------------------------------------------
+ ########################################################################
+ # df3 = df1[StartingValidation:StartingValidation+NumValidation,:]
+ # input_validation = df3.reshape(len(df3),timestep,input_layer)
+ # print(input_validation.shape)
+
+ # df4 = df2[StartingValidation:StartingValidation+NumValidation,:]
+ # output_validation = df4.reshape(len(df4),timestep,output_layer)
+ # print(output_validation.shape)
+
+ ####################################################################
+ # Testing data ------------------------
+ ####################################################################
+ # RSSI array
+ TestData_origin = pd.read_csv('Long_Traj_172Locations_RSSI.csv')
+ print(TestData_origin.shape)
+ TestData_origin = np.asarray(TestData_origin)
+
+ LengthTest = len(TestData_origin)
+ StartTestIdx = 0
+ StopTestIdx = StartTestIdx + LengthTest
+
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
+ # LengthTest = len(TestData)
+
+ # List of locations
+ Location_origin = pd.read_csv('Long_Traj_172Locations.csv')
+ print(Location_origin.shape)
+ Location_origin = np.asarray(Location_origin)
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
+
+ print('> Data Loaded. Compiling...')
+
+ ####################################################################
+ # Initialize the first 9 locations ################################
+ ####################################################################
+ L1 = Location[0,:]
+ L2 = Location[0,:]
+ L3 = Location[0,:]
+ L4 = Location[0,:]
+ L5 = Location[0,:]
+ L6 = Location[0,:]
+ L7 = Location[0,:]
+ L8 = Location[0,:]
+ L9 = Location[0,:]
+
+ # Take RSSI
+ RSSI_L1 = TestData[0,:]
+ RSSI_L2 = TestData[1,:]
+ RSSI_L3 = TestData[1,:]
+ RSSI_L4 = TestData[1,:]
+ RSSI_L5 = TestData[1,:]
+ RSSI_L6 = TestData[1,:]
+ RSSI_L7 = TestData[1,:]
+ RSSI_L8 = TestData[1,:]
+ RSSI_L9 = TestData[1,:]
+ RSSI_L10 = TestData[1,:]
+
+ # Build the network --------------------------------------------
+ model1 = Sequential()
+ model1.add(Bidirectional(SimpleRNN(hidden_layer1, return_sequences=True), input_shape=(timestep, input_layer), merge_mode='concat'))
+ model1.add(Dropout(0.2))
+ model1.add(Bidirectional(SimpleRNN(hidden_layer2, return_sequences=True), input_shape=(timestep, input_layer), merge_mode='concat'))
+ model1.add(SimpleRNN(hidden_layer2, return_sequences=True))
+ model1.add(Dropout(0.2))
+ model1.add(TimeDistributed(Dense(output_layer)))
+ model1.summary()
+ start = time.time()
+ model1.compile(loss="mse", optimizer="adam", metrics=[rmse])
+
+ # Training --------------------------------------------------------
+ loss = list()
+ ResultPlot = DataFrame()
+
+ #### Create a copy of the training input ######################
+ # input_training_org = input_training
+ # input_validation_org = input_validation
+ ###############################################################
+ # model1.load_weights("lstm_Model_5_DiffSpeed.h5")
+ # StartingValidation = 0
+ for CountFold in xrange(1, N_fold):
+     # Training samples -----------------------
+     StartingTraj = CountFold*NumTrajTraining
+     StartingSample = StartingTraj*NumSam_PerTraj  # a multiple of NumSam_PerTraj
+     df1_split = df1[StartingSample:StartingSample+NumSample,:]
+     input_training = df1_split.reshape(len(df1_split), timestep, input_layer)
+     # print(input_training.shape)
+     input_training_org = input_training
+
+     df2_split = df2[StartingSample:StartingSample+NumSample,:]
+     output_training = df2_split.reshape(len(df2_split), timestep, output_layer)
+     # print(output_training.shape)
+
+     H5_Name = 'BiSimpleRNN_Model_5_fold_' + str(CountFold) + '.h5'
+     iTimeStep = 1
+     iTimeStep_val = 1
+     for ep in xrange(epochs_num):
+         print("Iteration {} ----- ".format(ep))
+
+         # Fit model
+         # hist = model1.fit(input_training, output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
+         hist = model1.fit(input_training, output_training, epochs=1, batch_size=NumBatch, verbose=1, shuffle=False)
+         loss.append(hist.history['loss'][0])
+         # File3.write(str(hist.history['loss'][0]) + '\n')  # write to file
+
+         # Reform the training input ---------------------------------
+         print("Training Predict Sample {} ... ".format(iTimeStep))
+         Predicted_L_Training = model1.predict(input_training)
+         # Feed the predicted locations back as inputs of the next step
+         for iSample in xrange(0, len(df1_split)):
+             # First location: known
+             # Update the predicted location starting from the second location
+             input_training[iSample,iTimeStep,0] = Predicted_L_Training[iSample,iTimeStep-1,0]
+             input_training[iSample,iTimeStep,1] = Predicted_L_Training[iSample,iTimeStep-1,1]
+         iTimeStep = iTimeStep + 1  # Update for the next time step
+
+         if iTimeStep == timestep:  # reset after one round -------
+             input_training = input_training_org
+             iTimeStep = 1
+         # ------------------------------------------------------------------
+
+         # Reform the validation set for testing ----------------------------
+         # Input is updated by prediction; output: ideal
+         # print("Validation Predict ...")
+         # Predicted_L_Validation = model1.predict(input_validation)
+         # for iSample in xrange(0, len(df3)):
+         #     input_validation[iSample,iTimeStep_val,0] = Predicted_L_Validation[iSample,iTimeStep_val-1,0]
+         #     input_validation[iSample,iTimeStep_val,1] = Predicted_L_Validation[iSample,iTimeStep_val-1,1]
+         # iTimeStep_val = iTimeStep_val + 1
+         # print("TESTING...........")
+
+         # if iTimeStep_val == timestep:  # reset after one round -------
+         #     input_validation = input_validation_org
+         #     iTimeStep_val = 1
+         # ------------------------------------------------------------------
+         # Calculate the validation error -----------------------------------
+
+     # model1.fit(input_training, output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
+     model1.save_weights(H5_Name)
+     # print('Training duration (s): ', time.time() - global_start_time)
+
+     # Testing ---------------------------------------------------------
+     #########################################################
+     # The first (timestep-1) steps: fill up the buffer
+     #########################################################
+     Acc_Location = np.zeros((timestep, 2))
+     Acc_Location[0,:] = L1
+     L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
+     RSSI_combine = np.concatenate((RSSI_L2,RSSI_L3,RSSI_L4,RSSI_L5,RSSI_L6,RSSI_L7,RSSI_L8,RSSI_L9,RSSI_L10), axis=0)
+
+     for Step in xrange(1, timestep):
+         # Update the locations & RSSIs buffer
+         LocationIdx = 0
+         RSSIdx = 0
+         TestingData = np.zeros(len(L_combine) + len(RSSI_combine))
+         for i in xrange(timestep):
+             TestingData[LocationIdx] = L_combine[i*2]
+             TestingData[LocationIdx+1] = L_combine[i*2+1]
+             for j in xrange(len(RSSI_L2)):
+                 TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx]) + 100)/100
+             LocationIdx = LocationIdx + input_layer
+             RSSIdx = RSSIdx + num_rssi_reading
+         # print(TestingData)
+
+         TestingData = TestingData.reshape(1, timestep, input_layer)
+
+         # Prediction
+         Predicted_L = model1.predict(TestingData)
+         Acc_Location[Step,:] = Predicted_L[0,Step-1,:]
+         print(Acc_Location)
+
+         # Update for the next step
+         IdxTemp = 0
+         # Update locations
+         for j in xrange(Step*2, len(L_combine)):
+             L_combine[j] = Acc_Location[Step,IdxTemp]
+             if IdxTemp == 0:
+                 IdxTemp = 1
+             else:
+                 IdxTemp = 0
+
+         # Take RSSI
+         IdxTemp = 0
+         for j in xrange(Step*num_rssi_reading, len(RSSI_combine)):
+             RSSI_combine[j] = TestData[Step+1,IdxTemp]
+             IdxTemp = IdxTemp + 1
+             if IdxTemp == num_rssi_reading:  # reached the end
+                 IdxTemp = 0
+
+     #########################################################
+     # After the buffer is full - do the test
+     #########################################################
+     CountArray = np.ones(timestep)
+     error = np.zeros(LengthTest-1)
+     # Predicted_array = np.zeros((LengthTest-1, 2))
+     Average_Err = 0
+
+     for CountTest in xrange(LengthTest-timestep):
+         print("Location {} ------------".format(CountTest))
+         # Update the locations & RSSIs buffer
+         LocationIdx = 0
+         RSSIdx = 0
+         TestingData = np.zeros(len(L_combine) + len(RSSI_combine))
+         for i in xrange(timestep):
+             TestingData[LocationIdx] = L_combine[i*2]
+             TestingData[LocationIdx+1] = L_combine[i*2+1]
+             for j in xrange(len(RSSI_L2)):
+                 TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx]) + 100)/100
+             LocationIdx = LocationIdx + input_layer
+             RSSIdx = RSSIdx + num_rssi_reading
+
+         TestingData = TestingData.reshape(1, timestep, input_layer)
+         # print(TestingData)
+
+         # Prediction
+         Predicted_L = model1.predict(TestingData)
+
+         # for t in xrange(timestep):
+         #     File4.write(str(Predicted_L[0,t,0]) + ',')
+         #     File4.write(str(Predicted_L[0,t,1]) + ',')  # write to file
+         # File4.write('\n')
+
+         # Re-arrange the accumulated locations
+         for t in xrange(timestep-1):
+             CountArray[t] = CountArray[t+1]
+             Acc_Location[t,:] = Acc_Location[t+1,:] + Predicted_L[0,t,:]
+             CountArray[t] = CountArray[t] + 1
+
+         Acc_Location[timestep-1,:] = Predicted_L[0,timestep-1,:]  # Update the new location
+         CountArray[timestep-1] = 1  # Update the new count
+         # print(Acc_Location)
+         # print(CountArray)
+         ######################################################################
+         ############# UPDATE LOCATION ########################################
+         ######################################################################
+         Final_L = Acc_Location[0,:]/CountArray[0]
+         CountArray[0] = 1
+         # Take the correct location, compare the result
+         Correct_L = Location[CountTest+1,:]
+         error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]), 2) + np.power((Final_L[1] - Correct_L[1]), 2))
+         print "Predict: {} --- Exact: {}, Error: {}".format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
+         Average_Err = Average_Err + error[CountTest]
+
+         File1.write(str(error[CountTest]) + ' , ')  # write to file
+         # File2.write(str(Final_L[0]) + ',')
+         # File2.write(str(Final_L[1]) + '\n')  # write to file
+
+         # Re-arrange L_combine
+         for t in xrange(timestep-1):
+             L_combine[t*2] = L_combine[(t+1)*2]
+             L_combine[t*2+1] = L_combine[(t+1)*2+1]
+         # Update
+         L_combine[(timestep-1)*2] = Predicted_L[0,timestep-1,0]
+         L_combine[(timestep-1)*2+1] = Predicted_L[0,timestep-1,1]
+
+         # Re-arrange RSSI_combine
+         if CountTest+timestep+1 < LengthTest:
+             for t in xrange(timestep-1):
+                 for k in xrange(num_rssi_reading):
+                     RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
+
+             for k in xrange(num_rssi_reading):
+                 RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+timestep+1,k]
+
+     ###################################################################
+     ############### The last locations ################################
+     ###################################################################
+     for i in xrange(timestep-1):
+         Final_L = Acc_Location[i+1,:]/CountArray[i+1]
+         # Take the correct location, compare the result
+         Correct_L = Location[CountTest+1,:]
+         error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]), 2) + np.power((Final_L[1] - Correct_L[1]), 2))
+         print "Predict: {} --- Exact: {}, Error: {}".format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
+         Average_Err = Average_Err + error[CountTest]
+
+         File1.write(str(error[CountTest]) + ' , ')  # write to file
+         # File2.write(str(Final_L[0]) + ',')
+         # File2.write(str(Final_L[1]) + '\n')  # write to file
+         CountTest = CountTest + 1
+
+     File1.write('\n')
+     Average_Err = Average_Err/(LengthTest-1)
+     print "Average Error: ", Average_Err
+
+     Std_Err = 0
+     for k in xrange(LengthTest-1):
+         Std_Err = Std_Err + np.power((error[k] - Average_Err), 2)
+     Std_Err = Std_Err/(LengthTest-1)
+     Std_Err = np.sqrt(Std_Err)
+     print "Std: ", Std_Err
+
+ #### Show figure ######################
+ # if epochs_num > 0:
+ #     ResultPlot['neurons_500'] = loss
+ #     ResultPlot.plot()
+ #     pyplot.show()
+
+ File1.close()
+ # File2.close()
+ # File3.close()
+ # File4.close()
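Note: the trailing loops above compute the mean and the (population) standard deviation of the per-location errors by hand. An equivalent vectorized NumPy form, assuming an error array like the one the script fills, is sketched below; the sample values are illustrative.

# Vectorized equivalent of the Average_Err / Std_Err loops above.
import numpy as np

error = np.array([1.2, 0.8, 2.1, 1.5])  # illustrative per-location errors (metres)
average_err = error.mean()
std_err = np.sqrt(np.mean((error - average_err) ** 2))  # population std, as in the loops
print(average_err, std_err)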
BiSimpleRNN_Model_5_fold_1.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:84c621bf54df42e2856b57a719eee2265ec31b2e9ba895d6da40bbec8c1f2981
+ size 143184
Bigru_Model_5_fold_1.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d81c7a69381a5cb602afa5747f24937ee93f80cbcb249597849dfc016934332
+ size 375952
Bilstm_Model_5_fold_1.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6d7f11011febf6ae0b3b1f62e5d0160a56ce66e1bda68e7d806691060c2889c
+ size 491368
Diff_Structure_update.pdf ADDED
Binary file (720 kB)
GRU_Model_5_10fold.py ADDED
@@ -0,0 +1,389 @@
+ #############################################################
+ # 10 locations / trajectory
+ # First location is known
+ # Date: 8 June 2018
+ #############################################################
+
+ import time
+ import tensorflow
+ # import lstm
+ import os
+ # import time
+ import warnings
+ import numpy as np
+ from numpy import newaxis
+ from keras.layers import TimeDistributed
+ from keras.layers.core import Dense, Activation, Dropout
+ from keras.layers.recurrent import GRU
+ from keras.models import Sequential
+ # import matplotlib.pyplot as plt
+ from matplotlib import pyplot
+ import pandas as pd
+ import math
+ from pandas import DataFrame
+ from keras import backend as K
+
+ def rmse(y_true, y_pred):
+     return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
+
+ def err_absolute(y_true, y_pred):
+     err = K.sqrt(K.square(y_pred - y_true))
+     return err
+
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # Hide messy TensorFlow warnings
+ warnings.filterwarnings("ignore")  # Hide messy NumPy warnings
+
+ # Open file to write results
+ File1 = open('Error_GRU_10_fold.csv', 'w')
+ # File2 = open('Traj_Model_5_DiffSpeed_2ms.csv','w')
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
+
+ NumTotalTraj = 365000
+
+ # Configure the sampling set -----------------
+ epochs_num = 500
+ N_fold = 10
+ NumTrajTraining = 20000
+ NumSam_PerTraj = 1
+ StartingTraj = 0
+ StartingSample = StartingTraj*NumSam_PerTraj  # a multiple of NumSam_PerTraj
+ NumSample = NumTrajTraining*NumSam_PerTraj  # NumTraj = NumSample/NumSam_PerTraj
+
+ # Configure the validation set -----------------
+ NumTrajVal = 10000
+ StartingTrajVal = 100000
+
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
+ NumValidation = NumTrajVal*NumSam_PerTraj
+
+ # Main run thread
+ global_start_time = time.time()
+ input_layer = 13  # X, Y & 11 MAC addresses
+ num_rssi_reading = 11
+ output_layer = 2
+ timestep = 9
+ NumBatch = 512
+ hidden_layer1 = 100
+ hidden_layer2 = 100
+
+ ##########################################################
+ ###### Test Trajectory Splitting #########################
+ ##########################################################
+ # Read training data --------------------------------------------------
+ print('> Loading data... ')
+ df1 = pd.read_csv('Input_Location_RSSI_10points_365k.csv')
+ df1 = np.asarray(df1)
+
+ df2 = pd.read_csv('Output_Location_RSSI_10points_365k.csv')
+ df2 = np.asarray(df2)
+
+ ########################################################################
+ # Read validation data -------------------------------------------------
+ ########################################################################
+ # df3 = df1[StartingValidation:StartingValidation+NumValidation,:]
+ # input_validation = df3.reshape(len(df3),timestep,input_layer)
+ # print(input_validation.shape)
+
+ # df4 = df2[StartingValidation:StartingValidation+NumValidation,:]
+ # output_validation = df4.reshape(len(df4),timestep,output_layer)
+ # print(output_validation.shape)
+
+ ####################################################################
+ # Testing data ------------------------
+ ####################################################################
+ # RSSI array
+ TestData_origin = pd.read_csv('Long_Traj_6July_v1_AverageFilter.csv')
+ print(TestData_origin.shape)
+ TestData_origin = np.asarray(TestData_origin)
+
+ LengthTest = len(TestData_origin)
+ StartTestIdx = 0
+ StopTestIdx = StartTestIdx + LengthTest
+
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
+ # LengthTest = len(TestData)
+
+ # List of locations
+ Location_origin = pd.read_csv('Long_Traj_Location.csv')
+ print(Location_origin.shape)
+ Location_origin = np.asarray(Location_origin)
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
+
+ print('> Data Loaded. Compiling...')
+
+ ####################################################################
+ # Initialize the first 9 locations ################################
+ ####################################################################
+ L1 = Location[0,:]
+ L2 = Location[0,:]
+ L3 = Location[0,:]
+ L4 = Location[0,:]
+ L5 = Location[0,:]
+ L6 = Location[0,:]
+ L7 = Location[0,:]
+ L8 = Location[0,:]
+ L9 = Location[0,:]
+
+ # Take RSSI
+ RSSI_L1 = TestData[0,:]
+ RSSI_L2 = TestData[1,:]
+ RSSI_L3 = TestData[1,:]
+ RSSI_L4 = TestData[1,:]
+ RSSI_L5 = TestData[1,:]
+ RSSI_L6 = TestData[1,:]
+ RSSI_L7 = TestData[1,:]
+ RSSI_L8 = TestData[1,:]
+ RSSI_L9 = TestData[1,:]
+ RSSI_L10 = TestData[1,:]
+
+ # Build the network --------------------------------------------
+ model1 = Sequential()
+ model1.add(GRU(hidden_layer1, input_shape=(timestep, input_layer), return_sequences=True))
+ model1.add(Dropout(0.2))
+ model1.add(GRU(hidden_layer2, return_sequences=True))
+ model1.add(Dropout(0.2))
+ model1.add(TimeDistributed(Dense(output_layer)))
+ model1.summary()
+ start = time.time()
+ model1.compile(loss="mse", optimizer="adam", metrics=[rmse])
+ # model1.compile(loss=rmse, optimizer="adam", metrics=[err_absolute])  # rmse
+ # root_mean_squared_error
+ print("> Compilation Time : ", time.time() - start)
+
+ # Training --------------------------------------------------------
+ loss = list()
+ ResultPlot = DataFrame()
+
+ #### Create a copy of the training input ######################
+ # input_training_org = input_training
+ # input_validation_org = input_validation
+ ###############################################################
+ # model1.load_weights("lstm_Model_5_DiffSpeed.h5")
+ # StartingValidation = 0
+ for CountFold in xrange(1, N_fold):
+     # Training samples -----------------------
+     StartingTraj = CountFold*NumTrajTraining
+     StartingSample = StartingTraj*NumSam_PerTraj  # a multiple of NumSam_PerTraj
+     df1_split = df1[StartingSample:StartingSample+NumSample,:]
+     input_training = df1_split.reshape(len(df1_split), timestep, input_layer)
+     # print(input_training.shape)
+     input_training_org = input_training
+
+     df2_split = df2[StartingSample:StartingSample+NumSample,:]
+     output_training = df2_split.reshape(len(df2_split), timestep, output_layer)
+     # print(output_training.shape)
+
+     H5_Name = 'gru_Model_5_fold_' + str(CountFold) + '.h5'
+     iTimeStep = 1
+     iTimeStep_val = 1
+     for ep in xrange(epochs_num):
+         print("Iteration {} ----- ".format(ep))
+
+         # Fit model
+         # hist = model1.fit(input_training, output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
+         hist = model1.fit(input_training, output_training, epochs=1, batch_size=NumBatch, verbose=1, shuffle=False)
+         loss.append(hist.history['loss'][0])
+         # File3.write(str(hist.history['loss'][0]) + '\n')  # write to file
+
+         # Reform the training input ---------------------------------
+         print("Training Predict Sample {} ... ".format(iTimeStep))
+         Predicted_L_Training = model1.predict(input_training)
+         # Feed the predicted locations back as inputs of the next step
+         for iSample in xrange(0, len(df1_split)):
+             # First location: known
+             # Update the predicted location starting from the second location
+             input_training[iSample,iTimeStep,0] = Predicted_L_Training[iSample,iTimeStep-1,0]
+             input_training[iSample,iTimeStep,1] = Predicted_L_Training[iSample,iTimeStep-1,1]
+         iTimeStep = iTimeStep + 1  # Update for the next time step
+
+         if iTimeStep == timestep:  # reset after one round -------
+             input_training = input_training_org
+             iTimeStep = 1
+         # ------------------------------------------------------------------
+
+         # Reform the validation set for testing ----------------------------
+         # Input is updated by prediction; output: ideal
+         # print("Validation Predict ...")
+         # Predicted_L_Validation = model1.predict(input_validation)
+         # for iSample in xrange(0, len(df3)):
+         #     input_validation[iSample,iTimeStep_val,0] = Predicted_L_Validation[iSample,iTimeStep_val-1,0]
+         #     input_validation[iSample,iTimeStep_val,1] = Predicted_L_Validation[iSample,iTimeStep_val-1,1]
+         # iTimeStep_val = iTimeStep_val + 1
+         # print("TESTING...........")
+
+         # if iTimeStep_val == timestep:  # reset after one round -------
+         #     input_validation = input_validation_org
+         #     iTimeStep_val = 1
+         # ------------------------------------------------------------------
+         # Calculate the validation error -----------------------------------
+
+     # model1.fit(input_training, output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
+     model1.save_weights(H5_Name)
+     # print('Training duration (s): ', time.time() - global_start_time)
+
+     # Testing ---------------------------------------------------------
+     #########################################################
+     # The first (timestep-1) steps: fill up the buffer
+     #########################################################
+     Acc_Location = np.zeros((timestep, 2))
+     Acc_Location[0,:] = L1
+     L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
+     RSSI_combine = np.concatenate((RSSI_L2,RSSI_L3,RSSI_L4,RSSI_L5,RSSI_L6,RSSI_L7,RSSI_L8,RSSI_L9,RSSI_L10), axis=0)
+
+     for Step in xrange(1, timestep):
+         # Update the locations & RSSIs buffer
+         LocationIdx = 0
+         RSSIdx = 0
+         TestingData = np.zeros(len(L_combine) + len(RSSI_combine))
+         for i in xrange(timestep):
+             TestingData[LocationIdx] = L_combine[i*2]
+             TestingData[LocationIdx+1] = L_combine[i*2+1]
+             for j in xrange(len(RSSI_L2)):
+                 TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx]) + 100)/100
+             LocationIdx = LocationIdx + input_layer
+             RSSIdx = RSSIdx + num_rssi_reading
+         # print(TestingData)
+
+         TestingData = TestingData.reshape(1, timestep, input_layer)
+
+         # Prediction
+         Predicted_L = model1.predict(TestingData)
+         Acc_Location[Step,:] = Predicted_L[0,Step-1,:]
+         print(Acc_Location)
+
+         # Update for the next step
+         IdxTemp = 0
+         # Update locations
+         for j in xrange(Step*2, len(L_combine)):
+             L_combine[j] = Acc_Location[Step,IdxTemp]
+             if IdxTemp == 0:
+                 IdxTemp = 1
+             else:
+                 IdxTemp = 0
+
+         # Take RSSI
+         IdxTemp = 0
+         for j in xrange(Step*num_rssi_reading, len(RSSI_combine)):
+             RSSI_combine[j] = TestData[Step+1,IdxTemp]
+             IdxTemp = IdxTemp + 1
+             if IdxTemp == num_rssi_reading:  # reached the end
+                 IdxTemp = 0
+
+     #########################################################
+     # After the buffer is full - do the test
+     #########################################################
+     CountArray = np.ones(timestep)
+     error = np.zeros(LengthTest-1)
+     # Predicted_array = np.zeros((LengthTest-1, 2))
+     Average_Err = 0
+
+     for CountTest in xrange(LengthTest-timestep):
+         print("Location {} ------------".format(CountTest))
+         # Update the locations & RSSIs buffer
+         LocationIdx = 0
+         RSSIdx = 0
+         TestingData = np.zeros(len(L_combine) + len(RSSI_combine))
+         for i in xrange(timestep):
+             TestingData[LocationIdx] = L_combine[i*2]
+             TestingData[LocationIdx+1] = L_combine[i*2+1]
+             for j in xrange(len(RSSI_L2)):
+                 TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx]) + 100)/100
+             LocationIdx = LocationIdx + input_layer
+             RSSIdx = RSSIdx + num_rssi_reading
+
+         TestingData = TestingData.reshape(1, timestep, input_layer)
+         # print(TestingData)
+
+         # Prediction
+         Predicted_L = model1.predict(TestingData)
+
+         # for t in xrange(timestep):
+         #     File4.write(str(Predicted_L[0,t,0]) + ',')
+         #     File4.write(str(Predicted_L[0,t,1]) + ',')  # write to file
+         # File4.write('\n')
+
+         # Re-arrange the accumulated locations
+         for t in xrange(timestep-1):
+             CountArray[t] = CountArray[t+1]
+             Acc_Location[t,:] = Acc_Location[t+1,:] + Predicted_L[0,t,:]
+             CountArray[t] = CountArray[t] + 1
+
+         Acc_Location[timestep-1,:] = Predicted_L[0,timestep-1,:]  # Update the new location
+         CountArray[timestep-1] = 1  # Update the new count
+         # print(Acc_Location)
+         # print(CountArray)
+         ######################################################################
+         ############# UPDATE LOCATION ########################################
+         ######################################################################
+         Final_L = Acc_Location[0,:]/CountArray[0]
+         CountArray[0] = 1
+         # Take the correct location, compare the result
+         Correct_L = Location[CountTest+1,:]
+         error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]), 2) + np.power((Final_L[1] - Correct_L[1]), 2))
+         print "Predict: {} --- Exact: {}, Error: {}".format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
+         Average_Err = Average_Err + error[CountTest]
+
+         File1.write(str(error[CountTest]) + ' , ')  # write to file
+         # File2.write(str(Final_L[0]) + ',')
+         # File2.write(str(Final_L[1]) + '\n')  # write to file
+
+         # Re-arrange L_combine
+         for t in xrange(timestep-1):
+             L_combine[t*2] = L_combine[(t+1)*2]
+             L_combine[t*2+1] = L_combine[(t+1)*2+1]
+         # Update
+         L_combine[(timestep-1)*2] = Predicted_L[0,timestep-1,0]
+         L_combine[(timestep-1)*2+1] = Predicted_L[0,timestep-1,1]
+
+         # Re-arrange RSSI_combine
+         if CountTest+timestep+1 < LengthTest:
+             for t in xrange(timestep-1):
+                 for k in xrange(num_rssi_reading):
+                     RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
+
+             for k in xrange(num_rssi_reading):
+                 RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+timestep+1,k]
+
+     ###################################################################
+     ############### The last locations ################################
+     ###################################################################
+     for i in xrange(timestep-1):
+         Final_L = Acc_Location[i+1,:]/CountArray[i+1]
+         # Take the correct location, compare the result
+         Correct_L = Location[CountTest+1,:]
+         error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]), 2) + np.power((Final_L[1] - Correct_L[1]), 2))
+         print "Predict: {} --- Exact: {}, Error: {}".format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
+         Average_Err = Average_Err + error[CountTest]
+
+         File1.write(str(error[CountTest]) + ' , ')  # write to file
+         # File2.write(str(Final_L[0]) + ',')
+         # File2.write(str(Final_L[1]) + '\n')  # write to file
+         CountTest = CountTest + 1
+
+     File1.write('\n')
+     Average_Err = Average_Err/(LengthTest-1)
+     print "Average Error: ", Average_Err
+
+     Std_Err = 0
+     for k in xrange(LengthTest-1):
+         Std_Err = Std_Err + np.power((error[k] - Average_Err), 2)
+     Std_Err = Std_Err/(LengthTest-1)
+     Std_Err = np.sqrt(Std_Err)
+     print "Std: ", Std_Err
+
+ #### Show figure ######################
+ # if epochs_num > 0:
+ #     ResultPlot['neurons_500'] = loss
+ #     ResultPlot.plot()
+ #     pyplot.show()
+
+ File1.close()
+ # File2.close()
+ # File3.close()
+ # File4.close()
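Note: each iteration of the test loop above slides a window of per-timestep predictions one step, summing every overlapping estimate of a location into Acc_Location and tracking the number of contributions in CountArray, so the reported location is the average of all window positions that covered it. A simplified sketch of that accumulation follows; the window values are stand-ins for model output, and the reset of CountArray[0] after reading is omitted.

# Simplified sketch of the Acc_Location / CountArray window averaging.
import numpy as np

timestep = 9
acc = np.zeros((timestep, 2))  # running sums of predicted (x, y) per window slot
count = np.ones(timestep)      # number of predictions summed into each slot

def slide_and_accumulate(acc, count, predicted):
    """predicted: (timestep, 2) array of new per-timestep estimates."""
    for t in range(timestep - 1):
        count[t] = count[t + 1] + 1
        acc[t] = acc[t + 1] + predicted[t]
    acc[timestep - 1] = predicted[timestep - 1]  # newest location: first estimate
    count[timestep - 1] = 1
    return acc[0] / count[0]  # averaged estimate for the oldest location

window = np.random.rand(timestep, 2)  # stand-in for model1.predict(...)[0]
print(slide_and_accumulate(acc, count, window))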
LICENSE.md ADDED
@@ -0,0 +1,353 @@
+ Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International
+
+ Creative Commons Corporation ("Creative Commons") is not a law firm and
+ does not provide legal services or legal advice. Distribution of
+ Creative Commons public licenses does not create a lawyer-client or
+ other relationship. Creative Commons makes its licenses and related
+ information available on an "as-is" basis. Creative Commons gives no
+ warranties regarding its licenses, any material licensed under their
+ terms and conditions, or any related information. Creative Commons
+ disclaims all liability for damages resulting from their use to the
+ fullest extent possible.
+
+ Using Creative Commons Public Licenses
+
+ Creative Commons public licenses provide a standard set of terms and
+ conditions that creators and other rights holders may use to share
+ original works of authorship and other material subject to copyright and
+ certain other rights specified in the public license below. The
+ following considerations are for informational purposes only, are not
+ exhaustive, and do not form part of our licenses.
+
+ Considerations for licensors: Our public licenses are intended for use
+ by those authorized to give the public permission to use material in
+ ways otherwise restricted by copyright and certain other rights. Our
+ licenses are irrevocable. Licensors should read and understand the terms
+ and conditions of the license they choose before applying it. Licensors
+ should also secure all rights necessary before applying our licenses so
+ that the public can reuse the material as expected. Licensors should
+ clearly mark any material not subject to the license. This includes
+ other CC-licensed material, or material used under an exception or
+ limitation to copyright. More considerations for licensors:
+ wiki.creativecommons.org/Considerations_for_licensors
+
+ Considerations for the public: By using one of our public licenses, a
+ licensor grants the public permission to use the licensed material under
+ specified terms and conditions. If the licensor's permission is not
+ necessary for any reason–for example, because of any applicable
+ exception or limitation to copyright–then that use is not regulated by
+ the license. Our licenses grant only permissions under copyright and
+ certain other rights that a licensor has authority to grant. Use of the
+ licensed material may still be restricted for other reasons, including
+ because others have copyright or other rights in the material. A
+ licensor may make special requests, such as asking that all changes be
+ marked or described. Although not required by our licenses, you are
+ encouraged to respect those requests where reasonable. More
+ considerations for the public:
+ wiki.creativecommons.org/Considerations_for_licensees
+
+ Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International
+ Public License
+
+ By exercising the Licensed Rights (defined below), You accept and agree
+ to be bound by the terms and conditions of this Creative Commons
+ Attribution-NonCommercial-ShareAlike 4.0 International Public License
+ ("Public License"). To the extent this Public License may be interpreted
+ as a contract, You are granted the Licensed Rights in consideration of
+ Your acceptance of these terms and conditions, and the Licensor grants
+ You such rights in consideration of benefits the Licensor receives from
+ making the Licensed Material available under these terms and conditions.
+
+ Section 1 – Definitions.
+
+ - a. Adapted Material means material subject to Copyright and Similar
+   Rights that is derived from or based upon the Licensed Material and
+   in which the Licensed Material is translated, altered, arranged,
+   transformed, or otherwise modified in a manner requiring permission
+   under the Copyright and Similar Rights held by the Licensor. For
+   purposes of this Public License, where the Licensed Material is a
+   musical work, performance, or sound recording, Adapted Material is
+   always produced where the Licensed Material is synched in timed
+   relation with a moving image.
+ - b. Adapter's License means the license You apply to Your Copyright
+   and Similar Rights in Your contributions to Adapted Material in
+   accordance with the terms and conditions of this Public License.
+ - c. BY-NC-SA Compatible License means a license listed at
+   creativecommons.org/compatiblelicenses, approved by Creative Commons
+   as essentially the equivalent of this Public License.
+ - d. Copyright and Similar Rights means copyright and/or similar
+   rights closely related to copyright including, without limitation,
+   performance, broadcast, sound recording, and Sui Generis Database
+   Rights, without regard to how the rights are labeled or categorized.
+   For purposes of this Public License, the rights specified in Section
+   2(b)(1)-(2) are not Copyright and Similar Rights.
+ - e. Effective Technological Measures means those measures that, in
+   the absence of proper authority, may not be circumvented under laws
+   fulfilling obligations under Article 11 of the WIPO Copyright Treaty
+   adopted on December 20, 1996, and/or similar international
+   agreements.
+ - f. Exceptions and Limitations means fair use, fair dealing, and/or
+   any other exception or limitation to Copyright and Similar Rights
+   that applies to Your use of the Licensed Material.
+ - g. License Elements means the license attributes listed in the name
+   of a Creative Commons Public License. The License Elements of this
+   Public License are Attribution, NonCommercial, and ShareAlike.
+ - h. Licensed Material means the artistic or literary work, database,
+   or other material to which the Licensor applied this Public License.
+ - i. Licensed Rights means the rights granted to You subject to the
+   terms and conditions of this Public License, which are limited to
+   all Copyright and Similar Rights that apply to Your use of the
+   Licensed Material and that the Licensor has authority to license.
+ - j. Licensor means the individual(s) or entity(ies) granting rights
+   under this Public License.
+ - k. NonCommercial means not primarily intended for or directed
+   towards commercial advantage or monetary compensation. For purposes
+   of this Public License, the exchange of the Licensed Material for
+   other material subject to Copyright and Similar Rights by digital
+   file-sharing or similar means is NonCommercial provided there is no
+   payment of monetary compensation in connection with the exchange.
+ - l. Share means to provide material to the public by any means or
+   process that requires permission under the Licensed Rights, such as
+   reproduction, public display, public performance, distribution,
+   dissemination, communication, or importation, and to make material
+   available to the public including in ways that members of the public
114
+ may access the material from a place and at a time individually
115
+ chosen by them.
116
+ - m. Sui Generis Database Rights means rights other than copyright
117
+ resulting from Directive 96/9/EC of the European Parliament and of
118
+ the Council of 11 March 1996 on the legal protection of databases,
119
+ as amended and/or succeeded, as well as other essentially equivalent
120
+ rights anywhere in the world.
121
+ - n. You means the individual or entity exercising the Licensed Rights
122
+ under this Public License. Your has a corresponding meaning.
123
+
124
+ Section 2 – Scope.
125
+
126
+ - a. License grant.
127
+ - 1. Subject to the terms and conditions of this Public License,
128
+ the Licensor hereby grants You a worldwide, royalty-free,
129
+ non-sublicensable, non-exclusive, irrevocable license to
130
+ exercise the Licensed Rights in the Licensed Material to:
131
+ - A. reproduce and Share the Licensed Material, in whole or in
132
+ part, for NonCommercial purposes only; and
133
+ - B. produce, reproduce, and Share Adapted Material for
134
+ NonCommercial purposes only.
135
+ - 2. Exceptions and Limitations. For the avoidance of doubt, where
136
+ Exceptions and Limitations apply to Your use, this Public
137
+ License does not apply, and You do not need to comply with its
138
+ terms and conditions.
139
+ - 3. Term. The term of this Public License is specified in Section
140
+ 6(a).
141
+ - 4. Media and formats; technical modifications allowed. The
142
+ Licensor authorizes You to exercise the Licensed Rights in all
143
+ media and formats whether now known or hereafter created, and to
144
+ make technical modifications necessary to do so. The Licensor
145
+ waives and/or agrees not to assert any right or authority to
146
+ forbid You from making technical modifications necessary to
147
+ exercise the Licensed Rights, including technical modifications
148
+ necessary to circumvent Effective Technological Measures. For
149
+ purposes of this Public License, simply making modifications
150
+ authorized by this Section 2(a)(4) never produces Adapted
151
+ Material.
152
+ - 5. Downstream recipients.
153
+ - A. Offer from the Licensor – Licensed Material. Every
154
+ recipient of the Licensed Material automatically receives an
155
+ offer from the Licensor to exercise the Licensed Rights
156
+ under the terms and conditions of this Public License.
157
+ - B. Additional offer from the Licensor – Adapted Material.
158
+ Every recipient of Adapted Material from You automatically
159
+ receives an offer from the Licensor to exercise the Licensed
160
+ Rights in the Adapted Material under the conditions of the
161
+ Adapter's License You apply.
162
+ - C. No downstream restrictions. You may not offer or impose
163
+ any additional or different terms or conditions on, or apply
164
+ any Effective Technological Measures to, the Licensed
165
+ Material if doing so restricts exercise of the Licensed
166
+ Rights by any recipient of the Licensed Material.
167
+ - 6. No endorsement. Nothing in this Public License constitutes or
168
+ may be construed as permission to assert or imply that You are,
169
+ or that Your use of the Licensed Material is, connected with, or
170
+ sponsored, endorsed, or granted official status by, the Licensor
171
+ or others designated to receive attribution as provided in
172
+ Section 3(a)(1)(A)(i).
173
+ - b. Other rights.
174
+ - 1. Moral rights, such as the right of integrity, are not
175
+ licensed under this Public License, nor are publicity, privacy,
176
+ and/or other similar personality rights; however, to the extent
177
+ possible, the Licensor waives and/or agrees not to assert any
178
+ such rights held by the Licensor to the limited extent necessary
179
+ to allow You to exercise the Licensed Rights, but not otherwise.
180
+ - 2. Patent and trademark rights are not licensed under this
181
+ Public License.
182
+ - 3. To the extent possible, the Licensor waives any right to
183
+ collect royalties from You for the exercise of the Licensed
184
+ Rights, whether directly or through a collecting society under
185
+ any voluntary or waivable statutory or compulsory licensing
186
+ scheme. In all other cases the Licensor expressly reserves any
187
+ right to collect such royalties, including when the Licensed
188
+ Material is used other than for NonCommercial purposes.
189
+
190
+ Section 3 – License Conditions.
191
+
192
+ Your exercise of the Licensed Rights is expressly made subject to the
193
+ following conditions.
194
+
195
+ - a. Attribution.
196
+ - 1. If You Share the Licensed Material (including in modified
197
+ form), You must:
198
+ - A. retain the following if it is supplied by the Licensor
199
+ with the Licensed Material:
200
+ - i. identification of the creator(s) of the Licensed
201
+ Material and any others designated to receive
202
+ attribution, in any reasonable manner requested by the
203
+ Licensor (including by pseudonym if designated);
204
+ - ii. a copyright notice;
205
+ - iii. a notice that refers to this Public License;
206
+ - iv. a notice that refers to the disclaimer of
207
+ warranties;
208
+ - v. a URI or hyperlink to the Licensed Material to the
209
+ extent reasonably practicable;
210
+
211
+ - B. indicate if You modified the Licensed Material and retain
212
+ an indication of any previous modifications; and
213
+ - C. indicate the Licensed Material is licensed under this
214
+ Public License, and include the text of, or the URI or
215
+ hyperlink to, this Public License.
216
+ - 2. You may satisfy the conditions in Section 3(a)(1) in any
217
+ reasonable manner based on the medium, means, and context in
218
+ which You Share the Licensed Material. For example, it may be
219
+ reasonable to satisfy the conditions by providing a URI or
220
+ hyperlink to a resource that includes the required information.
221
+ - 3. If requested by the Licensor, You must remove any of the
222
+ information required by Section 3(a)(1)(A) to the extent
223
+ reasonably practicable.
224
+ - b. ShareAlike. In addition to the conditions in Section 3(a), if You
225
+ Share Adapted Material You produce, the following conditions also
226
+ apply.
227
+ - 1. The Adapter's License You apply must be a Creative Commons
228
+ license with the same License Elements, this version or later,
229
+ or a BY-NC-SA Compatible License.
230
+ - 2. You must include the text of, or the URI or hyperlink to, the
231
+ Adapter's License You apply. You may satisfy this condition in
232
+ any reasonable manner based on the medium, means, and context in
233
+ which You Share Adapted Material.
234
+ - 3. You may not offer or impose any additional or different terms
235
+ or conditions on, or apply any Effective Technological Measures
236
+ to, Adapted Material that restrict exercise of the rights
237
+ granted under the Adapter's License You apply.
238
+
239
+ Section 4 – Sui Generis Database Rights.
240
+
241
+ Where the Licensed Rights include Sui Generis Database Rights that apply
242
+ to Your use of the Licensed Material:
243
+
244
+ - a. for the avoidance of doubt, Section 2(a)(1) grants You the right
245
+ to extract, reuse, reproduce, and Share all or a substantial portion
246
+ of the contents of the database for NonCommercial purposes only;
247
+ - b. if You include all or a substantial portion of the database
248
+ contents in a database in which You have Sui Generis Database
249
+ Rights, then the database in which You have Sui Generis Database
250
+ Rights (but not its individual contents) is Adapted Material,
251
+ including for purposes of Section 3(b); and
252
+ - c. You must comply with the conditions in Section 3(a) if You Share
253
+ all or a substantial portion of the contents of the database.
254
+ For the avoidance of doubt, this Section 4 supplements and does not
255
+ replace Your obligations under this Public License where the
256
+ Licensed Rights include other Copyright and Similar Rights.
257
+
258
+ Section 5 – Disclaimer of Warranties and Limitation of Liability.
259
+
260
+ - a. Unless otherwise separately undertaken by the Licensor, to the
261
+ extent possible, the Licensor offers the Licensed Material as-is and
262
+ as-available, and makes no representations or warranties of any kind
263
+ concerning the Licensed Material, whether express, implied,
264
+ statutory, or other. This includes, without limitation, warranties
265
+ of title, merchantability, fitness for a particular purpose,
266
+ non-infringement, absence of latent or other defects, accuracy, or
267
+ the presence or absence of errors, whether or not known or
268
+ discoverable. Where disclaimers of warranties are not allowed in
269
+ full or in part, this disclaimer may not apply to You.
270
+ - b. To the extent possible, in no event will the Licensor be liable
271
+ to You on any legal theory (including, without limitation,
272
+ negligence) or otherwise for any direct, special, indirect,
273
+ incidental, consequential, punitive, exemplary, or other losses,
274
+ costs, expenses, or damages arising out of this Public License or
275
+ use of the Licensed Material, even if the Licensor has been advised
276
+ of the possibility of such losses, costs, expenses, or damages.
277
+ Where a limitation of liability is not allowed in full or in part,
278
+ this limitation may not apply to You.
279
+ - c. The disclaimer of warranties and limitation of liability provided
280
+ above shall be interpreted in a manner that, to the extent possible,
281
+ most closely approximates an absolute disclaimer and waiver of all
282
+ liability.
283
+
284
+ Section 6 – Term and Termination.
285
+
286
+ - a. This Public License applies for the term of the Copyright and
287
+ Similar Rights licensed here. However, if You fail to comply with
288
+ this Public License, then Your rights under this Public License
289
+ terminate automatically.
290
+ - b. Where Your right to use the Licensed Material has terminated
291
+ under Section 6(a), it reinstates:
292
+
293
+ - 1. automatically as of the date the violation is cured, provided
294
+ it is cured within 30 days of Your discovery of the violation;
295
+ or
296
+ - 2. upon express reinstatement by the Licensor.
297
+
298
+ For the avoidance of doubt, this Section 6(b) does not affect any
299
+ right the Licensor may have to seek remedies for Your violations of
300
+ this Public License.
301
+
302
+ - c. For the avoidance of doubt, the Licensor may also offer the
303
+ Licensed Material under separate terms or conditions or stop
304
+ distributing the Licensed Material at any time; however, doing so
305
+ will not terminate this Public License.
306
+ - d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
307
+ License.
308
+
309
+ Section 7 – Other Terms and Conditions.
310
+
311
+ - a. The Licensor shall not be bound by any additional or different
312
+ terms or conditions communicated by You unless expressly agreed.
313
+ - b. Any arrangements, understandings, or agreements regarding the
314
+ Licensed Material not stated herein are separate from and
315
+ independent of the terms and conditions of this Public License.
316
+
317
+ Section 8 – Interpretation.
318
+
319
+ - a. For the avoidance of doubt, this Public License does not, and
320
+ shall not be interpreted to, reduce, limit, restrict, or impose
321
+ conditions on any use of the Licensed Material that could lawfully
322
+ be made without permission under this Public License.
323
+ - b. To the extent possible, if any provision of this Public License
324
+ is deemed unenforceable, it shall be automatically reformed to the
325
+ minimum extent necessary to make it enforceable. If the provision
326
+ cannot be reformed, it shall be severed from this Public License
327
+ without affecting the enforceability of the remaining terms and
328
+ conditions.
329
+ - c. No term or condition of this Public License will be waived and no
330
+ failure to comply consented to unless expressly agreed to by the
331
+ Licensor.
332
+ - d. Nothing in this Public License constitutes or may be interpreted
333
+ as a limitation upon, or waiver of, any privileges and immunities
334
+ that apply to the Licensor or You, including from the legal
335
+ processes of any jurisdiction or authority.
336
+
337
+ Creative Commons is not a party to its public licenses. Notwithstanding,
338
+ Creative Commons may elect to apply one of its public licenses to
339
+ material it publishes and in those instances will be considered the
340
+ "Licensor." The text of the Creative Commons public licenses is
341
+ dedicated to the public domain under the CC0 Public Domain Dedication.
342
+ Except for the limited purpose of indicating that material is shared
343
+ under a Creative Commons public license or as otherwise permitted by the
344
+ Creative Commons policies published at creativecommons.org/policies,
345
+ Creative Commons does not authorize the use of the trademark "Creative
346
+ Commons" or any other trademark or logo of Creative Commons without its
347
+ prior written consent including, without limitation, in connection with
348
+ any unauthorized modifications to any of its public licenses or any
349
+ other arrangements, understandings, or agreements concerning use of
350
+ licensed material. For the avoidance of doubt, this paragraph does not
351
+ form part of the public licenses.
352
+
353
+ Creative Commons may be contacted at creativecommons.org.
LSTM_Model_1.py ADDED
@@ -0,0 +1,296 @@
1
+ #############################################################
2
+ # 10 locations/ trajectory
3
+ # First location is known
4
+ # Date: 8 June 2018
5
+ #############################################################
6
+
7
+ import time
8
+ import tensorflow
9
+ # import lstm
10
+ import os
11
+ #import time
12
+ import warnings
13
+ import numpy as np
14
+ from numpy import newaxis
15
+ from keras.layers import TimeDistributed
16
+ from keras.layers.core import Dense, Activation, Dropout
17
+ from keras.layers.recurrent import LSTM
18
+ from keras.models import Sequential
19
+ # import matplotlib.pyplot as plt
20
+ from matplotlib import pyplot
21
+ import pandas as pd
22
+ import math
23
+ from pandas import DataFrame
24
+ from keras import backend as K
25
+
26
+ def rmse(y_true, y_pred):
27
+ return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
28
+ def err_absolute(y_true, y_pred):
29
+ err = K.sqrt(K.square(y_pred - y_true))
30
+ return err
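+ # Note: K.sqrt(K.square(x)) is simply |x|, so err_absolute is the element-wise absolute error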
31
+
32
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' #Hide messy TensorFlow warnings
33
+ warnings.filterwarnings("ignore") #Hide messy Numpy warnings
34
+
35
+ # Open File to write results
36
+ File1 = open('Error_Model1_NoFilter.csv','w')
37
+ File2 = open('Traj_Model1_NoFilter.csv','w')
38
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
39
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
40
+
41
+ NumTotalTraj = 120000
42
+
43
+ # Configure the sampling set -----------------
44
+ epochs_num = 0
45
+ NumTrajTraining = 20000
46
+ NumSam_PerTraj = 1
47
+ StartingTraj = 0
48
+ StartingSample = StartingTraj*NumSam_PerTraj # the factor of NumSam_PerTraj
49
+ NumSample = NumTrajTraining*NumSam_PerTraj # NumTraj = NumSample/NumSam_PerTraj
50
+
51
+ # Configure the validation set -----------------
52
+ NumTrajVal = 10000
53
+ StartingTrajVal = 100000
54
+
55
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
56
+ NumValidation = NumTrajVal*NumSam_PerTraj
57
+
58
+ # Main Run Thread
59
+ global_start_time = time.time()
60
+ num_rssi_reading = 11
61
+ output_layer = 2
62
+ timestep = 10
63
+ input_layer = num_rssi_reading*timestep # 11 RSSI readings x 10 timesteps = 110 input features (RSSI only, no locations)
64
+ NumBatch = 256
65
+ hidden_layer1 = 100
66
+ hidden_layer2 = 100
67
+
68
+ ##########################################################
69
+ ###### Test Trajectory Splitting #########################
70
+ ##########################################################
71
+ # Read Training data --------------------------------------------------
72
+ print('> Loading data... ')
+ fileDir = os.path.dirname(os.path.realpath(__file__)) # fileDir was never defined in this script; assumed to be the script's directory so the relative paths below resolve
73
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/InputRNN_10points_120k_RSSI.csv')
74
+ filename = os.path.abspath(os.path.realpath(filename))
75
+ df1=pd.read_csv(filename)
76
+ df1= np.asarray(df1)
77
+ df1_split = df1[StartingSample:StartingSample+NumSample,:]
78
+ input_training = df1_split.reshape(len(df1_split),1,input_layer)
79
+ print(input_training.shape)
80
+
81
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Output_10point_RSSI_Model1.csv')
82
+ filename = os.path.abspath(os.path.realpath(filename))
83
+ df2=pd.read_csv(filename)
84
+ df2= np.asarray(df2)
85
+ df2_split = df2[StartingSample:StartingSample+NumSample,:]
86
+ output_training = df2_split.reshape(len(df2_split),1,output_layer)
87
+ print(output_training.shape)
88
+
89
+ ########################################################################
90
+ # Read Validation data --------------------------------------------------
91
+ #######################################################################
92
+ df3= df1[StartingValidation:StartingValidation+NumValidation,:]
93
+ input_validation = df3.reshape(len(df3),1,input_layer)
94
+ print(input_validation.shape)
95
+
96
+ df4=df2[StartingValidation:StartingValidation+NumValidation,:]
97
+ output_validation = df4.reshape(len(df4),1,output_layer)
98
+ print(output_validation.shape)
99
+
100
+ ####################################################################
101
+ # Testing data ------------------------
102
+ ####################################################################
103
+ # RSSI array
104
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_6July_v1.csv')
105
+ filename = os.path.abspath(os.path.realpath(filename))
106
+ TestData_origin =pd.read_csv(filename)
107
+ print(TestData_origin.shape)
108
+ TestData_origin = np.asarray(TestData_origin)
109
+
110
+ LengthTest = len(TestData_origin)
111
+ StartTestIdx = 0
112
+ StopTestIdx = StartTestIdx + LengthTest
113
+
114
+ # print(TestData.shape)
115
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
116
+ # LengthTest = len(TestData)
117
+
118
+ # List of Locations
119
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_Location.csv')
120
+ filename = os.path.abspath(os.path.realpath(filename))
121
+ Location_origin = pd.read_csv(filename)
122
+ print(Location_origin.shape)
123
+ Location_origin = np.asarray(Location_origin)
124
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
125
+
126
+ print('> Data Loaded. Compiling...')
127
+
128
+ ####################################################################
129
+ # Initialize the first 9 locations with the known start point #####
130
+ ####################################################################
131
+ L1 = Location[0,:]
132
+ L2 = Location[0,:]
133
+ L3 = Location[0,:]
134
+ L4 = Location[0,:]
135
+ L5 = Location[0,:]
136
+ L6 = Location[0,:]
137
+ L7 = Location[0,:]
138
+ L8 = Location[0,:]
139
+ L9 = Location[0,:]
140
+ L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
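+ # (L_combine is not used below: Model 1 takes RSSI-only input; kept for symmetry with the location-fed models)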
141
+
142
+ # Take RSSI
143
+ RSSI_L1 = TestData[0,:]
144
+ RSSI_L2 = TestData[1,:]
145
+ RSSI_L3 = TestData[1,:]
146
+ RSSI_L4 = TestData[1,:]
147
+ RSSI_L5 = TestData[1,:]
148
+ RSSI_L6 = TestData[1,:]
149
+ RSSI_L7 = TestData[1,:]
150
+ RSSI_L8 = TestData[1,:]
151
+ RSSI_L9 = TestData[1,:]
152
+ RSSI_L10 = TestData[1,:]
153
+ RSSI_combine = np.concatenate((RSSI_L1, RSSI_L2, RSSI_L3, RSSI_L4, RSSI_L5,RSSI_L6, RSSI_L7, RSSI_L8, RSSI_L9, RSSI_L10), axis=0)
154
+
155
+ # Build the network --------------------------------------------
156
+ model1 = Sequential()
157
+ model1.add(LSTM(hidden_layer1, input_shape=(1, input_layer), return_sequences=True))
158
+ model1.add(Dropout(0.2))
159
+ model1.add(LSTM(hidden_layer2,return_sequences=True))
160
+ model1.add(Dropout(0.2))
161
+ model1.add(TimeDistributed(Dense(output_layer)))
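+ # Architecture: two stacked 100-unit LSTMs, each followed by 20% dropout, with a
+ # TimeDistributed dense head producing an (x, y) estimate per timestep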
162
+ model1.summary()
163
+ start = time.time()
164
+ model1.compile(loss="mse", optimizer="adam",metrics=[rmse])
165
+ # model1.compile(loss=rmse, optimizer="adam",metrics=[err_absolute]) #rmse
166
+ # root_mean_squared_error
167
+ print("> Compilation Time : ", time.time() - start)
168
+
169
+ # Training --------------------------------------------------------
170
+ loss = list()
171
+ ResultPlot = DataFrame()
172
+
173
+ #### Create a copy of input training ##########################
174
+ input_training_org = input_training
175
+ input_validation_org = input_validation
176
+ ###############################################################
177
+ model1.load_weights("lstm_20k_model1.h5")
178
+ StartingValidation = 0
179
+ if epochs_num > 0:
180
+ # iTimeStep = 1
181
+ # iTimeStep_val = 1
182
+ for ep in xrange(epochs_num):
183
+ print("Iteration {} ----- ".format(ep))
184
+
185
+ # Fit Model
186
+ # hist = model1.fit(input_training,output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
187
+ hist = model1.fit(input_training,output_training, epochs=1, batch_size=NumBatch, verbose=1)
188
+ loss.append(hist.history['loss'][0])
189
+
190
+ # model1.fit(input_training,output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
191
+ model1.save_weights("lstm_20k_model1.h5")
192
+ print('Training duration (s) : ', time.time() - global_start_time)
193
+ else:
194
+ print("TESTING...........")
195
+ pass
196
+
197
+ # Testing ---------------------------------------------------------
198
+ #########################################################
199
+ # The first steps: Fill up the buffer
200
+ # #######################################################
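+ # Warm-up: the window initially holds repeated copies of the first readings and
+ # is overwritten with one real measurement per step until it is full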
201
+ CountTest = 0
202
+ Average_Err = 0
203
+ error = np.zeros(LengthTest)
204
+
205
+ for Step in xrange(timestep):
206
+ print("Location {}------------".format(CountTest))
207
+ # Update the Locations & RSSIs buffer
208
+ RSSIdx = 0
209
+ TestingData = np.zeros(len(RSSI_combine))
210
+ for i in xrange(timestep):
211
+ for j in xrange(len(RSSI_L2)): # normalize each RSSI reading from [-100, 0] dBm to [0, 1]
212
+ TestingData[j+RSSIdx] = (float(RSSI_combine[j+RSSIdx])+100)/100
213
+ RSSIdx = RSSIdx + num_rssi_reading
214
+
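+ # Flattened window of timestep*num_rssi_reading = 110 normalized RSSI values,
+ # presented to the network as a single (batch=1, timesteps=1, features=110) tensor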
215
+ TestingData = TestingData.reshape(1,1,input_layer)
216
+ # Prediction
217
+ Predicted_L = model1.predict(TestingData)
218
+ Final_L = Predicted_L[0,0,:]
219
+ # print(Final_L)
220
+
221
+ Correct_L = Location[CountTest,:]
222
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
223
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
224
+ Average_Err = Average_Err + error[CountTest]
225
+ CountTest = CountTest + 1
226
+
227
+ # Take RSSI
228
+ IdxTemp = 0
229
+ for j in xrange(Step*num_rssi_reading,len(RSSI_combine)):
230
+ RSSI_combine[j] = TestData[Step+1,IdxTemp]
231
+ IdxTemp = IdxTemp + 1
232
+ if IdxTemp == num_rssi_reading: # Reach the end
233
+ IdxTemp = 0
234
+
235
+ #########################################################
236
+ # After the buffer is full
237
+ # #######################################################
238
+
239
+ while CountTest < LengthTest-1:
240
+ print("Location {}------------".format(CountTest))
241
+ # Update the Locations & RSSIs buffer
242
+ RSSIdx = 0
243
+ TestingData = np.zeros(len(RSSI_combine))
244
+ for i in xrange(timestep):
245
+ for j in xrange(len(RSSI_L2)):
246
+ TestingData[j+RSSIdx] = (float(RSSI_combine[j+RSSIdx])+100)/100
247
+ RSSIdx = RSSIdx + num_rssi_reading
248
+
249
+ TestingData = TestingData.reshape(1,1,input_layer)
250
+
251
+ # Prediction
252
+ Predicted_L = model1.predict(TestingData)
253
+ Final_L = Predicted_L[0,0,:]
254
+
255
+ Correct_L = Location[CountTest,:]
256
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
257
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
258
+ Average_Err = Average_Err + error[CountTest]
259
+
260
+ File1.write(str(error[CountTest]) + ' , ') # write to file
261
+ File2.write(str(Final_L[0]) + ',')
262
+ File2.write(str(Final_L[1]) + '\n') # write to file
263
+
264
+ # Re-arrange RSSI_combine
265
+ for t in xrange(timestep-1):
266
+ for k in xrange(num_rssi_reading):
267
+ RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
268
+
269
+ for k in xrange(num_rssi_reading):
270
+ RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+1,k]
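+ # Equivalent NumPy sketch of the two loops above (assumes 1-D RSSI_combine):
+ # RSSI_combine[:-num_rssi_reading] = RSSI_combine[num_rssi_reading:]
+ # RSSI_combine[-num_rssi_reading:] = TestData[CountTest+1, :num_rssi_reading]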
271
+
272
+ CountTest = CountTest + 1
273
+ ###################################################################
274
+ ############### The Last Locations ###############################
275
+ ###################################################################
276
+
277
+ Average_Err = Average_Err/LengthTest
278
+ print "Average Error: ", Average_Err
279
+
280
+ Std_Err = 0
281
+ for k in xrange(LengthTest):
282
+ Std_Err = Std_Err + np.power((error[k] - Average_Err),2)
283
+ Std_Err = Std_Err/(LengthTest-1)
284
+ Std_Err = np.sqrt(Std_Err)
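+ # Sample standard deviation; equivalent to np.std(error, ddof=1)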
285
+ print "Std: ", Std_Err
286
+
287
+ #### Show Figure ######################
288
+ #if epochs_num > 0:
289
+ # ResultPlot['neurons_500'] = loss
290
+ # ResultPlot.plot()
291
+ # pyplot.show()
292
+
293
+ File1.close()
294
+ File2.close()
295
+ # File3.close()
296
+ # File4.close()
LSTM_Model_2.py ADDED
@@ -0,0 +1,325 @@
1
+ #############################################################
2
+ # 10 locations/ trajectory
3
+ # First location is known
4
+ # Date: 8 June 2018
5
+ #############################################################
6
+
7
+ import time
8
+ import tensorflow
9
+ # import lstm
10
+ import os
11
+ #import time
12
+ import warnings
13
+ import numpy as np
14
+ from numpy import newaxis
15
+ from keras.layers import TimeDistributed
16
+ from keras.layers.core import Dense, Activation, Dropout
17
+ from keras.layers.recurrent import LSTM
18
+ from keras.models import Sequential
19
+ # import matplotlib.pyplot as plt
20
+ from matplotlib import pyplot
21
+ import pandas as pd
22
+ import math
23
+ from pandas import DataFrame
24
+ from keras import backend as K
25
+
26
+ def rmse(y_true, y_pred):
27
+ return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
28
+ def err_absolute(y_true, y_pred):
29
+ err = K.sqrt(K.square(y_pred - y_true))
30
+ return err
31
+
32
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' #Hide messy TensorFlow warnings
33
+ warnings.filterwarnings("ignore") #Hide messy Numpy warnings
34
+
35
+ # Open File to write results
36
+ File1 = open('Error_Model2.csv','w')
37
+ File2 = open('Traj_Model2.csv','w')
38
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
39
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
40
+
41
+ NumTotalTraj = 120000
42
+
43
+ # Configure the sampling set -----------------
44
+ epochs_num = 300
45
+ NumTrajTraining = 40000
46
+ NumSam_PerTraj = 1
47
+ StartingTraj = 30000
48
+ StartingSample = StartingTraj*NumSam_PerTraj # the factor of NumSam_PerTraj
49
+ NumSample = NumTrajTraining*NumSam_PerTraj # NumTraj = NumSample/NumSam_PerTraj
50
+
51
+ # Configure the validation set -----------------
52
+ NumTrajVal = 10000
53
+ StartingTrajVal = 100000
54
+
55
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
56
+ NumValidation = NumTrajVal*NumSam_PerTraj
57
+
58
+ # Main Run Thread
59
+ global_start_time = time.time()
60
+ num_rssi_reading = 11
61
+ num_out_location = 2
62
+ output_layer = 2
63
+ timestep = 9
64
+ input_layer = (num_rssi_reading + num_out_location)*timestep # X, Y & 11 MAC Addresses
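+ # Each of the 9 steps contributes (x, y) plus 11 RSSI values, i.e. 13 numbers,
+ # giving a flattened input of 9*13 = 117 features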
65
+ NumBatch = 256
66
+ hidden_layer1 = 100
67
+ hidden_layer2 = 100
68
+
69
+ ##########################################################
70
+ ###### Test Trajectory Splitting #########################
71
+ ##########################################################
72
+ # Read Training data --------------------------------------------------
73
+ print('> Loading data... ')
+ fileDir = os.path.dirname(os.path.realpath(__file__)) # assumed script directory; fileDir was undefined (see note in LSTM_Model_1.py)
74
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Input_Location_RSSI_10points_365k.csv')
75
+ filename = os.path.abspath(os.path.realpath(filename))
76
+ df1=pd.read_csv(filename)
77
+ df1= np.asarray(df1)
78
+ df1_split = df1[StartingSample:StartingSample+NumSample,:]
79
+ input_training = df1_split.reshape(len(df1_split),1,input_layer)
80
+ print(input_training.shape)
81
+
82
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Output_Location_RSSI_10points_365k_Model2.csv')
83
+ filename = os.path.abspath(os.path.realpath(filename))
84
+ df2=pd.read_csv(filename)
85
+ df2= np.asarray(df2)
86
+ df2_split = df2[StartingSample:StartingSample+NumSample,:]
87
+ output_training = df2_split.reshape(len(df2_split),1,output_layer)
88
+ print(output_training.shape)
89
+
90
+ ########################################################################
91
+ # Read Validation data --------------------------------------------------
92
+ #######################################################################
93
+ df3= df1[StartingValidation:StartingValidation+NumValidation,:]
94
+ input_validation = df3.reshape(len(df3),1,input_layer)
95
+ print(input_validation.shape)
96
+
97
+ df4=df2[StartingValidation:StartingValidation+NumValidation,:]
98
+ output_validation = df4.reshape(len(df4),1,output_layer)
99
+ print(output_validation.shape)
100
+
101
+ ####################################################################
102
+ # Testing data ------------------------
103
+ ####################################################################
104
+ # RSSI array
105
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_6July_v1_AverageFilter.csv')
106
+ filename = os.path.abspath(os.path.realpath(filename))
107
+ TestData_origin =pd.read_csv(filename)
108
+ print(TestData_origin.shape)
109
+ TestData_origin = np.asarray(TestData_origin)
110
+
111
+ LengthTest = len(TestData_origin)
112
+ StartTestIdx = 0
113
+ StopTestIdx = StartTestIdx + LengthTest
114
+
115
+ # print(TestData.shape)
116
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
117
+ # LengthTest = len(TestData)
118
+
119
+ # List of Locations
120
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_Location.csv')
121
+ filename = os.path.abspath(os.path.realpath(filename))
122
+ Location_origin = pd.read_csv(filename)
123
+ print(Location_origin.shape)
124
+ Location_origin = np.asarray(Location_origin)
125
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
126
+
127
+ print('> Data Loaded. Compiling...')
128
+
129
+ ####################################################################
130
+ # Initialize the first 9 locations with the known start point #####
131
+ ####################################################################
132
+ L1 = Location[0,:]
133
+ L2 = Location[0,:]
134
+ L3 = Location[0,:]
135
+ L4 = Location[0,:]
136
+ L5 = Location[0,:]
137
+ L6 = Location[0,:]
138
+ L7 = Location[0,:]
139
+ L8 = Location[0,:]
140
+ L9 = Location[0,:]
141
+ L10 = Location[0,:]
142
+ L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
143
+
144
+ # Take RSSI
145
+ RSSI_L1 = TestData[0,:]
146
+ RSSI_L2 = TestData[1,:]
147
+ RSSI_L3 = TestData[1,:]
148
+ RSSI_L4 = TestData[1,:]
149
+ RSSI_L5 = TestData[1,:]
150
+ RSSI_L6 = TestData[1,:]
151
+ RSSI_L7 = TestData[1,:]
152
+ RSSI_L8 = TestData[1,:]
153
+ RSSI_L9 = TestData[1,:]
154
+ RSSI_L10 = TestData[1,:]
155
+ RSSI_combine = np.concatenate((RSSI_L2, RSSI_L3, RSSI_L4, RSSI_L5,RSSI_L6, RSSI_L7, RSSI_L8, RSSI_L9, RSSI_L10), axis=0)
156
+
157
+ # Build the network --------------------------------------------
158
+ model1 = Sequential()
159
+ model1.add(LSTM(hidden_layer1, input_shape=(1, input_layer), return_sequences=True))
160
+ model1.add(Dropout(0.2))
161
+ model1.add(LSTM(hidden_layer2,return_sequences=True))
162
+ model1.add(Dropout(0.2))
163
+ model1.add(TimeDistributed(Dense(output_layer)))
164
+ model1.summary()
165
+ start = time.time()
166
+ model1.compile(loss="mse", optimizer="adam",metrics=[rmse])
167
+ # model1.compile(loss=rmse, optimizer="adam",metrics=[err_absolute]) #rmse
168
+ # root_mean_squared_error
169
+ print("> Compilation Time : ", time.time() - start)
170
+
171
+ # Training --------------------------------------------------------
172
+ loss = list()
173
+ ResultPlot = DataFrame()
174
+
175
+ #### Create a copy of input training ##########################
176
+ input_training_org = input_training
177
+ input_validation_org = input_validation
178
+ ###############################################################
179
+ model1.load_weights("lstm_20k_model2.h5")
180
+ StartingValidation = 0
181
+ if epochs_num > 0:
182
+ # iTimeStep = 1
183
+ # iTimeStep_val = 1
184
+ for ep in xrange(epochs_num):
185
+ print("Iteration {} ----- ".format(ep))
186
+
187
+ # Fit Model
188
+ # hist = model1.fit(input_training,output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
189
+ hist = model1.fit(input_training,output_training, epochs=1, batch_size=NumBatch, verbose=1)
190
+ loss.append(hist.history['loss'][0])
191
+
192
+ # model1.fit(input_training,output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
193
+ model1.save_weights("lstm_20k_model2.h5")
194
+ print('Training duration (s) : ', time.time() - global_start_time)
195
+ else:
196
+ print("TESTING...........")
197
+ pass
198
+
199
+ # Testing ---------------------------------------------------------
200
+ #########################################################
201
+ # The first steps: Fill up the buffer
202
+ # #######################################################
203
+ CountTest = 0
204
+ Average_Err = 0
205
+ error = np.zeros(LengthTest)
206
+
207
+ for Step in xrange(timestep):
208
+ print("Location {}------------".format(CountTest))
209
+ # Update the Locations & RSSIs buffer
211
+ LocationIdx = 0
212
+ RSSIdx = 0
213
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
214
+ for i in xrange(timestep):
215
+ TestingData[LocationIdx] = L_combine[i*2]
216
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
217
+ for j in xrange(len(RSSI_L2)):
218
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
219
+ LocationIdx = LocationIdx + num_rssi_reading + 2
220
+ RSSIdx = RSSIdx + num_rssi_reading
221
+ # print(TestingData)
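+ # Each timestep block is laid out as [x, y, rssi_1..rssi_11]; the locations are
+ # used as-is while the RSSI values are rescaled from [-100, 0] dBm into [0, 1]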
222
+
223
+ TestingData = TestingData.reshape(1,1,input_layer)
224
+ # Prediction
225
+ Predicted_L = model1.predict(TestingData)
226
+ Final_L = Predicted_L[0,0,:]
227
+
228
+ Correct_L = Location[CountTest,:]
229
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
230
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
231
+ Average_Err = Average_Err + error[CountTest]
232
+ CountTest = CountTest + 1
233
+
234
+ IdxTemp = 0
235
+ # Update Location
236
+ for j in xrange(Step*2,len(L_combine)):
237
+ L_combine[j] = Final_L[IdxTemp]
238
+ if IdxTemp == 0:
239
+ IdxTemp = 1
240
+ else:
241
+ IdxTemp = 0
242
+ pass
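+ # From index Step*2 onward every remaining (x, y) slot of L_combine is filled
+ # with the newest prediction, so not-yet-predicted steps reuse the latest estimate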
243
+
244
+ # Take RSSI
245
+ IdxTemp = 0
246
+ for j in xrange(Step*num_rssi_reading,len(RSSI_combine)):
247
+ RSSI_combine[j] = TestData[Step+1,IdxTemp]
248
+ IdxTemp = IdxTemp + 1
249
+ if IdxTemp == num_rssi_reading: # Reach the end
250
+ IdxTemp = 0
251
+
252
+ #########################################################
253
+ # After the buffer is full
254
+ # #######################################################
255
+
256
+ while CountTest < LengthTest-1:
257
+ print("Location {}------------".format(CountTest))
258
+ # Update the Locations & RSSIs buffer
259
+ LocationIdx = 0
260
+ RSSIdx = 0
261
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
262
+ for i in xrange(timestep):
263
+ TestingData[LocationIdx] = L_combine[i*2]
264
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
265
+ for j in xrange(len(RSSI_L2)):
266
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
267
+ LocationIdx = LocationIdx + num_rssi_reading + 2
268
+ RSSIdx = RSSIdx + num_rssi_reading
269
+
270
+ TestingData = TestingData.reshape(1,1,input_layer)
271
+
272
+ # Prediction
273
+ Predicted_L = model1.predict(TestingData)
274
+ Final_L = Predicted_L[0,0,:]
275
+
276
+ Correct_L = Location[CountTest,:]
277
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
278
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
279
+ Average_Err = Average_Err + error[CountTest]
280
+
281
+ File1.write(str(error[CountTest]) + ' , ') # write to file
282
+ File2.write(str(Final_L[0]) + ',')
283
+ File2.write(str(Final_L[1]) + '\n') # write to file
284
+
285
+ # Re-arrange L_combine
286
+ for t in xrange(timestep-1):
287
+ L_combine[t*2] = L_combine[(t+1)*2]
288
+ L_combine[t*2+1] = L_combine[(t+1)*2+1]
289
+ # Update
290
+ L_combine[(timestep-1)*2] = Final_L[0]
291
+ L_combine[(timestep-1)*2+1] = Final_L[1]
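+ # Sliding window over locations: the oldest (x, y) pair is dropped and the newest prediction appended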
292
+
293
+ # Re-arrange RSSI_combine
294
+ for t in xrange(timestep-1):
295
+ for k in xrange(num_rssi_reading):
296
+ RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
297
+
298
+ for k in xrange(num_rssi_reading):
299
+ RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+1,k]
300
+
301
+ CountTest = CountTest + 1
302
+ ###################################################################
303
+ ############### The Last Locations ###############################
304
+ ###################################################################
305
+
306
+ Average_Err = Average_Err/LengthTest
307
+ print "Average Error: ", Average_Err
308
+
309
+ Std_Err = 0
310
+ for k in xrange(LengthTest):
311
+ Std_Err = Std_Err + np.power((error[k] - Average_Err),2)
312
+ Std_Err = Std_Err/(LengthTest-1)
313
+ Std_Err = np.sqrt(Std_Err)
314
+ print "Std: ", Std_Err
315
+
316
+ #### Show Figure ######################
317
+ if epochs_num > 0:
318
+ ResultPlot['neurons_500'] = loss
319
+ ResultPlot.plot()
320
+ pyplot.show()
321
+
322
+ File1.close()
323
+ File2.close()
324
+ # File3.close()
325
+ # File4.close()
LSTM_Model_3.py ADDED
@@ -0,0 +1,323 @@
1
+ #############################################################
2
+ # 10 locations/ trajectory
3
+ # First location is known
4
+ # Date: 8 June 2018
5
+ #############################################################
6
+
7
+ import time
8
+ import tensorflow
9
+ # import lstm
10
+ import os
11
+ #import time
12
+ import warnings
13
+ import numpy as np
14
+ from numpy import newaxis
15
+ from keras.layers import TimeDistributed
16
+ from keras.layers.core import Dense, Activation, Dropout
17
+ from keras.layers.recurrent import LSTM
18
+ from keras.models import Sequential
19
+ # import matplotlib.pyplot as plt
20
+ from matplotlib import pyplot
21
+ import pandas as pd
22
+ import math
23
+ from pandas import DataFrame
24
+ from keras import backend as K
25
+
26
+ def rmse(y_true, y_pred):
27
+ return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
28
+ def err_absolute(y_true, y_pred):
29
+ err = K.sqrt(K.square(y_pred - y_true))
30
+ return err
31
+
32
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' #Hide messy TensorFlow warnings
33
+ warnings.filterwarnings("ignore") #Hide messy Numpy warnings
34
+
35
+ # Open File to write results
36
+ File1 = open('Error_Model3.csv','w')
37
+ File2 = open('Traj_Model3.csv','w')
38
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
39
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
40
+
41
+ NumTotalTraj = 120000
42
+
43
+ # Configure the sampling set -----------------
44
+ epochs_num = 0
45
+ NumTrajTraining = 20000
46
+ NumSam_PerTraj = 1
47
+ StartingTraj = 20000
48
+ StartingSample = StartingTraj*NumSam_PerTraj # the factor of NumSam_PerTraj
49
+ NumSample = NumTrajTraining*NumSam_PerTraj # NumTraj = NumSample/NumSam_PerTraj
50
+
51
+ # Configure the validation set -----------------
52
+ NumTrajVal = 10000
53
+ StartingTrajVal = 100000
54
+
55
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
56
+ NumValidation = NumTrajVal*NumSam_PerTraj
57
+
58
+ # Main Run Thread
59
+ global_start_time = time.time()
60
+ input_layer = 11 # 11 RSSI readings (one per MAC address) per timestep; no location inputs
61
+ num_rssi_reading = 11
62
+ output_layer = 2
63
+ timestep = 10
64
+ NumBatch = 256
65
+ hidden_layer1 = 100
66
+ hidden_layer2 = 100
67
+
68
+ ##########################################################
69
+ ###### Test Trajectory Splitting #########################
70
+ ##########################################################
71
+ # Read Training data --------------------------------------------------
72
+ print('> Loading data... ')
+ fileDir = os.path.dirname(os.path.realpath(__file__)) # assumed script directory; fileDir was undefined (see note in LSTM_Model_1.py)
73
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/InputRNN_10points_120k_RSSI.csv')
74
+ filename = os.path.abspath(os.path.realpath(filename))
75
+ df1=pd.read_csv(filename)
76
+ df1= np.asarray(df1)
77
+ df1_split = df1[StartingSample:StartingSample+NumSample,:]
78
+ input_training = df1_split.reshape(len(df1_split),timestep,input_layer)
79
+ print(input_training.shape)
80
+
81
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Traj_10points_120k_ModelTest.csv')
82
+ filename = os.path.abspath(os.path.realpath(filename))
83
+ df2=pd.read_csv(filename)
84
+ df2= np.asarray(df2)
85
+ df2_split = df2[StartingSample:StartingSample+NumSample,:]
86
+ output_training = df2_split.reshape(len(df2_split),timestep,output_layer)
87
+ print(output_training.shape)
88
+
89
+ ########################################################################
90
+ # Read Validation data --------------------------------------------------
91
+ #######################################################################
92
+ df3= df1[StartingValidation:StartingValidation+NumValidation,:]
93
+ input_validation = df3.reshape(len(df3),timestep,input_layer)
94
+ print(input_validation.shape)
95
+
96
+ df4=df2[StartingValidation:StartingValidation+NumValidation,:]
97
+ output_validation = df4.reshape(len(df4),timestep,output_layer)
98
+ print(output_validation.shape)
99
+
100
+ ####################################################################
101
+ # Testing data ------------------------
102
+ ####################################################################
103
+ # RSSI array
104
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_6July_v1_AverageFilter.csv')
105
+ filename = os.path.abspath(os.path.realpath(filename))
106
+ TestData_origin =pd.read_csv(filename)
107
+ print(TestData_origin.shape)
108
+ TestData_origin = np.asarray(TestData_origin)
109
+
110
+ LengthTest = len(TestData_origin)
111
+ StartTestIdx = 0
112
+ StopTestIdx = StartTestIdx + LengthTest
113
+
114
+ # print(TestData.shape)
115
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
116
+ # LengthTest = len(TestData)
117
+
118
+ # List of Locations
119
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_Location.csv')
120
+ filename = os.path.abspath(os.path.realpath(filename))
121
+ Location_origin = pd.read_csv(filename)
122
+ print(Location_origin.shape)
123
+ Location_origin = np.asarray(Location_origin)
124
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
125
+
126
+ print('> Data Loaded. Compiling...')
127
+
128
+ ####################################################################
129
+ # Initialize the first 9 locations with the known start point #####
130
+ ####################################################################
131
+ L1 = Location[0,:]
132
+ L2 = Location[0,:]
133
+ L3 = Location[0,:]
134
+ L4 = Location[0,:]
135
+ L5 = Location[0,:]
136
+ L6 = Location[0,:]
137
+ L7 = Location[0,:]
138
+ L8 = Location[0,:]
139
+ L9 = Location[0,:]
140
+ L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
141
+
142
+ # Take RSSI
143
+ RSSI_L1 = TestData[0,:]
144
+ RSSI_L2 = TestData[1,:]
145
+ RSSI_L3 = TestData[1,:]
146
+ RSSI_L4 = TestData[1,:]
147
+ RSSI_L5 = TestData[1,:]
148
+ RSSI_L6 = TestData[1,:]
149
+ RSSI_L7 = TestData[1,:]
150
+ RSSI_L8 = TestData[1,:]
151
+ RSSI_L9 = TestData[1,:]
152
+ RSSI_L10 = TestData[1,:]
153
+ RSSI_combine = np.concatenate((RSSI_L1, RSSI_L2, RSSI_L3, RSSI_L4, RSSI_L5,RSSI_L6, RSSI_L7, RSSI_L8, RSSI_L9, RSSI_L10), axis=0)
154
+
155
+ # Build the network --------------------------------------------
156
+ model1 = Sequential()
157
+ model1.add(LSTM(hidden_layer1, input_shape=(timestep, input_layer), return_sequences=True))
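+ # Unlike Model 1's flattened single-step input, Model 3 feeds the window as a true
+ # 10-step sequence (timestep x 11 RSSI) and predicts a location for every step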
158
+ model1.add(Dropout(0.2))
159
+ model1.add(LSTM(hidden_layer2,return_sequences=True))
160
+ model1.add(Dropout(0.2))
161
+ model1.add(TimeDistributed(Dense(output_layer)))
162
+ model1.summary()
163
+ start = time.time()
164
+ model1.compile(loss="mse", optimizer="adam",metrics=[rmse])
165
+ # model1.compile(loss=rmse, optimizer="adam",metrics=[err_absolute]) #rmse
166
+ # root_mean_squared_error
167
+ print("> Compilation Time : ", time.time() - start)
168
+
169
+ # Training --------------------------------------------------------
170
+ loss = list()
171
+ ResultPlot = DataFrame()
172
+
173
+ #### Create a copy of input training ##########################
174
+ input_training_org = input_training
175
+ input_validation_org = input_validation
176
+ ###############################################################
177
+ model1.load_weights("lstm_20k_model3.h5")
178
+ StartingValidation = 0
179
+ if epochs_num > 0:
180
+ # iTimeStep = 1
181
+ # iTimeStep_val = 1
182
+ for ep in xrange(epochs_num):
183
+ print("Iteration {} ----- ".format(ep))
184
+
185
+ # Fit Model
186
+ # hist = model1.fit(input_training,output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
187
+ hist = model1.fit(input_training,output_training, epochs=1, batch_size=NumBatch, verbose=1)
188
+ loss.append(hist.history['loss'][0])
189
+
190
+ # model1.fit(input_training,output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
191
+ model1.save_weights("lstm_20k_model3.h5")
192
+ print('Training duration (s) : ', time.time() - global_start_time)
193
+ else:
194
+ print("TESTING...........")
195
+ pass
196
+
197
+ # Testing ---------------------------------------------------------
198
+ #########################################################
199
+ # The first steps: Fill up the buffer
200
+ # #######################################################
201
+ Acc_Location = np.zeros((timestep,2))
202
+ Acc_Location[0,:] = L1
203
+
204
+ for Step in xrange(1,timestep):
205
+
206
+ # Update the Locations & RSSIs buffer
207
+ RSSIdx = 0
208
+ TestingData = np.zeros(len(RSSI_combine))
209
+ for i in xrange(timestep):
210
+ for j in xrange(len(RSSI_L2)): # normalize each RSSI reading from [-100, 0] dBm to [0, 1]
211
+ TestingData[j+RSSIdx] = (float(RSSI_combine[j+RSSIdx])+100)/100
212
+ RSSIdx = RSSIdx + num_rssi_reading
213
+
214
+ TestingData = TestingData.reshape(1,timestep,input_layer)
215
+ # print(TestingData)
216
+
217
+ # Prediction
218
+ Predicted_L = model1.predict(TestingData)
219
+ Acc_Location[Step,:] = Predicted_L[0,Step,:]
220
+ print(Acc_Location)
221
+
222
+ # Take RSSI
223
+ IdxTemp = 0
224
+ for j in xrange(Step*num_rssi_reading,len(RSSI_combine)):
225
+ RSSI_combine[j] = TestData[Step+1,IdxTemp]
226
+ IdxTemp = IdxTemp + 1
227
+ if IdxTemp == num_rssi_reading: # Reach the end
228
+ IdxTemp = 0
229
+
230
+ #########################################################
231
+ # After the buffer is full - Do the Test
232
+ # #######################################################
233
+
234
+ CountArray = np.ones(timestep)
235
+ error = np.zeros(LengthTest-1)
236
+ # Predicted_array = np.zeros((LengthTest-1,2))
237
+ Average_Err = 0
238
+
239
+ for CountTest in xrange(LengthTest-timestep):
240
+ print("Location {}------------".format(CountTest))
241
+ # Update the Locations & RSSIs buffer
242
+ RSSIdx = 0
243
+ TestingData = np.zeros(len(RSSI_combine))
244
+ for i in xrange(timestep):
245
+ for j in xrange(len(RSSI_L2)):
246
+ TestingData[j+RSSIdx] = (float(RSSI_combine[j+RSSIdx])+100)/100
247
+ RSSIdx = RSSIdx + num_rssi_reading
248
+
249
+ TestingData = TestingData.reshape(1,timestep,input_layer)
250
+
251
+ # Prediction
252
+ Predicted_L = model1.predict(TestingData)
253
+ # Re-arrange Accumulated Location
254
+ for t in xrange(timestep-1):
255
+ CountArray[t] = CountArray[t+1]
256
+ Acc_Location[t,:] = Acc_Location[t+1,:]+Predicted_L[0,t,:]
257
+ CountArray[t] = CountArray[t] + 1
258
+
259
+ Acc_Location[timestep-1,:] = Predicted_L[0,timestep-1,:] # Update new Location
260
+ CountArray[timestep-1] = 1 #Update New Count
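+ # Each physical location is covered by up to `timestep` overlapping windows; its
+ # predictions are summed in Acc_Location and averaged via CountArray at output time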
261
+ # print(Acc_Location)
262
+ # print(CountArray)
263
+ ######################################################################
264
+ ############# UPDATE LOCATION ########################################
265
+ ######################################################################
266
+
267
+ Final_L = Acc_Location[0,:]/CountArray[0]
268
+ CountArray[0] = 1
269
+ # Take correct location, compare the result
270
+ Correct_L = Location[CountTest+1,:]
271
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
272
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
273
+ Average_Err = Average_Err + error[CountTest]
274
+
275
+ File1.write(str(error[CountTest]) + ' , ') # write to file
276
+ File2.write(str(Final_L[0]) + ',')
277
+ File2.write(str(Final_L[1]) + '\n') # write to file
278
+
279
+ # Re-arrange RSSI_combine
280
+ if CountTest+timestep+1 < LengthTest:
281
+ for t in xrange(timestep-1):
282
+ for k in xrange(num_rssi_reading):
283
+ RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
284
+
285
+ for k in xrange(num_rssi_reading):
286
+ RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+timestep+1,k]
287
+
288
+ ###################################################################
289
+ ############### The Last Locations ###############################
290
+ ###################################################################
291
+ for i in xrange(timestep-1):
292
+ Final_L = Acc_Location[i+1,:]/CountArray[i+1]
293
+ # Take correct location, compare the result
294
+ Correct_L = Location[CountTest+1,:]
295
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
296
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
297
+ Average_Err = Average_Err + error[CountTest]
298
+
299
+ File1.write(str(error[CountTest]) + ' , ') # write to file
300
+ File2.write(str(Final_L[0]) + ',')
301
+ File2.write(str(Final_L[1]) + '\n') # write to file
302
+ CountTest = CountTest + 1
303
+
304
+ Average_Err = Average_Err/(LengthTest-1)
305
+ print "Average Error: ", Average_Err
306
+
307
+ Std_Err = 0
308
+ for k in xrange(LengthTest-1):
309
+ Std_Err = Std_Err + np.power((error[k] - Average_Err),2)
310
+ Std_Err = Std_Err/(LengthTest-1)
311
+ Std_Err = np.sqrt(Std_Err)
312
+ print "Std: ", Std_Err
313
+
314
+ #### Show Figure ######################
315
+ if epochs_num > 0:
316
+ ResultPlot['neurons_500'] = loss
317
+ ResultPlot.plot()
318
+ pyplot.show()
319
+
320
+ File1.close()
321
+ File2.close()
322
+ # File3.close()
323
+ # File4.close()
LSTM_Model_4.py ADDED
@@ -0,0 +1,392 @@
1
+ #############################################################
2
+ # 10 locations/ trajectory
3
+ # First location is known
4
+ # Date: 8 June 2018
5
+ #############################################################
6
+
7
+ import time
8
+ import tensorflow
9
+ # import lstm
10
+ import os
11
+ #import time
12
+ import warnings
13
+ import numpy as np
14
+ from numpy import newaxis
15
+ from keras.layers import TimeDistributed
16
+ from keras.layers.core import Dense, Activation, Dropout
17
+ from keras.layers.recurrent import LSTM
18
+ from keras.models import Sequential
19
+ # import matplotlib.pyplot as plt
20
+ from matplotlib import pyplot
21
+ import pandas as pd
22
+ import math
23
+ from pandas import DataFrame
24
+ from keras import backend as K
25
+
26
+ def rmse(y_true, y_pred):
27
+ return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
28
+ def err_absolute(y_true, y_pred):
29
+ err = K.sqrt(K.square(y_pred - y_true))
30
+ return err
31
+
32
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' #Hide messy TensorFlow warnings
33
+ warnings.filterwarnings("ignore") #Hide messy Numpy warnings
34
+
35
+ # Open File to write results
36
+ File1 = open('Error_Model_4.csv','w')
37
+ File2 = open('Traj_Model_4.csv','w')
38
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
39
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
40
+
41
+ NumTotalTraj = 365000
42
+
43
+ # Configure the sampling set -----------------
44
+ epochs_num = 200
45
+ NumTrajTraining = 20000
46
+ NumSam_PerTraj = 1
47
+ StartingTraj = 0
48
+ StartingSample = StartingTraj*NumSam_PerTraj # the factor of NumSam_PerTraj
49
+ NumSample = NumTrajTraining*NumSam_PerTraj # NumTraj = NumSample/NumSam_PerTraj
50
+
51
+ # Configure the validation set -----------------
52
+ NumTrajVal = 10000
53
+ StartingTrajVal = 100000
54
+
55
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
56
+ NumValidation = NumTrajVal*NumSam_PerTraj
57
+
58
+ # Main Run Thread
59
+ global_start_time = time.time()
60
+ input_layer = 13 # X, Y & 11 MAC Addresses
61
+ num_rssi_reading = 11
62
+ output_layer = 2
63
+ timestep = 9
64
+ NumBatch = 512
65
+ hidden_layer1 = 100
66
+ hidden_layer2 = 100
67
+
68
+ ##########################################################
69
+ ###### Test Trajectory Splitting #########################
70
+ ##########################################################
71
+ # Read Training data --------------------------------------------------
72
+ print('> Loading data... ')
+ fileDir = os.path.dirname(os.path.realpath(__file__)) # base directory for the relative data paths (fileDir was undefined)
73
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Input_Location_RSSI_10points_365k.csv')
74
+ filename = os.path.abspath(os.path.realpath(filename))
75
+ df1=pd.read_csv(filename)
76
+ df1= np.asarray(df1)
77
+ df1_split = df1[StartingSample:StartingSample+NumSample,:]
78
+ input_training = df1_split.reshape(len(df1_split),timestep,input_layer)
79
+ print(input_training.shape)
80
+
81
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Output_Location_RSSI_10points_365k.csv')
82
+ filename = os.path.abspath(os.path.realpath(filename))
83
+ df2=pd.read_csv(filename)
84
+ df2= np.asarray(df2)
85
+ df2_split = df2[StartingSample:StartingSample+NumSample,:]
86
+ output_training = df2_split.reshape(len(df2_split),timestep,output_layer)
87
+ print(output_training.shape)
88
+
89
+ ########################################################################
90
+ # Read Validation data --------------------------------------------------
91
+ #######################################################################
92
+ df3= df1[StartingValidation:StartingValidation+NumValidation,:]
93
+ input_validation = df3.reshape(len(df3),timestep,input_layer)
94
+ print(input_validation.shape)
95
+
96
+ df4=df2[StartingValidation:StartingValidation+NumValidation,:]
97
+ output_validation = df4.reshape(len(df4),timestep,output_layer)
98
+ print(output_validation.shape)
99
+
100
+ ####################################################################
101
+ # Testing data ------------------------
102
+ ####################################################################
103
+ # RSSI array
104
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_6July_v1_AverageFilter.csv')
105
+ filename = os.path.abspath(os.path.realpath(filename))
106
+ TestData_origin =pd.read_csv(filename)
107
+ print(TestData_origin.shape)
108
+ TestData_origin = np.asarray(TestData_origin)
109
+
110
+ LengthTest = len(TestData_origin)
111
+ StartTestIdx = 0
112
+ StopTestIdx = StartTestIdx + LengthTest
113
+
114
+ # print(TestData.shape)
115
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
116
+ # LengthTest = len(TestData)
117
+
118
+ # List of Locations
119
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_Location.csv')
120
+ filename = os.path.abspath(os.path.realpath(filename))
121
+ Location_origin = pd.read_csv(filename)
122
+ print(Location_origin.shape)
123
+ Location_origin = np.asarray(Location_origin)
124
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
125
+
126
+ print('> Data Loaded. Compiling...')
127
+
128
+ ####################################################################
129
+ # Init 9 first Location ###########################################
130
+ ####################################################################
131
+ L1 = Location[0,:]
132
+ L2 = Location[0,:]
133
+ L3 = Location[0,:]
134
+ L4 = Location[0,:]
135
+ L5 = Location[0,:]
136
+ L6 = Location[0,:]
137
+ L7 = Location[0,:]
138
+ L8 = Location[0,:]
139
+ L9 = Location[0,:]
140
+ L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
141
+
142
+ # Take RSSI
143
+ RSSI_L1 = TestData[0,:]
144
+ RSSI_L2 = TestData[1,:]
145
+ RSSI_L3 = TestData[1,:]
146
+ RSSI_L4 = TestData[1,:]
147
+ RSSI_L5 = TestData[1,:]
148
+ RSSI_L6 = TestData[1,:]
149
+ RSSI_L7 = TestData[1,:]
150
+ RSSI_L8 = TestData[1,:]
151
+ RSSI_L9 = TestData[1,:]
152
+ RSSI_L10 = TestData[1,:]
153
+ RSSI_combine = np.concatenate((RSSI_L2, RSSI_L3, RSSI_L4, RSSI_L5,RSSI_L6, RSSI_L7, RSSI_L8, RSSI_L9, RSSI_L10), axis=0)
154
+
155
+ # Build the network --------------------------------------------
156
+ model1 = Sequential()
157
+ model1.add(LSTM(hidden_layer1, input_shape=(timestep, input_layer), return_sequences=True))
158
+ model1.add(Dropout(0.2))
159
+ model1.add(LSTM(hidden_layer2,return_sequences=True))
160
+ model1.add(Dropout(0.2))
161
+ model1.add(TimeDistributed(Dense(output_layer)))
162
+ model1.summary()
163
+ start = time.time()
164
+ model1.compile(loss="mse", optimizer="adam",metrics=[rmse])
165
+ # model1.compile(loss=rmse, optimizer="adam",metrics=[err_absolute]) #rmse
166
+ # root_mean_squared_error
167
+ print("> Compilation Time : ", time.time() - start)
168
+
169
+ # Training --------------------------------------------------------
170
+ loss = list()
171
+ ResultPlot = DataFrame()
172
+
173
+ #### Create a copy of input training ##########################
174
+ input_training_org = input_training
175
+ input_validation_org = input_validation
176
+ ###############################################################
177
+ model1.load_weights("LSTM_Model_4_v1.h5")
178
+ StartingValidation = 0
179
+ if epochs_num > 0:
180
+ iTimeStep = 1
181
+ iTimeStep_val = 1
182
+ for ep in xrange(epochs_num):
183
+ print("Iteration {} ----- ".format(ep))
184
+
185
+ # Fit Model
186
+ # hist = model1.fit(input_training,output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
187
+ hist = model1.fit(input_training,output_training, epochs=1, batch_size=NumBatch, verbose=1)
188
+ loss.append(hist.history['loss'][0])
189
+ # File3.write(str(hist.history['loss'][0]) + '\n') # write to file
190
+
191
+ # Reform the input training ---------------------------------
192
+ # print("Training Predict Sample {} ... ".format(iTimeStep))
193
+ # Predicted_L_Training = model1.predict(input_training)
194
+ # get the predicted locations to become the inputs of the next step
195
+ # for iSample in xrange(0,len(df1_split)):
196
+ # First location: known
197
+ # Update predicted Location starting from the second location
198
+ # input_training[iSample,iTimeStep,0] = Predicted_L_Training[iSample,iTimeStep-1,0]
199
+ # input_training[iSample,iTimeStep,1] = Predicted_L_Training[iSample,iTimeStep-1,1]
200
+ # iTimeStep = iTimeStep + 1 # Update for next time step
201
+
202
+ # if iTimeStep == timestep: # reset 1 round -------
203
+ # input_training = input_training_org
204
+ # iTimeStep = 1
205
+ # --------------------------------------------------------------------------------------
206
+
207
+ # Reform the validation set for testing -------------------------------------------------
208
+ # Input is updated by prediction
209
+ # Output: Ideal
210
+ # print("Validation Predict ...")
211
+ # Predicted_L_Validation = model1.predict(input_validation)
212
+ # for iSample in xrange(0,len(df3)):
213
+ # input_validation[iSample,iTimeStep_val,0] = Predicted_L_Validation[iSample,iTimeStep_val-1,0]
214
+ # input_validation[iSample,iTimeStep_val,1] = Predicted_L_Validation[iSample,iTimeStep_val-1,1]
215
+ # iTimeStep_val = iTimeStep_val + 1
216
+ # print("TESTING...........")
217
+
218
+ # if iTimeStep_val == timestep: # reset 1 round -------
219
+ # input_validation = input_validation_org
220
+ # iTimeStep_val = 1
221
+ # ------------------------------------------------------------------------------------------
222
+ # Calculate the validation error -------------------------------------------------
223
+
224
+
225
+ # model1.fit(input_training,output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
226
+ model1.save_weights("LSTM_Model_4_v1.h5")
227
+ print('Training duration (s) : ', time.time() - global_start_time)
228
+ else:
229
+ print("TESTING...........")
230
+ pass
231
+
232
+ # Testing ---------------------------------------------------------
233
+ #########################################################
234
+ # The first (timestep-1) steps: fill up the location/RSSI buffer
235
+ # #######################################################
236
+ Acc_Location = np.zeros((timestep,2))
237
+ Acc_Location[0,:] = L1
238
+
239
+ for Step in xrange(1,timestep):
240
+
241
+ # Update the Locations & RSSIs buffer
242
+ LocationIdx = 0
243
+ RSSIdx = 0
244
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
245
+ for i in xrange(timestep):
246
+ TestingData[LocationIdx] = L_combine[i*2]
247
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
248
+ for j in xrange(len(RSSI_L2)):
249
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
250
+ LocationIdx = LocationIdx + input_layer
251
+ RSSIdx = RSSIdx + num_rssi_reading
252
+
253
+ TestingData = TestingData.reshape(1,timestep,input_layer)
254
+ # print(TestingData)
255
+
256
+ # Prediction
257
+ Predicted_L = model1.predict(TestingData)
258
+ Acc_Location[Step,:] = Predicted_L[0,Step-1,:]
259
+ print(Acc_Location)
260
+
261
+ # Update for next step
262
+ # Fill the remaining location slots in the buffer with the newest prediction
263
+ IdxTemp = 0
264
+ # Update Location
265
+ for j in xrange(Step*2,len(L_combine)):
266
+ L_combine[j] = Acc_Location[Step,IdxTemp]
267
+ if IdxTemp == 0:
268
+ IdxTemp = 1
269
+ else:
270
+ IdxTemp = 0
271
+ pass
272
+
273
+ # Take RSSI
274
+ IdxTemp = 0
275
+ for j in xrange(Step*num_rssi_reading,len(RSSI_combine)):
276
+ RSSI_combine[j] = TestData[Step+1,IdxTemp]
277
+ IdxTemp = IdxTemp + 1
278
+ if IdxTemp == num_rssi_reading: # Reach the end
279
+ IdxTemp = 0
280
+
281
+ #########################################################
282
+ # After the buffer is full - do the test
283
+ # #######################################################
284
+
285
+ CountArray = np.ones(timestep)
286
+ error = np.zeros(LengthTest-1)
287
+ # Predicted_array = np.zeros((LengthTest-1,2))
288
+ Average_Err = 0
289
+
290
+ for CountTest in xrange(LengthTest-timestep):
291
+ print("Location {}------------".format(CountTest))
292
+ # Update the Locations & RSSIs buffer
293
+ LocationIdx = 0
294
+ RSSIdx = 0
295
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
296
+ for i in xrange(timestep):
297
+ TestingData[LocationIdx] = L_combine[i*2]
298
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
299
+ for j in xrange(len(RSSI_L2)):
300
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
301
+ LocationIdx = LocationIdx + input_layer
302
+ RSSIdx = RSSIdx + num_rssi_reading
303
+
304
+ TestingData = TestingData.reshape(1,timestep,input_layer)
305
+ # print(TestingData)
306
+
307
+ # Prediction
308
+ Predicted_L = model1.predict(TestingData)
309
+
310
+ # for t in xrange(timestep):
311
+ # File4.write(str(Predicted_L[0,t,0]) + ',')
312
+ # File4.write(str(Predicted_L[0,t,1])+ ',') # write to file
313
+ # File4.write('\n')
314
+
315
+ # Re-arrange Accumulated Location
316
+ for t in xrange(timestep-1):
317
+ CountArray[t] = CountArray[t+1]
318
+ Acc_Location[t,:] = Acc_Location[t+1,:]+Predicted_L[0,t,:]
319
+ CountArray[t] = CountArray[t] + 1
320
+
321
+ Acc_Location[timestep-1,:] = Predicted_L[0,timestep-1,:] # Update new Location
322
+ CountArray[timestep-1] = 1 #Update New Count
323
+ # print(Acc_Location)
324
+ # print(CountArray)
325
+ ######################################################################
326
+ ############# UPDATE LOCATION ########################################
327
+ ######################################################################
328
+
329
+ Final_L = Acc_Location[0,:]/CountArray[0]
330
+ CountArray[0] = 1
331
+ # Take correct location, compare the result
332
+ Correct_L = Location[CountTest+1,:]
333
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
334
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
335
+ Average_Err = Average_Err + error[CountTest]
336
+
337
+ File1.write(str(error[CountTest]) + ' , ') # write to file
338
+ File2.write(str(Final_L[0]) + ',')
339
+ File2.write(str(Final_L[1]) + '\n') # write to file
340
+
341
+ # Re-arrange L_combine
342
+ for t in xrange(timestep-1):
343
+ L_combine[t*2] = L_combine[(t+1)*2]
344
+ L_combine[t*2+1] = L_combine[(t+1)*2+1]
345
+ # Update
346
+ L_combine[(timestep-1)*2] = Predicted_L[0,timestep-1,0]
347
+ L_combine[(timestep-1)*2+1] = Predicted_L[0,timestep-1,1]
348
+ # Re-arrange RSSI_combine
349
+ if CountTest+timestep+1 < LengthTest:
350
+ for t in xrange(timestep-1):
351
+ for k in xrange(num_rssi_reading):
352
+ RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
353
+
354
+ for k in xrange(num_rssi_reading):
355
+ RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+timestep+1,k]
356
+
357
+ ###################################################################
358
+ ############### The Last Locations ###############################
359
+ ###################################################################
360
+ for i in xrange(timestep-1):
361
+ Final_L = Acc_Location[i+1,:]/CountArray[i+1]
362
+ # Take correct location, compare the result
363
+ Correct_L = Location[CountTest+1,:]
364
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
365
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
366
+ Average_Err = Average_Err + error[CountTest]
367
+
368
+ File1.write(str(error[CountTest]) + ' , ') # write to file
369
+ File2.write(str(Final_L[0]) + ',')
370
+ File2.write(str(Final_L[1]) + '\n') # write to file
371
+ CountTest = CountTest + 1
372
+
373
+ Average_Err = Average_Err/(LengthTest-1)
374
+ print "Average Error: ", Average_Err
375
+
376
+ Std_Err = 0
377
+ for k in xrange(LengthTest-1):
378
+ Std_Err = Std_Err + np.power((error[k] - Average_Err),2)
379
+ Std_Err = Std_Err/(LengthTest-1)
380
+ Std_Err = np.sqrt(Std_Err)
381
+ print "Std: ", Std_Err
382
+
383
+ #### Show Figure ######################
384
+ #if epochs_num > 0:
385
+ # ResultPlot['neurons_500'] = loss
386
+ # ResultPlot.plot()
387
+ # pyplot.show()
388
+
389
+ File1.close()
390
+ File2.close()
391
+ # File3.close()
392
+ # File4.close()
LSTM_Model_4_v1.h5 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:079f3fbd158367da1807e6a589f355a7177186da7e5f464389c6c6ad97138b29
3
+ size 522080
LSTM_Model_5.py ADDED
@@ -0,0 +1,395 @@
1
+ #############################################################
2
+ # 10 locations/ trajectory
3
+ # First location is known
4
+ # Date: 8 June 2018
5
+ #############################################################
6
+
7
+ import time
8
+ import tensorflow
9
+ # import lstm
10
+ import os
11
+ #import time
12
+ import warnings
13
+ import numpy as np
14
+ from numpy import newaxis
15
+ from keras.layers import TimeDistributed
16
+ from keras.layers.core import Dense, Activation, Dropout
17
+ from keras.layers.recurrent import LSTM
18
+ from keras.models import Sequential
19
+ # import matplotlib.pyplot as plt
20
+ from matplotlib import pyplot
21
+ import pandas as pd
22
+ import math
23
+ from pandas import DataFrame
24
+ from keras import backend as K
25
+
26
+ def rmse(y_true, y_pred):
27
+ return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
28
+ def err_absolute(y_true, y_pred):
29
+ err = K.sqrt(K.square(y_pred - y_true))
30
+ return err
31
+
32
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' #Hide messy TensorFlow warnings
33
+ warnings.filterwarnings("ignore") #Hide messy Numpy warnings
34
+
35
+ # Open File to write results
36
+ File1 = open('Error_Model_5_0_5ms.csv','w')
37
+ File2 = open('Traj_Model_5.csv','w')
38
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
39
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
40
+
41
+ NumTotalTraj = 365000
42
+
43
+ # Configure the sampling set -----------------
44
+ epochs_num = 0
45
+ NumTrajTraining = 20000
46
+ NumSam_PerTraj = 1
47
+ StartingTraj = 0
48
+ StartingSample = StartingTraj*NumSam_PerTraj # the factor of NumSam_PerTraj
49
+ NumSample = NumTrajTraining*NumSam_PerTraj # NumTraj = NumSample/NumSam_PerTraj
50
+
51
+ # Configure the validation set -----------------
52
+ NumTrajVal = 10000
53
+ StartingTrajVal = 100000
54
+
55
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
56
+ NumValidation = NumTrajVal*NumSam_PerTraj
57
+
58
+ # Main Run Thread
59
+ global_start_time = time.time()
60
+ input_layer = 13 # X, Y & 11 MAC Addresses
61
+ num_rssi_reading = 11
62
+ output_layer = 2
63
+ timestep = 9
64
+ NumBatch = 512
65
+ hidden_layer1 = 100
66
+ hidden_layer2 = 100
67
+
68
+ ##########################################################
69
+ ###### Test Trajectory Splitting #########################
70
+ ##########################################################
71
+ # Read Training data --------------------------------------------------
72
+ fileDir = os.path.dirname(os.path.realpath(__file__)) # base directory for the relative data paths (fileDir was undefined)
+ if epochs_num > 0:
73
+ print('> Loading data... ')
74
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Input_Location_RSSI_10points_365k.csv')
75
+ filename = os.path.abspath(os.path.realpath(filename))
76
+ df1=pd.read_csv(filename)
77
+ df1= np.asarray(df1)
78
+ df1_split = df1[StartingSample:StartingSample+NumSample,:]
79
+ input_training = df1_split.reshape(len(df1_split),timestep,input_layer)
80
+ print(input_training.shape)
81
+
82
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Output_Location_RSSI_10points_365k.csv')
83
+ filename = os.path.abspath(os.path.realpath(filename))
84
+ df2=pd.read_csv(filename)
85
+ df2= np.asarray(df2)
86
+ df2_split = df2[StartingSample:StartingSample+NumSample,:]
87
+ output_training = df2_split.reshape(len(df2_split),timestep,output_layer)
88
+ print(output_training.shape)
89
+
90
+ ########################################################################
91
+ # Read Validation data --------------------------------------------------
92
+ #######################################################################
93
+ df3= df1[StartingValidation:StartingValidation+NumValidation,:]
94
+ input_validation = df3.reshape(len(df3),timestep,input_layer)
95
+ print(input_validation.shape)
96
+
97
+ df4=df2[StartingValidation:StartingValidation+NumValidation,:]
98
+ output_validation = df4.reshape(len(df4),timestep,output_layer)
99
+ print(output_validation.shape)
100
+
101
+ ####################################################################
102
+ # Testing data ------------------------
103
+ ####################################################################
104
+ # RSSI array
105
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_6July_v1_AverageFilter.csv')
106
+ filename = os.path.abspath(os.path.realpath(filename))
107
+ TestData_origin =pd.read_csv(filename)
108
+ print(TestData_origin.shape)
109
+ TestData_origin = np.asarray(TestData_origin)
110
+
111
+ LengthTest = len(TestData_origin)
112
+ StartTestIdx = 0
113
+ StopTestIdx = StartTestIdx + LengthTest
114
+
115
+ # print(TestData.shape)
116
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
117
+ # LengthTest = len(TestData)
118
+
119
+ # List of Locations
120
+ filename = os.path.join(fileDir, '../RSSI_6AP_Experiments/Nexus4_Data/Long_Traj_Location.csv')
121
+ filename = os.path.abspath(os.path.realpath(filename))
122
+ Location_origin = pd.read_csv(filename)
123
+ print(Location_origin.shape)
124
+ Location_origin = np.asarray(Location_origin)
125
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
126
+
127
+ print('> Data Loaded. Compiling...')
128
+
129
+
130
+ ####################################################################
131
+ # Init 9 first Location ###########################################
132
+ ####################################################################
133
+ L1 = Location[0,:]
134
+ L2 = Location[0,:]
135
+ L3 = Location[0,:]
136
+ L4 = Location[0,:]
137
+ L5 = Location[0,:]
138
+ L6 = Location[0,:]
139
+ L7 = Location[0,:]
140
+ L8 = Location[0,:]
141
+ L9 = Location[0,:]
142
+ L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
143
+
144
+ # Take RSSI
145
+ RSSI_L1 = TestData[0,:]
146
+ RSSI_L2 = TestData[1,:]
147
+ RSSI_L3 = TestData[1,:]
148
+ RSSI_L4 = TestData[1,:]
149
+ RSSI_L5 = TestData[1,:]
150
+ RSSI_L6 = TestData[1,:]
151
+ RSSI_L7 = TestData[1,:]
152
+ RSSI_L8 = TestData[1,:]
153
+ RSSI_L9 = TestData[1,:]
154
+ RSSI_L10 = TestData[1,:]
155
+ RSSI_combine = np.concatenate((RSSI_L2, RSSI_L3, RSSI_L4, RSSI_L5,RSSI_L6, RSSI_L7, RSSI_L8, RSSI_L9, RSSI_L10), axis=0)
156
+
157
+ # Build the network --------------------------------------------
158
+ model1 = Sequential()
159
+ model1.add(LSTM(hidden_layer1, input_shape=(timestep, input_layer), return_sequences=True))
160
+ model1.add(Dropout(0.2))
161
+ model1.add(LSTM(hidden_layer2,return_sequences=True))
162
+ model1.add(Dropout(0.2))
163
+ model1.add(TimeDistributed(Dense(output_layer)))
164
+ model1.summary()
165
+ start = time.time()
166
+ model1.compile(loss="mse", optimizer="adam",metrics=[rmse])
167
+ # model1.compile(loss=rmse, optimizer="adam",metrics=[err_absolute]) #rmse
168
+ # root_mean_squared_error
169
+ print("> Compilation Time : ", time.time() - start)
170
+
171
+ # Training --------------------------------------------------------
172
+ loss = list()
173
+ ResultPlot = DataFrame()
174
+
175
+ #### Create a copy of input training ##########################
176
+ if epochs_num > 0:
177
+ input_training_org = input_training
178
+ input_validation_org = input_validation
179
+ ###############################################################
180
+ model1.load_weights("lstm_Model_5_DiffSpeed.h5")
181
+ StartingValidation = 0
182
+ if epochs_num > 0:
183
+ iTimeStep = 1
184
+ iTimeStep_val = 1
185
+ for ep in xrange(epochs_num):
186
+ print("Iteration {} ----- ".format(ep))
187
+
188
+ # Fit Model
189
+ # hist = model1.fit(input_training,output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
190
+ hist = model1.fit(input_training,output_training, epochs=1, batch_size=NumBatch, verbose=1)
191
+ loss.append(hist.history['loss'][0])
192
+ # File3.write(str(hist.history['loss'][0]) + '\n') # write to file
193
+
194
+ # Reform the input training ---------------------------------
195
+ # print("Training Predict Sample {} ... ".format(iTimeStep))
196
+ Predicted_L_Training = model1.predict(input_training)
197
+ # get the predicted locations to become the inputs of the next step
198
+ for iSample in xrange(0,len(df1_split)):
199
+ # First location: known
200
+ # Update predicted Location starting from the second location
201
+ input_training[iSample,iTimeStep,0] = Predicted_L_Training[iSample,iTimeStep-1,0]
202
+ input_training[iSample,iTimeStep,1] = Predicted_L_Training[iSample,iTimeStep-1,1]
203
+ iTimeStep = iTimeStep + 1 # Update for next time step
204
+
205
+ if iTimeStep == timestep: # reset 1 round -------
206
+ input_training = input_training_org
207
+ iTimeStep = 1
208
+ # --------------------------------------------------------------------------------------
209
+
210
+ # Reform the validation set for testing -------------------------------------------------
211
+ # Input is updated by prediction
212
+ # Output: Ideal
213
+ # print("Validation Predict ...")
214
+ # Predicted_L_Validation = model1.predict(input_validation)
215
+ # for iSample in xrange(0,len(df3)):
216
+ # input_validation[iSample,iTimeStep_val,0] = Predicted_L_Validation[iSample,iTimeStep_val-1,0]
217
+ # input_validation[iSample,iTimeStep_val,1] = Predicted_L_Validation[iSample,iTimeStep_val-1,1]
218
+ # iTimeStep_val = iTimeStep_val + 1
219
+ # print("TESTING...........")
220
+
221
+ # if iTimeStep_val == timestep: # reset 1 round -------
222
+ # input_validation = input_validation_org
223
+ # iTimeStep_val = 1
224
+ # ------------------------------------------------------------------------------------------
225
+ # Calculate the validation error -------------------------------------------------
226
+
227
+
228
+ # model1.fit(input_training,output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
229
+ model1.save_weights("lstm_Model_5_DiffSpeed.h5")
230
+ print('Training duration (s) : ', time.time() - global_start_time)
231
+ else:
232
+ print("TESTING...........")
233
+ pass
234
+
235
+ # Testing ---------------------------------------------------------
236
+ #########################################################
237
+ # The first (timestep-1) steps: fill up the location/RSSI buffer
238
+ # #######################################################
239
+ Acc_Location = np.zeros((timestep,2))
240
+ Acc_Location[0,:] = L1
241
+
242
+ for Step in xrange(1,timestep):
243
+
244
+ # Update the Locations & RSSIs buffer
245
+ LocationIdx = 0
246
+ RSSIdx = 0
247
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
248
+ for i in xrange(timestep):
249
+ TestingData[LocationIdx] = L_combine[i*2]
250
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
251
+ for j in xrange(len(RSSI_L2)):
252
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
253
+ LocationIdx = LocationIdx + input_layer
254
+ RSSIdx = RSSIdx + num_rssi_reading
255
+
256
+ TestingData = TestingData.reshape(1,timestep,input_layer)
257
+ # print(TestingData)
258
+
259
+ # Prediction
260
+ Predicted_L = model1.predict(TestingData)
261
+ Acc_Location[Step,:] = Predicted_L[0,Step-1,:]
262
+ print(Acc_Location)
263
+
264
+ # Update for next step
265
+ # Fill the remaining location slots in the buffer with the newest prediction
266
+ IdxTemp = 0
267
+ # Update Location
268
+ for j in xrange(Step*2,len(L_combine)):
269
+ L_combine[j] = Acc_Location[Step,IdxTemp]
270
+ if IdxTemp == 0:
271
+ IdxTemp = 1
272
+ else:
273
+ IdxTemp = 0
274
+ pass
275
+
276
+ # Take RSSI
277
+ IdxTemp = 0
278
+ for j in xrange(Step*num_rssi_reading,len(RSSI_combine)):
279
+ RSSI_combine[j] = TestData[Step+1,IdxTemp]
280
+ IdxTemp = IdxTemp + 1
281
+ if IdxTemp == num_rssi_reading: # Reach the end
282
+ IdxTemp = 0
283
+
284
+ #########################################################
285
+ # After the buffer is full - do the test
286
+ # #######################################################
287
+
288
+ CountArray = np.ones(timestep)
289
+ error = np.zeros(LengthTest-1)
290
+ # Predicted_array = np.zeros((LengthTest-1,2))
291
+ Average_Err = 0
292
+
293
+ for CountTest in xrange(LengthTest-timestep):
294
+ print("Location {}------------".format(CountTest))
295
+ # Update the Locations & RSSIs buffer
296
+ LocationIdx = 0
297
+ RSSIdx = 0
298
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
299
+ for i in xrange(timestep):
300
+ TestingData[LocationIdx] = L_combine[i*2]
301
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
302
+ for j in xrange(len(RSSI_L2)):
303
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
304
+ LocationIdx = LocationIdx + input_layer
305
+ RSSIdx = RSSIdx + num_rssi_reading
306
+
307
+ TestingData = TestingData.reshape(1,timestep,input_layer)
308
+ # print(TestingData)
309
+
310
+ # Prediction
311
+ Predicted_L = model1.predict(TestingData)
312
+
313
+ # for t in xrange(timestep):
314
+ # File4.write(str(Predicted_L[0,t,0]) + ',')
315
+ # File4.write(str(Predicted_L[0,t,1])+ ',') # write to file
316
+ # File4.write('\n')
317
+
318
+ # Re-arrange Accumulated Location
319
+ for t in xrange(timestep-1):
320
+ CountArray[t] = CountArray[t+1]
321
+ Acc_Location[t,:] = Acc_Location[t+1,:]+Predicted_L[0,t,:]
322
+ CountArray[t] = CountArray[t] + 1
323
+
324
+ Acc_Location[timestep-1,:] = Predicted_L[0,timestep-1,:] # Update new Location
325
+ CountArray[timestep-1] = 1 #Update New Count
326
+ # print(Acc_Location)
327
+ # print(CountArray)
328
+ ######################################################################
329
+ ############# UPDATE LOCATION ########################################
330
+ ######################################################################
331
+
332
+ Final_L = Acc_Location[0,:]/CountArray[0]
333
+ CountArray[0] = 1
334
+ # Take correct location, compare the result
335
+ Correct_L = Location[CountTest+1,:]
336
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
337
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
338
+ Average_Err = Average_Err + error[CountTest]
339
+
340
+ File1.write(str(error[CountTest]) + ' , ') # write to file
341
+ File2.write(str(Final_L[0]) + ',')
342
+ File2.write(str(Final_L[1]) + '\n') # write to file
343
+
344
+ # Re-arrange L_combine
345
+ for t in xrange(timestep-1):
346
+ L_combine[t*2] = L_combine[(t+1)*2]
347
+ L_combine[t*2+1] = L_combine[(t+1)*2+1]
348
+ # Update
349
+ L_combine[(timestep-1)*2] = Predicted_L[0,timestep-1,0]
350
+ L_combine[(timestep-1)*2+1] = Predicted_L[0,timestep-1,1]
351
+ # Re-arrange RSSI_combine
352
+ if CountTest+timestep+1 < LengthTest:
353
+ for t in xrange(timestep-1):
354
+ for k in xrange(num_rssi_reading):
355
+ RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
356
+
357
+ for k in xrange(num_rssi_reading):
358
+ RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+timestep+1,k]
359
+
360
+ ###################################################################
361
+ ############### The Last Locations ###############################
362
+ ###################################################################
363
+ for i in xrange(timestep-1):
364
+ Final_L = Acc_Location[i+1,:]/CountArray[i+1]
365
+ # Take correct location, compare the result
366
+ Correct_L = Location[CountTest+1,:]
367
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
368
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
369
+ Average_Err = Average_Err + error[CountTest]
370
+
371
+ File1.write(str(error[CountTest]) + ' , ') # write to file
372
+ File2.write(str(Final_L[0]) + ',')
373
+ File2.write(str(Final_L[1]) + '\n') # write to file
374
+ CountTest = CountTest + 1
375
+
376
+ Average_Err = Average_Err/(LengthTest-1)
377
+ print "Average Error: ", Average_Err
378
+
379
+ Std_Err = 0
380
+ for k in xrange(LengthTest-1):
381
+ Std_Err = Std_Err + np.power((error[k] - Average_Err),2)
382
+ Std_Err = Std_Err/(LengthTest-1)
383
+ Std_Err = np.sqrt(Std_Err)
384
+ print "Std: ", Std_Err
385
+
386
+ #### Show Figure ######################
387
+ #if epochs_num > 0:
388
+ # ResultPlot['neurons_500'] = loss
389
+ # ResultPlot.plot()
390
+ # pyplot.show()
391
+
392
+ File1.close()
393
+ File2.close()
394
+ # File3.close()
395
+ # File4.close()
LSTM_Model_5_10fold.py ADDED
@@ -0,0 +1,389 @@
1
+ #############################################################
2
+ # 10 locations/ trajectory
3
+ # First location is known
4
+ # Date: 8 June 2018
5
+ #############################################################
6
+
7
+ import time
8
+ import tensorflow
9
+ # import lstm
10
+ import os
11
+ #import time
12
+ import warnings
13
+ import numpy as np
14
+ from numpy import newaxis
15
+ from keras.layers import TimeDistributed
16
+ from keras.layers.core import Dense, Activation, Dropout
17
+ from keras.layers.recurrent import LSTM
18
+ from keras.models import Sequential
19
+ # import matplotlib.pyplot as plt
20
+ from matplotlib import pyplot
21
+ import pandas as pd
22
+ import math
23
+ from pandas import DataFrame
24
+ from keras import backend as K
25
+
26
+ def rmse(y_true, y_pred):
27
+ return K.sqrt(K.mean(K.square(y_pred - y_true), axis=-1))
28
+ def err_absolute(y_true, y_pred):
29
+ err = K.sqrt(K.square(y_pred - y_true))
30
+ return err
31
+
32
+ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' #Hide messy TensorFlow warnings
33
+ warnings.filterwarnings("ignore") #Hide messy Numpy warnings
34
+
35
+ # Open File to write results
36
+ File1 = open('Error_Model_5_10_fold.csv','w')
37
+ # File2 = open('Traj_Model_5_DiffSpeed_2ms.csv','w')
38
+ # File3 = open('Loss_PredictedL_50k_NoFilter.csv','w')
39
+ # File4 = open('Output_PredictedL_50k_NoFilter.csv','w')
40
+
41
+ NumTotalTraj = 365000
42
+
43
+ # Configure the sampling set -----------------
44
+ epochs_num = 500
45
+ N_fold = 10
46
+ NumTrajTraining = 20000
47
+ NumSam_PerTraj = 1
48
+ StartingTraj = 0
49
+ StartingSample = StartingTraj*NumSam_PerTraj # the factor of NumSam_PerTraj
50
+ NumSample = NumTrajTraining*NumSam_PerTraj # NumTraj = NumSample/NumSam_PerTraj
51
+
52
+ # Configure the validation set -----------------
53
+ NumTrajVal = 10000
54
+ StartingTrajVal = 100000
55
+
56
+ StartingValidation = StartingTrajVal*NumSam_PerTraj
57
+ NumValidation = NumTrajVal*NumSam_PerTraj
58
+
59
+ # Main Run Thread
60
+ global_start_time = time.time()
61
+ input_layer = 13 # X, Y & 11 MAC Addresses
62
+ num_rssi_reading = 11
63
+ output_layer = 2
64
+ timestep = 9
65
+ NumBatch = 512 #512
66
+ hidden_layer1 = 100
67
+ hidden_layer2 = 100
68
+
69
+ ##########################################################
70
+ ###### Test Trajectory Splitting #########################
71
+ ##########################################################
72
+ # Read Training data --------------------------------------------------
73
+ print('> Loading data... ')
74
+ df1=pd.read_csv('Input_Location_RSSI_10points_365k.csv')
75
+ df1= np.asarray(df1)
76
+
77
+ df2=pd.read_csv('Output_Location_RSSI_10points_365k.csv')
78
+ df2= np.asarray(df2)
79
+
80
+ ########################################################################
81
+ # Read Validation data --------------------------------------------------
82
+ #######################################################################
83
+ # df3= df1[StartingValidation:StartingValidation+NumValidation,:]
84
+ # input_validation = df3.reshape(len(df3),timestep,input_layer)
85
+ # print(input_validation.shape)
86
+
87
+ # df4=df2[StartingValidation:StartingValidation+NumValidation,:]
88
+ # output_validation = df4.reshape(len(df4),timestep,output_layer)
89
+ # print(output_validation.shape)
90
+
91
+ ####################################################################
92
+ # Testing data ------------------------
93
+ ####################################################################
94
+ # RSSI array
95
+ TestData_origin =pd.read_csv('Long_Traj_6July_v1_AverageFilter.csv')
96
+ print(TestData_origin.shape)
97
+ TestData_origin = np.asarray(TestData_origin)
98
+
99
+ LengthTest = len(TestData_origin)
100
+ StartTestIdx = 0
101
+ StopTestIdx = StartTestIdx + LengthTest
102
+
103
+ # print(TestData.shape)
104
+ TestData = TestData_origin[StartTestIdx:StopTestIdx, :]
105
+ # LengthTest = len(TestData)
106
+
107
+ # List of Locations
108
+ Location_origin = pd.read_csv('Long_Traj_Location.csv')
109
+ print(Location_origin.shape)
110
+ Location_origin = np.asarray(Location_origin)
111
+ Location = Location_origin[StartTestIdx:StopTestIdx, :]
112
+
113
+ print('> Data Loaded. Compiling...')
114
+
115
+ ####################################################################
116
+ # Init 9 first Location ###########################################
117
+ ####################################################################
118
+ L1 = Location[0,:]
119
+ L2 = Location[0,:]
120
+ L3 = Location[0,:]
121
+ L4 = Location[0,:]
122
+ L5 = Location[0,:]
123
+ L6 = Location[0,:]
124
+ L7 = Location[0,:]
125
+ L8 = Location[0,:]
126
+ L9 = Location[0,:]
127
+
128
+ # Take RSSI
129
+ RSSI_L1 = TestData[0,:]
130
+ RSSI_L2 = TestData[1,:]
131
+ RSSI_L3 = TestData[1,:]
132
+ RSSI_L4 = TestData[1,:]
133
+ RSSI_L5 = TestData[1,:]
134
+ RSSI_L6 = TestData[1,:]
135
+ RSSI_L7 = TestData[1,:]
136
+ RSSI_L8 = TestData[1,:]
137
+ RSSI_L9 = TestData[1,:]
138
+ RSSI_L10 = TestData[1,:]
139
+
140
+ # Build the network --------------------------------------------
141
+ model1 = Sequential()
142
+ model1.add(LSTM(hidden_layer1, input_shape=(timestep, input_layer), return_sequences=True))
143
+ model1.add(Dropout(0.2))
144
+ model1.add(LSTM(hidden_layer2,return_sequences=True))
145
+ model1.add(Dropout(0.2))
146
+ model1.add(TimeDistributed(Dense(output_layer)))
147
+ model1.summary()
148
+ start = time.time()
149
+ model1.compile(loss="mse", optimizer="adam",metrics=[rmse])
150
+ # model1.compile(loss=rmse, optimizer="adam",metrics=[err_absolute]) #rmse
151
+ # root_mean_squared_error
152
+ print("> Compilation Time : ", time.time() - start)
153
+
154
+ # Training --------------------------------------------------------
155
+ loss = list()
156
+ ResultPlot = DataFrame()
157
+
158
+ #### Create a copy of input training ##########################
159
+ # input_training_org = input_training
160
+ # input_validation_org = input_validation
161
+ ###############################################################
162
+ # model1.load_weights("lstm_Model_5_DiffSpeed.h5")
163
+ # StartingValidation = 0
164
+ for CountFold in xrange(1, N_fold):
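+ # each fold trains on its own block of NumTrajTraining trajectories (offset by CountFold)
+ # and saves a separate weight file; note xrange(1, N_fold) covers folds 1..N_fold-1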
165
+ # training samples -----------------------
166
+ StartingTraj = CountFold*NumTrajTraining
167
+ StartingSample = StartingTraj*NumSam_PerTraj # the factor of NumSam_PerTraj
168
+ df1_split = df1[StartingSample:StartingSample+NumSample,:]
169
+ input_training = df1_split.reshape(len(df1_split),timestep,input_layer)
170
+ # print(input_training.shape)
171
+ input_training_org = input_training
172
+
173
+ df2_split = df2[StartingSample:StartingSample+NumSample,:]
174
+ output_training = df2_split.reshape(len(df2_split),timestep,output_layer)
175
+ # print(output_training.shape)
176
+
177
+ H5_Name = 'lstm_Model_5_fold_'+ str(CountFold) + '.h5'
178
+ iTimeStep = 1
179
+ iTimeStep_val = 1
180
+ for ep in xrange(epochs_num):
181
+ print("Iteration {} ----- ".format(ep))
182
+
183
+ # Fit Model
184
+ # hist = model1.fit(input_training,output_training, validation_data=(input_validation, output_validation), epochs=1, batch_size=NumBatch, verbose=1)
185
+ hist = model1.fit(input_training,output_training, epochs=1, batch_size=NumBatch, verbose=1, shuffle=False)
186
+ loss.append(hist.history['loss'][0])
187
+ # File3.write(str(hist.history['loss'][0]) + '\n') # write to file
188
+
189
+ # Reform the input training ---------------------------------
190
+ print("Training Predict Sample {} ... ".format(iTimeStep))
191
+ Predicted_L_Training = model1.predict(input_training)
192
+ # get the predicted locations to become the inputs of the next step
193
+ for iSample in xrange(0,len(df1_split)):
194
+ # First location: known
195
+ # Update predicted Location starting from the second location
196
+ input_training[iSample,iTimeStep,0] = Predicted_L_Training[iSample,iTimeStep-1,0]
197
+ input_training[iSample,iTimeStep,1] = Predicted_L_Training[iSample,iTimeStep-1,1]
198
+ iTimeStep = iTimeStep + 1 # Update for next time step
199
+
200
+ if iTimeStep == timestep: # reset 1 round -------
201
+ input_training = input_training_org
202
+ iTimeStep = 1
203
+ # --------------------------------------------------------------------------------------
204
+
205
+ # Reform the validation set for testing -------------------------------------------------
206
+ # Input is updated by prediction
207
+ # Output: Ideal
208
+ # print("Validation Predict ...")
209
+ # Predicted_L_Validation = model1.predict(input_validation)
210
+ # for iSample in xrange(0,len(df3)):
211
+ # input_validation[iSample,iTimeStep_val,0] = Predicted_L_Validation[iSample,iTimeStep_val-1,0]
212
+ # input_validation[iSample,iTimeStep_val,1] = Predicted_L_Validation[iSample,iTimeStep_val-1,1]
213
+ # iTimeStep_val = iTimeStep_val + 1
214
+ # print("TESTING...........")
215
+
216
+ # if iTimeStep_val == timestep: # reset 1 round -------
217
+ # input_validation = input_validation_org
218
+ # iTimeStep_val = 1
219
+ # ------------------------------------------------------------------------------------------
220
+ # Calculate the validation error ------------------------------------------------
221
+
222
+ # model1.fit(input_training,output_training, validation_split=0.2, epochs=epochs_num, batch_size=512)
223
+ model1.save_weights(H5_Name)
224
+ # print('Training duration (s) 100: ', time.time() - global_start_time)
225
+
226
+ # Testing ---------------------------------------------------------
227
+ #########################################################
228
+ # The first (timestep-1) steps: fill up the location/RSSI buffer
229
+ # #######################################################
230
+ Acc_Location = np.zeros((timestep,2))
231
+ Acc_Location[0,:] = L1
232
+ L_combine = np.concatenate((L1,L2,L3,L4,L5,L6,L7,L8,L9), axis=0)
233
+ RSSI_combine = np.concatenate((RSSI_L2, RSSI_L3, RSSI_L4, RSSI_L5,RSSI_L6, RSSI_L7, RSSI_L8, RSSI_L9, RSSI_L10), axis=0)
234
+
235
+ for Step in xrange(1,timestep):
236
+
237
+ # Update the Locations & RSSIs buffer
238
+ LocationIdx = 0
239
+ RSSIdx = 0
240
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
241
+ for i in xrange(timestep):
242
+ TestingData[LocationIdx] = L_combine[i*2]
243
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
244
+ for j in xrange(len(RSSI_L2)):
245
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
246
+ LocationIdx = LocationIdx + input_layer
247
+ RSSIdx = RSSIdx + num_rssi_reading
248
+ # print(TestingData)
249
+
250
+ TestingData = TestingData.reshape(1,timestep,input_layer)
251
+ # print(TestingData)
252
+
253
+ # Prediction
254
+ Predicted_L = model1.predict(TestingData)
255
+ Acc_Location[Step,:] = Predicted_L[0,Step-1,:]
256
+ print(Acc_Location)
257
+
258
+ # Update for next step
259
+ # Fill the remaining location slots in the buffer with the newest prediction
260
+ IdxTemp = 0
261
+ # Update Location
262
+ for j in xrange(Step*2,len(L_combine)):
263
+ L_combine[j] = Acc_Location[Step,IdxTemp]
264
+ if IdxTemp == 0:
265
+ IdxTemp = 1
266
+ else:
267
+ IdxTemp = 0
268
+ pass
269
+
270
+ # Take RSSI
271
+ IdxTemp = 0
272
+ for j in xrange(Step*num_rssi_reading,len(RSSI_combine)):
273
+ RSSI_combine[j] = TestData[Step+1,IdxTemp]
274
+ IdxTemp = IdxTemp + 1
275
+ if IdxTemp == num_rssi_reading: # Reach the end
276
+ IdxTemp = 0
277
+
278
+ #########################################################
279
+ # After the buffer is full - do the test
280
+ # #######################################################
281
+
282
+ CountArray = np.ones(timestep)
283
+ error = np.zeros(LengthTest-1)
284
+ # Predicted_array = np.zeros((LengthTest-1,2))
285
+ Average_Err = 0
286
+
287
+ for CountTest in xrange(LengthTest-timestep):
288
+ print("Location {}------------".format(CountTest))
289
+ # Update the Locations & RSSIs buffer
290
+ LocationIdx = 0
291
+ RSSIdx = 0
292
+ TestingData = np.zeros(len(L_combine)+len(RSSI_combine))
293
+ for i in xrange(timestep):
294
+ TestingData[LocationIdx] = L_combine[i*2]
295
+ TestingData[LocationIdx+1] = L_combine[i*2+1]
296
+ for j in xrange(len(RSSI_L2)):
297
+ TestingData[j+LocationIdx+2] = (float(RSSI_combine[j+RSSIdx])+100)/100
298
+ LocationIdx = LocationIdx + input_layer
299
+ RSSIdx = RSSIdx + num_rssi_reading
300
+
301
+ TestingData = TestingData.reshape(1,timestep,input_layer)
302
+ # print(TestingData)
303
+
304
+ # Prediction
305
+ Predicted_L = model1.predict(TestingData)
306
+
307
+ # for t in xrange(timestep):
308
+ # File4.write(str(Predicted_L[0,t,0]) + ',')
309
+ # File4.write(str(Predicted_L[0,t,1])+ ',') # write to file
310
+ # File4.write('\n')
311
+
312
+ # Re-arrange Accumulated Location
313
+ for t in xrange(timestep-1):
314
+ CountArray[t] = CountArray[t+1]
315
+ Acc_Location[t,:] = Acc_Location[t+1,:]+Predicted_L[0,t,:]
316
+ CountArray[t] = CountArray[t] + 1
317
+
318
+ Acc_Location[timestep-1,:] = Predicted_L[0,timestep-1,:] # Update new Location
319
+ CountArray[timestep-1] = 1 #Update New Count
320
+ # print(Acc_Location)
321
+ # print(CountArray)
322
+ ######################################################################
323
+ ############# UPDATE LOCATION ########################################
324
+ ######################################################################
325
+
326
+ Final_L = Acc_Location[0,:]/CountArray[0]
327
+ CountArray[0] = 1
328
+ # Take correct location, compare the result
329
+ Correct_L = Location[CountTest+1,:]
330
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
331
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), (error[CountTest]))
332
+ Average_Err = Average_Err + error[CountTest]
333
+
334
+ File1.write(str(error[CountTest]) + ' , ') # write to file
335
+ # File2.write(str(Final_L[0]) + ',')
336
+ # File2.write(str(Final_L[1]) + '\n') # write to file
337
+ # Re-arrange L_combine
338
+ for t in xrange(timestep-1):
339
+ L_combine[t*2] = L_combine[(t+1)*2]
340
+ L_combine[t*2+1] = L_combine[(t+1)*2+1]
341
+ # Update
342
+ L_combine[(timestep-1)*2] = Predicted_L[0,timestep-1,0]
343
+ L_combine[(timestep-1)*2+1] = Predicted_L[0,timestep-1,1]
344
+ # Re-arrange RSSI_combine
345
+ if CountTest+timestep+1 < LengthTest:
346
+ for t in xrange(timestep-1):
347
+ for k in xrange(num_rssi_reading):
348
+ RSSI_combine[t*num_rssi_reading+k] = RSSI_combine[(t+1)*num_rssi_reading+k]
349
+
350
+ for k in xrange(num_rssi_reading):
351
+ RSSI_combine[(timestep-1)*num_rssi_reading+k] = TestData[CountTest+timestep+1,k]
352
+
353
+ ###################################################################
354
+ ############### The Last Locations ###############################
355
+ ###################################################################
356
+ for i in xrange(timestep-1):
357
+ Final_L = Acc_Location[i+1,:]/CountArray[i+1]
358
+ # Take correct location, compare the result
359
+ Correct_L = Location[CountTest+1,:]
360
+ error[CountTest] = np.sqrt(np.power((Final_L[0] - Correct_L[0]),2)+np.power((Final_L[1] - Correct_L[1]),2))
361
+ print "Predict: {}--- Exact: {} , Error: {}" .format((Final_L[0], Final_L[1]), (Correct_L[0], Correct_L[1]), error[CountTest])
362
+ Average_Err = Average_Err + error[CountTest]
363
+
364
+ File1.write(str(error[CountTest]) + ' , ') # write to file
365
+ # File2.write(str(Final_L[0]) + ',')
366
+ # File2.write(str(Final_L[1]) + '\n') # write to file
367
+ CountTest = CountTest + 1
368
+
369
+ File1.write('\n')
370
+ Average_Err = Average_Err/(LengthTest-1)
371
+ print "Average Error: ", Average_Err
372
+
373
+ Std_Err = 0
374
+ for k in xrange(LengthTest-1):
375
+ Std_Err = Std_Err + np.power((error[k] - Average_Err),2)
376
+ Std_Err = Std_Err/(LengthTest-1)
377
+ Std_Err = np.sqrt(Std_Err)
378
+ print "Std: ", Std_Err
379
+
380
+ #### Show Figure ######################
381
+ #if epochs_num > 0:
382
+ # ResultPlot['neurons_500'] = loss
383
+ # ResultPlot.plot()
384
+ # pyplot.show()
385
+
386
+ File1.close()
387
+ # File2.close()
388
+ # File3.close()
389
+ # File4.close()
Median_Filter.m ADDED
@@ -0,0 +1,18 @@
1
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2
+ % Median Filter
3
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
4
+ % N: Window Size
5
+ % RSSI_Before: Array
6
+ % RSSI_After : Scalar
7
+ % n: number of available samples in the RSSI_Before
8
+
9
+ function RSSI_After = Median_Filter(RSSI_Before, n)
10
+
11
+ %
12
+ if n < length(RSSI_Before) % not enough samples yet: pass the latest reading through
13
+ RSSI_After = RSSI_Before(n);
14
+ else % window is full: output the median
15
+ RSSI_After = median(RSSI_Before);
16
+ end
17
+
18
+ end
NormalizedDatabase.m ADDED
@@ -0,0 +1,81 @@
1
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2
+ % X Y Z MAC1 Mean_RSSI1 MAC2 Mean_RSSI2 ...
3
+ % Normalize Database - Standardization
4
+ %
5
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
6
+ clear;
7
+ close all;
8
+ clc;
9
+
10
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
11
+ % Parameter - 1 Unit = 40 inches
12
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
13
+ Num_Mac = 11; % Number of APs per vector in database
14
+
15
+ % Normalize ------------------------------------
16
+ % r = (r- mean)/sigma
17
+ % -----------------------------------------------
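+ % (similar to MATLAB's zscore per column, except the std below is normalized by N rather than N-1)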
18
+
19
+ % Test
20
+ myFolder = 'C:\Users\minh_\Desktop\CSI_RSSI_Database\RSSI_6AP_Experiments\Nexus4_Data\'; % Database Folder
21
+ InputTest = importdata([myFolder 'UpdatedDatabase_8June2018_11MAC_AverageFilter.csv']);
22
+ Database = InputTest;
23
+ Temp = size(Database);
24
+ LengthDatabase = Temp(1); % Number of Test points
25
+
26
+ % % % % InputMean = importdata('NormalizeddDatabase_AverageFilter.csv');
27
+
28
+ RSSI_Array = Database(:,3:end);
29
+ Location_Array = Database(:,1:2);
30
+ NumLocation = length(Location_Array);
31
+
32
+ %%%% Locations %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
33
+ MeanArray_Location = zeros(1,2);
34
+ StdArray_Location = zeros(1,2);
35
+ for ii = 1:2
36
+ MeanArray_Location(ii) = mean(Location_Array(:,ii));
37
+ end
38
+ for Count = 1:NumLocation
39
+ for CountMac = 1:2
40
+ StdArray_Location(CountMac) = StdArray_Location(CountMac) + (Location_Array(Count,CountMac)-MeanArray_Location(CountMac))^2;
41
+ end
42
+ end
43
+ StdArray_Location = StdArray_Location/LengthDatabase;
44
+ StdArray_Location = sqrt(StdArray_Location);
45
+
46
+ %%%% RSSI %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
47
+ % Calculate Mean & Standard deviation for every AP
48
+ MeanArray_RSSI = zeros(1,Num_Mac);
49
+ %%%% Mean Calculation %%%%%%%%%%%%%%%%
50
+ for Count = 1:LengthDatabase
51
+ for CountMac = 1:Num_Mac
52
+ MeanArray_RSSI(CountMac) = MeanArray_RSSI(CountMac) + RSSI_Array(Count,CountMac);
53
+ end
54
+ end
55
+ MeanArray_RSSI = round(MeanArray_RSSI / LengthDatabase);
56
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
57
+ %%%% Standard Deviation %%%%%%%%%%%%%%%%%%%%%
58
+ StdArray_RSSI = zeros(1,Num_Mac);
59
+ for Count = 1:LengthDatabase
60
+ for CountMac = 1:Num_Mac
61
+ StdArray_RSSI(CountMac) = StdArray_RSSI(CountMac) + (RSSI_Array(Count,CountMac)-MeanArray_RSSI(CountMac))^2;
62
+ end
63
+ end
64
+ StdArray_RSSI = StdArray_RSSI/LengthDatabase;
65
+ StdArray_RSSI = round(sqrt(StdArray_RSSI));
66
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
67
+
68
+ %% Normalized Step
69
+ Normalized_Database = zeros(size(Database));
70
+ for Count = 1:LengthDatabase
71
+ % Location
72
+ for ii = 1:2
73
+ Normalized_Database(Count,ii) = (Database(Count,ii) - MeanArray_Location(ii))/StdArray_Location(ii);
74
+ end
75
+ % RSSI
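+ % (columns 3..13 hold the Num_Mac = 11 RSSI readings)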
76
+ for ii = 3:13
77
+ Normalized_Database(Count,ii) = (Database(Count,ii) - MeanArray_RSSI(ii-2))/StdArray_RSSI(ii-2);
78
+ end
79
+ end
80
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
81
+ csvwrite([myFolder 'Normalize_Database_AverageFilter.csv'],Normalized_Database);
Process_Combine.pdf ADDED
Binary file (746 kB).
README.md CHANGED
@@ -1,3 +1,18 @@
1
- ---
2
- license: cc-by-nc-sa-4.0
3
- ---
1
+ # Recurrent Neural Networks for Accurate RSSI Indoor Localization
2
+
3
+ Source code for M.T. Hoang, B. Yuen, X. Dong, T. Lu, R. Westendorp and K. Reddy, “Recurrent Neural Networks for Accurate RSSI Indoor Localization,” IEEE Internet of Things Journal, 2019
4
+
5
+
6
+ # Folder Structure
7
+ * Step1_FilterDatabase.m: Filter the database with Average Weighted Filter or Mean Filter
8
+ * Step2_Create_RandomTraj.m: Generate random training trajectories under the constraint that the distance between consecutive locations is bounded by
9
+ the maximum distance a user can travel within one sampling interval in practical scenarios.
10
+ * Step2_CreateInputTraining_Model5.m: Create the input training data for the P-MIMO LSTM (Model 5)
11
+ * RNN models training code (Using Keras and Tensorflow)
12
+ * LSTM_Model_1.py
13
+ * LSTM_Model_2.py
14
+ * LSTM_Model_3.py
15
+ * LSTM_Model_4.py
16
+ * LSTM_Model_5.py
17
+
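+ # Usage (sketch)
+ A typical end-to-end run, assuming MATLAB plus a Python 2 environment with Keras/TensorFlow and the
+ CSV database files referenced inside the scripts:
+ 1. Run Step1_FilterDatabase.m to filter the raw RSSI database.
+ 2. Run Step2_Create_RandomTraj.m (and Step2_CreateInputTraining_Model5 for Model 5) to build the training data.
+ 3. Run `python LSTM_Model_5.py` (or another model script) to train and test.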
18
+
Step1_FilterDatabase.m ADDED
@@ -0,0 +1,94 @@
1
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2
+ % Wifi Indoor Localization
3
+ % Minhtu
4
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
5
+ % Filter the database with a weighted average filter or a median filter
6
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
7
+ clear;
8
+ close all;
9
+ clc;
10
+
11
+ FILTER_OPTION = 1; % 1: Average Filter
12
+ % 0: Median Filter
13
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
14
+ % Parameter - 1 Unit = 1m
15
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
16
+ Num_Mac = 11; % Number of APs per vector in database
17
+
18
+ % Test
19
+ myFolder = 'C:\Users\minh_\Desktop\CSI_RSSI_Database\RSSI_6AP_Experiments\Nexus4_Data\'; % Database Folder
20
+ Input = importdata([myFolder 'UpdatedDatabase_24Jan2018_Full.csv']);
21
+ Database = Input.data;
22
+ Temp = size(Database);
23
+ LengthDatabase = Temp(1); % Number of RPs
24
+
25
+ %%% Split the database into per-location blocks of readings
26
+ PreX = Database(1,1);
27
+ PreY = Database(1,2);
28
+ StartingPoint = 1;
29
+ CountLocation = 0;
30
+ for CountBlock = 2:LengthDatabase
31
+ X = Database(CountBlock,1);
32
+ Y = Database(CountBlock,2);
33
+ if (X ~= PreX) || (Y ~= PreY) || (CountBlock == LengthDatabase)
34
+ EndingPoint = CountBlock-1;
35
+ if (CountBlock == LengthDatabase) && (X == PreX) && (Y == PreY)
+ EndingPoint = CountBlock; % include the final row so the last reading is also filtered
+ end
36
+ PreX = X;
+ PreY = Y;
37
+ CountLocation = CountLocation + 1 % no trailing semicolon: echoes progress to the console
38
+ LengthBlock = EndingPoint - StartingPoint + 1;
39
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
40
+ %%%%%%%%%%%%%%% Filter RSSI in a specific location %%%%%%%%%%%
41
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
42
+ RSSI_Original = Database(StartingPoint:EndingPoint, 3:end);
43
+ RSSI_Filtered = zeros(size(RSSI_Original));
44
+ F1_Before = zeros(1,3);
45
+ F2_Before = zeros(1,3);
46
+ RSSI_Before = zeros(1,3);
47
+
48
+ for CountMac = 1:Num_Mac
49
+ RSSI_Array_Temp = RSSI_Original(1:LengthBlock, CountMac);
50
+ RSSI_Array_After = zeros(LengthBlock, 1);
51
+ MeanValue = mean(RSSI_Array_Temp);
52
+ n = 1;
53
+ for CountPoint = 1:LengthBlock
54
+ RSSI_Temp = RSSI_Array_Temp(CountPoint);
55
+ if RSSI_Temp == -100 % -100 dBm marks a missing reading; replace it with the block mean
56
+ RSSI_Temp = MeanValue;
57
+ end
58
+ if n == 1
59
+ RSSI_Before(1) = RSSI_Temp;
60
+ elseif n == 2
61
+ RSSI_Before(2) = RSSI_Temp;
62
+ elseif n == 3
63
+ RSSI_Before(3) = RSSI_Temp;
64
+ else
65
+ RSSI_Before(1) = RSSI_Before(2);
66
+ RSSI_Before(2) = RSSI_Before(3);
67
+ RSSI_Before(3) = RSSI_Temp;
68
+ end
69
+ % Median Filter
70
+ if FILTER_OPTION == 0 % Median Filter
71
+ RSSI_After_Median = Median_Filter(RSSI_Before,n);
72
+ RSSI_Array_After(CountPoint) = round(RSSI_After_Median);
73
+ n = n + 1;
74
+ else
75
+ % Average Filter
76
+ % Filter
77
+ [RSSI_After, F1, F2, TimeCount] = Average_Filter(RSSI_Before, F1_Before, F2_Before,n);
78
+ % Updated Array
79
+ n = TimeCount;
80
+ F1_Before = F1;
81
+ F2_Before = F2;
82
+ RSSI_Array_After(CountPoint) = round(RSSI_After);
83
+ end
84
+ end
85
+ RSSI_Filtered(1:LengthBlock, CountMac) = RSSI_Array_After;
86
+ end
87
+ % Update to Database
88
+ Database(StartingPoint:EndingPoint, 3:end) = RSSI_Filtered;
89
+ StartingPoint = CountBlock; % Restart Starting Point
90
+ end
91
+ end
92
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
93
+ csvwrite([myFolder 'UpdatedDatabase_8June2018_11MAC_AverageFilter.csv'],Database);
94
+
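As a quick sanity check of the filtering loop above, the same three-sample window bookkeeping can be exercised on a toy stream (a minimal sketch; `Average_Filter.m` must be on the MATLAB path, and the readings are hypothetical):

```matlab
% Toy run of the recursive weighted average filter on one AP's readings.
RSSI_Raw    = [-60 -62 -61 -75 -63 -64];   % hypothetical stream, in dBm
RSSI_Before = zeros(1,3); F1_Before = zeros(1,3); F2_Before = zeros(1,3);
RSSI_Out    = zeros(size(RSSI_Raw));
n = 1;
for k = 1:numel(RSSI_Raw)
    if n <= 3
        RSSI_Before(n) = RSSI_Raw(k);                  % fill the window
    else
        RSSI_Before = [RSSI_Before(2:3) RSSI_Raw(k)];  % slide the window
    end
    [RSSI_Out(k), F1_Before, F2_Before, n] = ...
        Average_Filter(RSSI_Before, F1_Before, F2_Before, n);
end
```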
Step2_CreateInputTraining_Model1.m ADDED
@@ -0,0 +1,63 @@
1
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2
+ % Wifi Indoor Localization
3
+ % Minhtu
4
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
5
+ clear;
6
+ close all;
7
+ clc;
8
+
9
+ % Database
10
+ myFolder = 'C:\Users\minh_\Desktop\CSI_RSSI_Database\RSSI_6AP_Experiments\Nexus4_Data\'; % Database Folder
11
+
12
+ Input = importdata([myFolder 'UpdatedDatabase_8June2018_11MAC_AverageFilter.csv']);
13
+ Database = Input;
14
+ L_Data = length(Database);
15
+ NumReading = 11;
16
+
17
+ TrajInput = importdata([myFolder 'Traj_10points_5k_5m_s.csv']);
18
+ TrajData = TrajInput;
19
+
20
+ SizeTraj = size(TrajData);
21
+ L = SizeTraj(1);
22
+ NumPointPerTraj = SizeTraj(2)/2;
23
+
24
+ %-----------------------------------------------------------
25
+ %%%% Add RSSI to the trajectory
26
+ %-----------------------------------------------------------
27
+ RNN_Database = zeros(L, NumPointPerTraj*NumReading); % one RSSI block per point; (NumPointPerTraj-1)*NumReading would under-allocate and force MATLAB to grow the array
28
+ for ii = 1:L % Scan all trajectory
29
+ CntColRNN = 1;
30
+ for jj = 1: NumPointPerTraj % Scan all points in the trajectory
31
+
32
+ X = TrajData(ii,(jj-1)*2+1);
33
+ Y = TrajData(ii,(jj-1)*2+2);
34
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
35
+ % % % X = TrajData(ii,jj*2+1); % Take the next RSSI for current point
36
+ % % % Y = TrajData(ii,jj*2+2);
37
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
38
+
39
+ for kk = 1:5:L_Data
40
+ if (abs(Database(kk,1) - X) < 10^-3)&&(abs(Database(kk,2) - Y) < 10^-3)
41
+ RandNum = round(100*rand(1)); % Randomly pick up 1 RSSI Reading Row
42
+ if kk+RandNum > length(Database)
43
+ RandNum = 0;
44
+ end
45
+ if (abs(Database(kk+RandNum,1) - X) < 10^-3)&&(abs(Database(kk+RandNum,2) - Y) < 10^-3)
46
+ for mm = 1:NumReading
47
+ % RNN_Database(ii, CntColRNN) = Database(kk+RandNum,mm+2);
48
+ RNN_Database(ii, CntColRNN) = (Database(kk+RandNum,mm+2)+100)/100;
49
+ CntColRNN = CntColRNN+1;
50
+ end
51
+ else % if out of range, pick up the 1st one
52
+ for mm = 1:NumReading
53
+ % RNN_Database(ii, CntColRNN) = Database(kk,mm+2);
54
+ RNN_Database(ii, CntColRNN) = (Database(kk,mm+2)+100)/100;
55
+ CntColRNN = CntColRNN+1;
56
+ end
57
+ end
58
+ break;
59
+ end
60
+ end
61
+ end
62
+ end
63
+ csvwrite('InputRNN_10points_RSSI.csv',RNN_Database);
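The `(RSSI+100)/100` transform used above simply maps readings from roughly [-100, 0] dBm onto [0, 1] for the network input; a one-line sketch with hypothetical values:

```matlab
% -100 dBm (missing/weakest) -> 0, stronger signals approach 1.
rssi_dbm    = [-100 -84 -63 -37];
rssi_scaled = (rssi_dbm + 100) / 100;   % = [0 0.16 0.37 0.63]
```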
Step2_CreateInputTraining_Model5.m ADDED
@@ -0,0 +1,70 @@
1
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2
+ % Wifi Indoor Localization
3
+ % Minhtu
4
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
5
+ clear;
6
+ close all;
7
+ clc;
8
+
9
+ % Database
10
+ myFolder = 'C:\Users\minh_\Desktop\CSI_RSSI_Database\RSSI_6AP_Experiments\Nexus4_Data\'; % Database Folder
11
+
12
+ Input = importdata([myFolder 'UpdatedDatabase_8June2018_11MAC_AverageFilter.csv']);
13
+ Database = Input;
14
+ L_Data = length(Database);
15
+ NumReading = 11;
16
+
17
+ TrajInput = importdata([myFolder 'Traj_10points_5k_5m_s.csv']);
18
+ TrajData = TrajInput;
19
+
20
+ SizeTraj = size(TrajData);
21
+ L = SizeTraj(1);
22
+ NumPointPerTraj = SizeTraj(2)/2; % How many Time Steps in LSTM trajectory
23
+
24
+ %-----------------------------------------------------------
25
+ %%%% Add RSSI to the trajectory
26
+ %-----------------------------------------------------------
27
+ RNN_Database = zeros(L, NumPointPerTraj*2+(NumPointPerTraj-1)*NumReading);
28
+ for ii = 1:L % Scan all trajectory
29
+ CntColRNN = 1;
30
+ for jj = 1: NumPointPerTraj % Scan all points in the trajectory
31
+
32
+ if jj == NumPointPerTraj % stop at the last point: its location is never written (the row's final two columns stay zero); its RSSI was already attached to point NumPointPerTraj-1
33
+ break;
34
+ end
35
+ X = TrajData(ii,(jj-1)*2+1);
36
+ Y = TrajData(ii,(jj-1)*2+2);
37
+ RNN_Database(ii, CntColRNN) = X;
38
+ RNN_Database(ii, CntColRNN+1) = Y;
39
+ CntColRNN = CntColRNN + 2;
40
+
41
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
42
+ X = TrajData(ii,jj*2+1); % Take the next RSSI for current point
43
+ Y = TrajData(ii,jj*2+2);
44
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
45
+
46
+ for kk = 1:5:L_Data % Scan all
47
+ if (abs(Database(kk,1) - X) < 10^-3)&&(abs(Database(kk,2) - Y) < 10^-3)
48
+ RandNum = round(100*rand(1)); % Randomly pick up 1 RSSI Reading Row
49
+ if kk+RandNum > length(Database)
50
+ RandNum = 0;
51
+ end
52
+ if (abs(Database(kk+RandNum,1) - X) < 10^-3)&&(abs(Database(kk+RandNum,2) - Y) < 10^-3)
53
+ for mm = 1:NumReading
54
+ % RNN_Database(ii, CntColRNN) = Database(kk+RandNum,mm+2);
55
+ RNN_Database(ii, CntColRNN) = (Database(kk+RandNum,mm+2)+100)/100;
56
+ CntColRNN = CntColRNN+1;
57
+ end
58
+ else % if out of range, pick up the 1st one
59
+ for mm = 1:NumReading
60
+ % RNN_Database(ii, CntColRNN) = Database(kk,mm+2);
61
+ RNN_Database(ii, CntColRNN) = (Database(kk,mm+2)+100)/100;
62
+ CntColRNN = CntColRNN+1;
63
+ end
64
+ end
65
+ break;
66
+ end
67
+ end
68
+ end
69
+ end
70
+ csvwrite([myFolder 'Input_Location_RSSI_10points.csv'],RNN_Database);
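Each row written above therefore interleaves a known location with the RSSI vector observed at the following point, i.e. `[x_j, y_j, RSSI(point j+1)]` for j = 1..NumPointPerTraj-1; a short indexing sketch (the helper name is hypothetical):

```matlab
% Column index of point jj's block in a Model-5 row (jj = 1..NumPointPerTraj-1).
NumReading = 11;
blockStart = @(jj) (jj-1)*(2 + NumReading) + 1;   % column of x_jj
% y_jj sits at blockStart(jj)+1; the 11 RSSI values observed at point jj+1
% occupy blockStart(jj)+2 : blockStart(jj)+1+NumReading.
```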
Step2_Create_RandomTraj.m ADDED
@@ -0,0 +1,156 @@
1
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2
+ % WiFi Indoor Localization
3
+ % Minhtu
4
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
5
+ % % % Generate random training trajectories under the constraints
6
+ % % % that the distance between consecutive locations is bounded by
7
+ % % % the maximum distance a user can travel within the sample
8
+ % % % interval in practical scenarios.
9
+ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
10
+ clear;
11
+ close all;
12
+ clc;
13
+
14
+ % Database
15
+ Norm_On = 0; % 1: Standardization (z-score) normalization
16
+ % 0: Mean Normalization
17
+
18
+ myFolder = 'C:\Users\minh_\Desktop\CSI_RSSI_Database\RSSI_6AP_Experiments\Nexus4_Data\'; % Database Folder
19
+ if Norm_On == 1
20
+ Input = importdata('MeanDatabase_Normalize_AverageFilter.csv');
21
+ Mean_Location = [11.1397910665714,-3.78622840030012];
22
+ Std_Location = [6.31809099260756,6.42186397145320];
23
+ else
24
+ Input = importdata([myFolder 'MeanDatabase_8June2018_11MAC.csv']);
25
+ end
26
+
27
+ Database = Input;
28
+ L = length(Database);
29
+ % Number of times which the database is repeated
30
+ NumRepeatedDatabase = 15; % Num of Trajectories = NumRepeatedDatabase*L
31
+ NumPointPerTraj = 10; % Number of Time Steps in LSTM networks
32
+
33
+ MapDistance = zeros(L+2, L+2);
34
+ NumNeighboursArray = zeros(L,1);
35
+ SumProbArray = zeros(L,1);
36
+
37
+ % Assumption
38
+ v_user_max = 5; % m/s % Bounded Speed
39
+ t_request = 1; % Time between consecutive location requests, in seconds
40
+ distance_max = v_user_max*t_request;
41
+ sigma = distance_max;
42
+
43
+ if Norm_On == 1
44
+ distance_max = v_user_max*t_request/Std_Location(1);
45
+ sigma = distance_max;
46
+ end
47
+ %-------------------------------------------------------------
48
+ %%%% Build Map Distance
49
+ %-------------------------------------------------------------
50
+
51
+ for ii = 1:L
52
+ X = Database(ii,1);
53
+ Y = Database(ii,2);
54
+ MapDistance(ii+2,1) = X;
55
+ MapDistance(ii+2,2) = Y;
56
+ MapDistance(1,ii+2) = X;
57
+ MapDistance(2,ii+2) = Y;
58
+
59
+ CountNeighbour = 0;
60
+ Sum_Prob = 0;
61
+ for jj = 1:L
62
+ X1 = Database(jj,1);
63
+ Y1 = Database(jj,2);
64
+
65
+ % Calculate Distance
66
+ MapDistance(ii+2,jj+2) = sqrt((X-X1)^2 + (Y-Y1)^2);
67
+
68
+ if MapDistance(ii+2,jj+2)>distance_max
69
+ MapDistance(ii+2,jj+2) = 0;
70
+ else
71
+ % Weight = exp(MapDistance(ii+2,jj+3)/(2*sigma^2)); % Based on Gaussian
72
+ ProbFactor = -1/(2*(sigma^2)*(exp(-(distance_max^2)/(2*sigma^2)) - 1));
73
+ % ProbFactor = 1/(sqrt(2*pi)*sigma);
74
+ P_l = ProbFactor*exp(- MapDistance(ii+2,jj+2)/(2*sigma^2)); % Gaussian-like kernel (note: the exponent uses the distance, not the squared distance)
75
+ Sum_Prob = Sum_Prob+ P_l;
76
+ MapDistance(ii+2,jj+2) = P_l;
77
+ CountNeighbour = CountNeighbour + 1;
78
+ end
79
+ end
80
+ NumNeighboursArray(ii) = CountNeighbour;
81
+ SumProbArray(ii) = Sum_Prob;
82
+ end
83
+
84
+ %%% Normalized Map
85
+ for ii = 1:L
86
+ Sum_Prob = SumProbArray(ii);
87
+ for jj = 1:L
88
+ if MapDistance(ii+2,jj+2) ~= 0
89
+ MapDistance(ii+2,jj+2) = MapDistance(ii+2,jj+2)/Sum_Prob;
90
+ end
91
+ end
92
+ end
93
+
94
+ %%% Create CDF Map
95
+ for ii = 1:L
96
+ SumCDF = 0;
97
+ for jj = 1:L
98
+ if MapDistance(ii+2,jj+2) ~= 0
99
+ SumCDF = SumCDF + MapDistance(ii+2,jj+2);
100
+ MapDistance(ii+2,jj+2) = SumCDF;
101
+ end
102
+ end
103
+ end
104
+
105
+ %%% Create Map with position
106
+ Pos_Map = MapDistance;
107
+ for ii = 1:L
108
+ X = MapDistance(ii+2,1);
109
+ Y = MapDistance(ii+2,2);
110
+ for jj = 1:L
111
+ X1 = MapDistance(1,jj+2);
112
+ Y1 = MapDistance(2,jj+2);
113
+ % Searching for Position in the array
114
+ if MapDistance(ii+2,jj+2) ~= 0
115
+ for kk = 1:L
116
+ X2 = MapDistance(kk+2,1);
117
+ Y2 = MapDistance(kk+2,2);
118
+ if (X2 == X1) && (Y2 == Y1)
119
+ Pos_Map(ii+2,jj+2) = kk;
120
+ break;
121
+ end
122
+ end
123
+ end
124
+ end
125
+ end
126
+
127
+ %-------------------------------------------------------------
128
+ %%%% Structure: x1 y1 x2 y2 x3 y3 ... ----------------------------
129
+ %-------------------------------------------------------------
130
+ TrajArray = zeros(L*NumRepeatedDatabase, NumPointPerTraj*2);
131
+ TrajOrder = zeros(L*NumRepeatedDatabase, NumPointPerTraj);
132
+ %%% Generate random number to choose Trajectory
133
+ for jj = 1:NumRepeatedDatabase % Scan all Traj
134
+ for ii = 1:L % Scan all database
135
+ TrajArray(ii+(jj-1)*L,1) = MapDistance(ii+2,1); % Point 1
136
+ TrajArray(ii+(jj-1)*L,2) = MapDistance(ii+2,2);
137
+
138
+ NextPos = ii;
139
+ TrajOrder(ii+(jj-1)*L,1) = NextPos;
140
+
141
+ for NumPoint = 2: NumPointPerTraj
142
+ RanNum = rand(1);
143
+ for kk=1:L % Find the neighbour
144
+ if MapDistance(NextPos+2,kk+2) > RanNum
145
+ TrajArray(ii+(jj-1)*L,(NumPoint-1)*2+1) = MapDistance(1,kk+2); % Next Point
146
+ TrajArray(ii+(jj-1)*L,(NumPoint-1)*2+2) = MapDistance(2,kk+2);
147
+ NextPos = Pos_Map(NextPos+2,kk+2);
148
+ TrajOrder(ii+(jj-1)*L,NumPoint) = NextPos;
149
+ break;
150
+ end
151
+ end
152
+
153
+ end
154
+ end
155
+ end
156
+ csvwrite([myFolder 'Traj_10points_5k_5m_s.csv'],TrajArray);
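At its core, the generator above performs inverse-CDF sampling over each location's neighborhood: neighbor probabilities are normalized, accumulated into a running CDF along each `MapDistance` row, and a uniform draw selects the first neighbor whose CDF value exceeds it. A standalone sketch of that sampling step, with hypothetical weights:

```matlab
% Inverse-CDF draw from a discrete neighbor distribution.
w    = [0.1 0.5 0.2 0.2];     % hypothetical normalized neighbor probabilities
cdf  = cumsum(w);             % running CDF, as stored in the MapDistance rows
u    = rand(1);               % uniform draw
next = find(cdf > u, 1);      % first neighbor whose CDF exceeds the draw
```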
gru_Model_5_fold_1.h5 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9a32e897683cc999c8c8450b924aff06939646f849874a705d3f9faee5e259c6
3
+ size 396064
lstm_20k_model1.h5 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:465b7f289a3a50fc9c88033f4a488fb80b97573ce99dbeff75876ca10162f8cc
3
+ size 677280
lstm_20k_model2.h5 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a8d44f20709b4372ef23e9977653b97c35b9dbd4d573a13ae6e5fe63ab57b235
3
+ size 688480
lstm_20k_model3.h5 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:42230c5044b3d7eca47e37f509a71d4dbdf8ce35886b865721f9856e1d35dcad
3
+ size 518880
lstm_Model_5_DiffSpeed.h5 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0f78a8eba2ad37c65f99f838e30145bc6103859d436319938cd3f9b3b3a32e44
3
+ size 522080