# TCN_UL_acitivity / data_loader_interface.py
import math
import random

import numpy as np
from sklearn.base import TransformerMixin
from sklearn.preprocessing import StandardScaler

class NDStandardScaler(TransformerMixin):
    """StandardScaler wrapper that accepts N-dimensional input.

    Trailing dimensions are flattened before fitting/transforming, then the
    original shape is restored.
    """

    def __init__(self, **kwargs):
        self._scaler = StandardScaler(copy=True, **kwargs)
        self._orig_shape = None

    def fit(self, X, **kwargs):
        X = np.array(X)
        # Save the original shape so the flattened X can be reshaped
        # back to it later.
        if len(X.shape) > 1:
            self._orig_shape = X.shape[1:]
        X = self._flatten(X)  # reshape into the two-dimensional format StandardScaler expects
        self._scaler.fit(X, **kwargs)
        return self

    def transform(self, X, **kwargs):
        X = np.array(X)       # convert X to a NumPy array
        X = self._flatten(X)  # same two-dimensional layout as during fit
        X = self._scaler.transform(X, **kwargs)
        X = self._reshape(X)  # restore the original shape
        return X

    def _flatten(self, X):
        # Reshape X to <= 2 dimensions, e.g. (10, 28, 28, 3) -> (10, 28*28*3).
        if len(X.shape) > 2:
            n_dims = np.prod(self._orig_shape)  # number of elements per sample
            X = X.reshape(-1, n_dims)
        return X

    def _reshape(self, X):
        # Reshape X back to its original shape.
        if len(X.shape) >= 2:
            X = X.reshape(-1, *self._orig_shape)
        return X
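
# Usage sketch (an illustrative addition, not part of the original pipeline):
# fit on a batch of 3-D windows and transform it back to the same shape. The
# (10, 243, 6) shape below is a hypothetical example, not taken from the data.
#
#   scaler = NDStandardScaler()
#   windows = np.random.randn(10, 243, 6)
#   scaled = scaler.fit(windows).transform(windows)
#   assert scaled.shape == windows.shape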

# Subtract each column's mean from the data (per-window centring).
# Note that this modifies `data` in place.
def mean_sub(data):
    for i in range(data.shape[1]):
        data[:, i] = data[:, i] - np.mean(data[:, i])
    return data

def degree_to_radians(signals):
    return np.array(list(map(lambda signal: np.array(list(map(math.radians, signal))), signals)))
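
# Note: for a regular 2-D numeric array this is equivalent to the vectorized
# np.radians(signals); the map/lambda form is kept here to preserve the
# original behaviour for ragged inputs.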

def get_start_indices(n_timesteps, window_len, step_size):
    # The last valid start index is n_timesteps - window_len, hence the +1.
    n_timesteps_valid = n_timesteps - window_len + 1
    if step_size <= 0:
        step_size = 1  # guard against a non-positive stride
    start_indices = np.arange(0, n_timesteps_valid, step_size, dtype=int)
    return start_indices
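
# Worked example (illustrative): get_start_indices(10, window_len=4, step_size=2)
# returns [0 2 4 6]; the last window covers indices 6..9 inclusive.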

def get_data(data):
    window_len = 243
    step_size = 1
    X = []
    print("data shape: ", data.shape)
    start_indices = get_start_indices(data.shape[0], window_len=window_len, step_size=step_size)
    for k in start_indices:
        # Copy the window before centring: mean_sub works in place, and with
        # step_size=1 the windows overlap, so mutating a view of `data` would
        # corrupt the windows that follow.
        this_window_data = data[k:k + window_len, :].copy()
        this_window_data = mean_sub(this_window_data)
        X.append(this_window_data)
    print("data shape after: ", np.array(X).shape)
    return X
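
# With window_len=243 and step_size=1, a (T, C) input yields T - 242 windows,
# so np.array(X) above has shape (T - 242, 243, C).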

# Randomly keep a fraction of the windows; sample_percent=1 keeps all of them.
def select_random_samples(X, sample_percent):
    num_samples = len(X)
    num_samples_to_select = int(num_samples * sample_percent)
    selected_indices = random.sample(range(num_samples), num_samples_to_select)
    selected_indices.sort()  # keep temporal order among the selected windows
    selected_samplesX = [X[i] for i in selected_indices]
    return selected_samplesX

def load_imu(data_array):
    test_data = []
    X = get_data(data_array)
    print("X shape: ", np.array(X).shape)
    samplesX = select_random_samples(X, 1)  # sample_percent=1 keeps every window
    print("samplesX shape: ", np.array(samplesX).shape)
    test_data.extend(samplesX)
    test_data = np.array(test_data)
    print()
    print(len(test_data))
    return test_data
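
# Minimal smoke test (an illustrative addition, not part of the original file;
# the 500x6 synthetic array is an assumption about the input layout: rows are
# timesteps, columns are IMU channels).
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    dummy = rng.standard_normal((500, 6))  # 500 timesteps, 6 channels (synthetic)
    windows = load_imu(dummy)              # expected shape: (500 - 242, 243, 6)
    print("windows shape:", windows.shape)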