from __future__ import absolute_import, division, print_function, unicode_literals
import functools

import numpy as np
import tensorflow as tf
# Column layout of the whitespace-delimited data files: 7 numeric features, 1 label.
NAMES = ['time', 'x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'y']


def get_dataset(file_path, batch_size=32):
    """Build a batched tf.data.Dataset of (features, label) from a CSV-like file.

    Args:
        file_path: Path to a space-delimited text file whose columns match NAMES.
        batch_size: Number of examples per batch (default 32, matching the
            original hard-coded value).

    Returns:
        A tf.data.Dataset yielding (features_dict, label) batches, where the
        label column is the last entry of NAMES ('y'). Each column is parsed
        as float32; malformed lines are skipped (ignore_errors=True).
    """
    dataset = tf.data.experimental.make_csv_dataset(
        file_path,
        batch_size=batch_size,
        column_names=NAMES,
        # All 8 columns are float32; build the list from NAMES so it cannot
        # drift out of sync with the column count.
        column_defaults=[tf.float32] * len(NAMES),
        label_name=NAMES[-1],
        field_delim=" ",
        num_epochs=1,
        ignore_errors=True,
        # NOTE(review): header=True skips the first line of the file —
        # confirm the .txt files actually contain a header row.
        header=True)
    return dataset

# Module-level I/O: load the train/test datasets from local files.
# Assumes "train.txt" and "test.txt" exist in the working directory — TODO confirm.
raw_train_data = get_dataset("train.txt")
raw_test_data = get_dataset("test.txt")
# Pull one batch eagerly; examples/labels are not used below — presumably kept
# for interactive inspection. NOTE(review): consider removing if unneeded.
examples, labels = next(iter(raw_train_data))

def process_continuous_data(mean, data):
    """Scale a feature column by 1/(2*mean) and shape it to a column vector.

    With this scaling, values near the column mean land near 0.5.
    Returns a float32 tensor of shape [-1, 1].
    """
    scaled = tf.cast(data, tf.float32) / (2 * mean)
    return tf.reshape(scaled, [-1, 1])

# Per-column means used for normalization; presumably estimated offline from
# the training data — TODO confirm these match the current train.txt.
MEANS = {
    'time' : 1.09E+03,
    'x1' : 1.01E+02,
    'x2' : 1.33E+01,
    'x3' : 2.25E+00,
    'x4' : 5.94E+00,
    'x5' : 7.37E+00,
    'x6' : 9.38E+01}

# One numeric feature column per feature, each normalized via
# process_continuous_data with that feature's mean bound by functools.partial.
# NOTE(review): tf.feature_column is deprecated in recent TF releases; consider
# migrating to Keras preprocessing layers (tf.keras.layers.Normalization).
numerical_columns = [
    tf.feature_column.numeric_column(
        feature,
        normalizer_fn=functools.partial(process_continuous_data, mean))
    for feature, mean in MEANS.items()]

# DenseFeatures turns the feature-column dict batches into a single dense tensor.
preprocessing_layer = tf.keras.layers.DenseFeatures(numerical_columns)

# Regression MLP: a funnel of Dense/Dropout pairs (16 -> 16 -> 8 -> 8 -> 4 -> 4)
# ending in a single linear output unit.
model = tf.keras.Sequential()
model.add(preprocessing_layer)
for units in (16, 16, 8, 8, 4, 4):
    model.add(tf.keras.layers.Dense(units, activation='relu'))
    model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(1))

optimizer = tf.keras.optimizers.RMSprop(0.001)

# Regression setup: MSE loss, mean-absolute-error as the reported metric.
model.compile(
    loss='MSE',
    optimizer=optimizer,
    metrics=['mae'])

# Shuffle training batches each epoch; the test set stays in file order.
train_data = raw_train_data.shuffle(500)
test_data = raw_test_data

model.fit(train_data, epochs=40)

# evaluate() returns [loss, mae] because metrics=['mae'] above — the second
# value is mean absolute error, NOT accuracy (this is a regression model).
test_loss, test_mae = model.evaluate(test_data)

print('\n\nTest Loss {}, Test MAE {}'.format(test_loss, test_mae))