from __future__ import print_function

from keras.layers.recurrent import LSTM
from keras.models import Sequential, Graph
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D, AveragePooling2D
from keras.layers.core import Activation, Dense, Flatten, Dropout, Reshape, Merge
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l2
from keras import backend as K


def scale(x):
    """Standardize a backend tensor to zero mean and unit variance."""
    centered = x - K.mean(x)
    return centered / K.std(x)


def get_model():
    """Build and return the full Graph model.

    Architecture (as the code shows):
      * ``conv``: a 50-block residual stack — each block is two 3x3, 512-filter
        convolutions with ReLU, summed with the block's input (identity
        shortcut) and batch-normalized — followed by 4x4 average pooling and
        flattening.  Input shape is (30, 128, 128).
      * ``meta``: a small Sequential MLP over a 4-dim metadata vector.
      * The two branches are concatenated, reshaped to a length-1 sequence,
        and passed through two stacked 1024-unit LSTMs with dropout, ending
        in a single Dense output.

    Returns:
        The compiled-ready outer ``Graph`` model (not yet compiled).
    """
    conv = Graph()
    # NOTE(review): old-Keras Graph.add_input expects ``name`` as its first
    # argument; passing an Activation layer positionally looks wrong — confirm
    # against the Keras version this was written for.
    conv.add_input(Activation(activation=scale), name='conv_input', input_shape=(30, 128, 128))

    # One loop covers all 50 residual blocks; block 0 shortcuts from the
    # graph input, later blocks shortcut from the previous BatchNormalization.
    # (The original unrolled block 0 separately and contained two
    # 'conv_actication_' typos plus a 'conv_activation<n>' /
    # 'conv_activation_<n>' name mismatch, both of which dangled.)
    for i in range(50):
        prev = 'conv_input' if i == 0 else 'batch_norm_' + str(i - 1)
        conv.add_node(Convolution2D(512, 3, 3, border_mode='same'),
                      name='conv_' + str(2 * i), input=prev)
        conv.add_node(Activation('relu'),
                      name='conv_activation_' + str(2 * i), input='conv_' + str(2 * i))
        conv.add_node(Convolution2D(512, 3, 3, border_mode='same'),
                      name='conv_' + str(2 * i + 1), input='conv_activation_' + str(2 * i))
        conv.add_node(Activation('relu'),
                      name='conv_activation_' + str(2 * i + 1), input='conv_' + str(2 * i + 1))
        # Identity shortcut: sum the block input with the conv branch output.
        conv.add_node(Merge(mode='sum'), name='merge_conv_' + str(i),
                      inputs=[prev, 'conv_activation_' + str(2 * i + 1)])
        conv.add_node(BatchNormalization(),
                      name='batch_norm_' + str(i), input='merge_conv_' + str(i))
    conv.add_node(AveragePooling2D(pool_size=(4, 4)), name='pooling', input='batch_norm_49')
    conv.add_node(Flatten(), name='flatten', input='pooling')

    # Metadata branch: 4-dim vector -> 512-dim ReLU embedding with dropout.
    meta = Sequential()
    meta.add(Dense(512, input_dim=4))
    meta.add(Activation('relu'))
    meta.add(Dropout(0.5))

    model = Graph()
    model.add_input(name='meta_input', input_shape=(4,))
    # NOTE(review): the outer Graph never declares an input named
    # 'conv_input'; wiring the conv sub-graph to it may rely on the nested
    # graph exposing its own input — verify on the targeted Keras version.
    model.add_node(conv, name='conv', input='conv_input')
    model.add_node(meta, name='meta', input='meta_input')
    # Concatenate both branches and present them as a 1-step sequence
    # (1, 1024) for the LSTM stack.
    model.add_node(Reshape((1, 1024)), name='merge', inputs=['conv', 'meta'], merge_mode='concat')
    model.add_node(LSTM(1024, return_sequences=True), name='lstm_input', input='merge')
    model.add_node(Dropout(0.55), name='merge_do', input='lstm_input')
    model.add_node(LSTM(1024, return_sequences=False), name='lstm_1', input='merge_do')
    model.add_node(Dropout(0.55), name='lstm_1_do', input='lstm_1')
    model.add_node(Dense(1), name='merge_out', input='lstm_1_do')
    model.add_output(name='output', input='merge_out')
    print(model.summary())
    return model
