# libraries
#import tensorrt as trt
import torch
import numpy as np
from tensorflow.keras import optimizers
import matplotlib.pyplot as plt
from tensorflow.keras import initializers
from tensorflow.keras import regularizers
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.layers import concatenate
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.layers import Dense, Flatten, Dropout, Input, BatchNormalization, PReLU
import tensorflow as tf

# GPU visibility must be configured BEFORE TensorFlow touches the CUDA runtime:
# querying devices initializes the CUDA context, after which CUDA_VISIBLE_DEVICES
# is ignored. The original code queried first and restricted second — reordered here.
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"   # index GPUs by PCI slot, matching nvidia-smi
os.environ["CUDA_VISIBLE_DEVICES"] = "0"         # restrict training to GPU 0
import threading

# tf.test.is_gpu_available() is deprecated; tf.config.list_physical_devices is the
# supported replacement. Print a bool to keep the original True/False output.
print(len(tf.config.list_physical_devices('GPU')) > 0)

def _dense_relu(units, tensor):
    """ReLU Dense layer with the L2(0.01) / he_normal(seed=0) config shared by every
    hidden layer in this model. Extracted to remove eight copy-pasted layer definitions;
    the resulting graph is identical to the original."""
    return Dense(
        units,
        activation='relu',
        kernel_regularizer=regularizers.l2(0.01),
        kernel_initializer=initializers.he_normal(seed=0),
    )(tensor)


# Text branch: (50, 768) inputs — presumably 50 tokens of 768-dim embeddings
# (BERT-sized); TODO confirm against the data pipeline. Flatten then compress
# 38400 -> 1000 -> 500 -> 100.
input_text = Input(shape=(50, 768))
text_flat = Flatten()(input_text)
dense_text = _dense_relu(1000, text_flat)
#dense_text = Dropout(0.4)(dense_text)
dense_text = _dense_relu(500, dense_text)
#dense_text = Dropout(0.4)(dense_text)
dense_text = _dense_relu(100, dense_text)
dense_text = BatchNormalization()(dense_text)
dense_text_drop = Dropout(0.4)(dense_text)

# Image branch: 4096-dim vectors — presumably VGG fc7 features; TODO confirm.
# Compress 4096 -> 2000 -> 1000 -> 100 so both branches meet at 100 dims.
input_image = Input(shape=(4096,))
dense_image = _dense_relu(2000, input_image)
#dense_image = Dropout(0.4)(dense_image)
dense_image = _dense_relu(1000, dense_image)
#dense_image = Dropout(0.4)(dense_image)
dense_image = _dense_relu(100, dense_image)
dense_image = BatchNormalization()(dense_image)
dense_image_drop = Dropout(0.4)(dense_image)

# Late fusion: concatenate the two 100-dim branch outputs (-> 200 dims),
# then a small classification head ending in a 2-way softmax.
concat = concatenate([dense_text_drop, dense_image_drop])

inter1_dense = _dense_relu(200, concat)
inter1_dense = _dense_relu(100, inter1_dense)
final_dense = _dense_relu(50, inter1_dense)
final_dropout = Dropout(0.4)(final_dense)
output = Dense(2, activation='softmax')(final_dropout)

# Tie the two input branches and the softmax head into a single trainable model.
model = Model(inputs=[input_text, input_image], outputs=output)

# Checkpoint the full model whenever validation accuracy improves.
# NOTE(review): the .pt extension is unusual for a Keras checkpoint — verify
# downstream loaders expect this path/format.
best_model_saver = ModelCheckpoint(
    filepath='/root/autodl-tmp/checkpoints_polity/dense_MM_model.pt',
    monitor='val_accuracy',
    verbose=1,
    save_best_only=True,
    mode='max',
)
callbacks_list = [best_model_saver]

# Adam at a conservative learning rate; earlier optimizer experiments kept below.
optimizer = optimizers.Adam(learning_rate=1e-4)
#adagrad = optimizers.Adagrad(lr=1e-4)
#adamax = optimizers.Adamax(lr=0.002, beta_1=0.9, beta_2=0.999)
#sgd = optimizers.SGD(lr=1e-4, clipnorm=1.)

model.compile(
    loss='categorical_crossentropy',
    optimizer=optimizer,
    metrics=['accuracy'],
)
model.summary()

from data import DataProcess

# Load preprocessed features: each DataProcess yields (text matrix, image matrix,
# labels). NOTE(review): the third constructor argument (100) is passed only for
# the training split — presumably a sample/batch limit; verify against data.py.
train_source = DataProcess("/root/autodl-tmp/fakeddit/train.tsv", "/root/autodl-tmp/fakeddit", 100)
test_source = DataProcess("/root/autodl-tmp/fakeddit/test.tsv", "/root/autodl-tmp/fakeddit")

train_text_matrix, train_image_matrix, train_label = train_source.getdata()
test_text_matrix, test_image_matrix, test_label = test_source.getdata()

# Train with the held-out split as validation; the checkpoint callback persists
# the best model by val_accuracy.
history = model.fit(
    [train_text_matrix, train_image_matrix],
    train_label,
    validation_data=([test_text_matrix, test_image_matrix], test_label),
    batch_size=32,
    epochs=100,
    callbacks=callbacks_list,
)