import collections
import concurrent.futures
import functools
import json
import logging
import math
import time
from concurrent.futures import ThreadPoolExecutor

import mysql.connector
import numpy as np
import pika
import redis
from numpy import newaxis
from pika.adapters import SelectConnection, BlockingConnection
from pika.exceptions import AMQPConnectionError
#from pika.reconnection_strategies import SimpleReconnectionStrategy
from scipy.stats import multivariate_normal

# Shared module-level pool; at most 10 worker threads.
tp = ThreadPoolExecutor(10)

def threaded(fn):
    """Decorator: run `fn` asynchronously on the shared pool.

    The decorated call returns immediately with a concurrent.futures.Future
    whose result() yields fn's return value.
    """
    @functools.wraps(fn)  # preserve the wrapped function's name/docstring
    def wrapper(*args, **kwargs):
        return tp.submit(fn, *args, **kwargs)
    return wrapper

# Module-level logger, named after this module.
log = logging.getLogger(__name__)

# Presumably the number of measured variables per equipment window
# (load_trained_model hard-codes the same value as num_var = 6);
# unused in this file — TODO confirm and unify.
PARAM_DIM = 6

class LTSM_s2s_AD:
    """LSTM sequence-to-sequence anomaly detector for equipment electrical data.

    Pops fixed-length windows of point values from Redis, reconstructs them
    with a trained encoder/decoder LSTM, and publishes an alarm to RabbitMQ
    when the mean reconstruction error exceeds a threshold.
    """

    def __init__(self, seq_len, equip):
        # seq_len: number of samples per detection window.
        # equip:   equipment object code whose points are monitored.
        self.seq_len = seq_len
        self.load_trained_model()
        self.connect_db()
        self.equip = equip
        self.keys = self.read_conf()  # Redis list keys, one per point

    def __call__(self, equip):
        # NOTE(review): `equip` is ignored; detection always runs on the
        # equipment passed to __init__ — confirm this is intended.
        self.kickoff_ad_process()

    def connect_db(self):
        """Open the Redis and MySQL connections used by this detector."""
        # NOTE(review): hard-coded hosts/credentials; move to configuration.
        self.R = redis.StrictRedis(host='10.168.2.210', port=6379, password='qhlk@2017', db=0)
        self.Conn = mysql.connector.connect(host='10.168.2.210', port='3306', user='root', password='1234', database='cqew_dev')

    def read_conf(self):
        """Return the Redis value-list key ('point:<code>:value') for every
        point of self.equip."""
        cursor = self.Conn.cursor()
        # Parameterized query: the original concatenated self.equip into the
        # SQL string, which was vulnerable to SQL injection.
        query = "select code,index_type from point where object_code = %s"
        cursor.execute(query, (self.equip,))
        rows = cursor.fetchall()
        cursor.close()
        # First column of each row is the point code; returns [] for an empty
        # result set (the original crashed on np.array([]).T[0]).
        return ['point:' + row[0] + ':value' for row in rows]

    def check_redis_queue_len(self, key):
        """Return the current length of the Redis list stored at `key`."""
        # The original bound the builtin name `len` and mixed tabs with
        # spaces in the indentation.
        return self.R.llen(key)

    def read_data(self, point_list):
        """Read one seq_len window for every point in `point_list`.

        Returns (data, timestamps): `data` shaped (n_points, seq_len) holding
        the measured values, `timestamps` likewise holding the sample times.
        (The original swapped the halves of read_a_serial's result here and
        compensated with a second swap in kickoff_ad_process; both swaps are
        now removed.)
        """
        data = []
        timestamps = []
        for point in point_list:
            vals, ts = self.read_a_serial(point)
            data.append(vals)
            timestamps.append(ts)

        # Row order follows point_list (A, B, C, KV, KVar, PF).
        return np.array(data), np.array(timestamps)

    def read_a_serial(self, point):
        """Pop self.seq_len samples from the Redis list `point`.

        Each Redis entry is a 'timestamp,value' string; exhausted queues are
        padded with zeros. Returns (values, timestamps) as 1-D arrays.
        """
        rows = []
        for _ in range(self.seq_len):
            raw = self.R.lpop(point)  # bytes b'timestamp,value' or None
            if raw is not None:
                rows.append([float(part) for part in raw.decode('utf-8').split(',')])
            else:
                rows.append([0.0, 0.0])  # pad when the queue runs dry

        rows = np.array(rows)
        # Column 0 is the timestamp, column 1 the value.
        return rows.T[1], rows.T[0]

    def encode_decode(self, seq):
        """Encode `seq`, then decode it back; return the reconstruction."""
        states_value = self.encoder_model.predict(seq)
        # The input window itself is fed to the decoder as its target input.
        output_seq, _h, _c = self.decoder_model.predict([seq] + states_value)
        return output_seq

    def compute_error(self, input_seq, output_seq):
        """Return the mean absolute reconstruction error of the first batch item.

        The abs() makes the result symmetric in the two arguments.
        """
        e = np.abs(input_seq - output_seq)[0]
        return np.mean(e)

    def load_trained_model(self):
        """Build the inference encoder/decoder models and load trained weights.

        Rebuilds the training-time seq2seq graph so the weight file lines up,
        loads 's2s.h5.weights', then derives:
          * self.encoder_model: input window -> [state_h, state_c]
          * self.decoder_model: [target seq, h, c] -> [output seq, h, c]
        """
        # Imported lazily so the module can be loaded without keras installed.
        from keras.models import Model
        from keras.layers import Input, LSTM, Dense

        num_var = 6       # variables per sample
        latent_dim = 256  # LSTM state size used at training time

        # --- training-time graph ---
        encoder_inputs = Input(shape=(None, num_var))
        encoder = LSTM(latent_dim, return_state=True)
        encoder_outputs, state_h, state_c = encoder(encoder_inputs)
        # Only the final states are kept; the encoder output is discarded.
        encoder_states = [state_h, state_c]

        decoder_inputs = Input(shape=(None, num_var))
        # The decoder returns full sequences and its internal states; the
        # states are only used by the inference model below.
        decoder_lstm = LSTM(latent_dim, return_sequences=True, return_state=True)
        decoder_outputs, _, _ = decoder_lstm(decoder_inputs,
                                             initial_state=encoder_states)
        decoder_dense = Dense(num_var, activation='relu')
        decoder_outputs = decoder_dense(decoder_outputs)

        model = Model([encoder_inputs, decoder_inputs], decoder_outputs)
        model.load_weights('s2s.h5.weights')

        # --- inference models ---
        self.encoder_model = Model(encoder_inputs, encoder_states)

        decoder_state_input_h = Input(shape=(latent_dim,))
        decoder_state_input_c = Input(shape=(latent_dim,))
        decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]
        decoder_outputs, state_h, state_c = decoder_lstm(
            decoder_inputs, initial_state=decoder_states_inputs)
        decoder_states = [state_h, state_c]
        decoder_outputs = decoder_dense(decoder_outputs)
        self.decoder_model = Model(
            [decoder_inputs] + decoder_states_inputs,
            [decoder_outputs] + decoder_states)

    def kickoff_ad_process(self):
        """Endless loop: read a window, score it, publish an alarm if anomalous."""
        err_thr = 12  # reconstruction-error threshold for raising an alarm

        while True:
            # Wait until at least seq_len samples are queued for the first point.
            if self.check_redis_queue_len(self.keys[0]) < self.seq_len:
                time.sleep(10)
                continue

            data, timestamps = self.read_data(self.keys)

            # An all-zero window means only padding was read.
            if np.sum(data) == 0:
                time.sleep(1)
                continue

            # (n_points, seq_len) -> (1, seq_len, n_points) as the model expects.
            seq = data.T[newaxis, ...]
            output_seq = self.encode_decode(seq)
            ais = self.compute_error(output_seq, seq)

            # Normal window: nothing to report. (The original left `error`
            # unbound here — NameError on the first normal window — and never
            # reset it to False once set, alarming forever after one anomaly.)
            if ais <= err_thr:
                continue

            try:
                # Publish the anomalous window to the alarm queue.
                credentials = pika.PlainCredentials('qhlk', 'qhlk')
                parameters = pika.ConnectionParameters('10.168.2.210',
                                                       5672,
                                                       'ndew',
                                                       credentials)
                connection = pika.BlockingConnection(parameters)
                channel = connection.channel()

                # Row order is fixed by read_conf's point ordering.
                window = seq[0].T
                electricA = window[0].tolist()
                electricB = window[1].tolist()
                electricC = window[2].tolist()
                KVar      = window[3].tolist()
                KW        = window[4].tolist()
                PFar      = window[5].tolist()

                # equipCode now reports the monitored equipment; the original
                # hard-coded the first code of a local list.
                payload = {"equipCode": self.equip, "alarmCode": "123", "title": "风机电力数据异常", "serverity": 1, "dataType": "electric_ai",
                           "data": {"electricA": electricA, "electricB": electricB, "electricC": electricC, "PFar": PFar, "KVAR": KVar, "KW": KW,
                                    "timestamps": timestamps[0].tolist()}}
                channel.basic_publish(exchange='',
                                      routing_key='hjzk.alarm.queue',
                                      body=json.dumps(payload).encode("utf-8"))
                # The original passed a %-format string plus arg to print().
                log.info('Sent an alarm with error index %f', ais)

                connection.close()
            except Exception:
                log.error('connection failure', exc_info=True)
                time.sleep(1)

def main():
    """Entry point: run anomaly detection for the second configured equipment."""
    equipment_codes = ['EQ7QN1K', 'EQDKMS3']
    detector = LTSM_s2s_AD(20, equipment_codes[1])
    detector.kickoff_ad_process()
 
# Run the detector only when executed as a script, not on import.
if __name__ == '__main__':
    main()