#__author__ = 'Ion Morozan'
#__author__ = 'Hector Fernandez'
from os import remove

import ProfileDataManager
import re
import collections
from collections import defaultdict
from ProfileDataManager import ProfileDataManager
from Virtual_Machine import Virtual_Machine

#Virtual Machines IPs
# WEB, BACKEND and PROXY all share the same entry structure:
#   - IP
#   - (constraint1, constraint2, etc.)
#
# Example entry:
#   - (IP, ([timestamps], [cpu_user], [web_response_time], [web_request_rate]))
#
#   Where: * IP -       String (ex: 10.141.0.143)
#          * cpu_user - Float List (ex: [8.12, 7.22, 8.27, etc.])

#SERVICES
# Raw monitoring samples per service, appended by the *Server() parsers as
# (IP, (constraint lists)) tuples; gatherData() later rebinds these names to
# merged per-IP dictionaries.
WEB = []
# Largest web response time seen so far, kept together with the cpu and
# request-rate samples paired with it (maintained by webServer()).
max_resp = max_req = max_cpu = 0.0

BACKEND = []
PROXY = []

#Service Level Objective(ms), not so realistic for WEB
SLO = 700.00

# Service banner lines; used both as report headers and to recognize the
# sections of the input log in parseFile().
web_desc = '**** Web monitoring data: *****'
backend_desc = '**** Backend monitoring data: *****'
proxy_desc = '**** Proxy monitoring data: *****'

# Input log produced by the profiler.
INPUT_F = 'tests/profiling.log'

#NotDuplicates = {'192.168.1.2':[x,y,z], '192.168.1.3':[t,x,w], etc}
# Every entry maps an IP (key) to the list of timestamps already seen for it,
# so a timestamp that shows up a second time (in practice this should not
# happen) is dropped together with its paired constraint values.
#! Shared by every service; it is cleared after each service's analysis ends.
NotDuplicates = defaultdict(list)

class Main:

    def __init__(self):
        # Truncate the result files left over from previous runs.
        for step in (1, 2, 3, 4):
            open('analyse_Step%d' % step, 'w').close()

        # NOTE(review): starting_time appears unused elsewhere in this file
        # (filter() uses a local of the same name) - confirm before removing.
        self.starting_time = 0
        self.backend_vmes = {}  # IP -> Virtual_Machine stats (backend service)
        self.web_vmes = {}      # IP -> Virtual_Machine stats (web service)
        self.proxy_vmes = {}    # IP -> Virtual_Machine stats (proxy service)

    # Match IP addresses as a list from a string.
    #    @data:   string to parse
    #    @return: list of IP addresses
    def IP_Match(self, data):
        return re.findall(r'\d+[\.-]\d+[\.-]\d+[\.-]\d+', data)

    # Match the list of a desired constraint(ex: cpu_user)
    #   @pattern: constraint to search
    #   @data:    data to process
    def Constraint_Match(self, pattern, data):
        #extract the list as a string corresponding to the pattern
        match = re.search(r'(\'' + pattern + '\'): \[([^\]]*)', data);

        # transform the string to float
        return [float(x) for x in str(match.group(2)).split(',')]

    # Record a timestamp for the given IP and report whether it was new.
    # Returns True the first time a timestamp is seen for that IP, and False
    # on a repeat, so the caller can drop the duplicated sample together with
    # its paired constraint values.
    def checkForDuplicates(self, tmp, IP):
        global NotDuplicates

        seen = NotDuplicates[IP]
        if tmp in seen:
            return False
        seen.append(tmp)
        return True

    # Parse one line of web-service monitoring data and append, per VM IP, a
    # tuple (IP, (timestamps, cpu_user, web_response_time, web_request_rate))
    # to the global WEB list. Also tracks the overall maximum response time
    # together with the cpu / request-rate samples paired with it.
    def webServer(self, data):
        global max_resp, max_cpu, max_req

        try:
            # if there is no data, just IPs, there is no need to analyse the service
            # ex: {u'ec2-23-20-71-31.compute-1.amazonaws.com': {}}
            if 'timestamps' in data:
                #extract IPs of the VMs which are running a web server
                IPs_VM = self.IP_Match(data)
                for IP in IPs_VM:
                    # NOTE: each IP deliberately gets its own fresh lists -
                    # webAnalyse() later mutates them in place per IP.

                    # extract timestamps
                    timestamps = self.Constraint_Match('timestamps', data)

                    # extract list cpu_user
                    cpu_user = self.Constraint_Match('cpu_user', data)

                    # extract list web_request_rate
                    web_request_rate = self.Constraint_Match('web_request_rate', data)

                    # extract list web_response_time
                    web_response_time = self.Constraint_Match('web_response_time', data)

                    # track the maximum response time over all executions and
                    # store it as a tuple (max_resp, max_cpu, max_req)
                    m = max(web_response_time)
                    if m > max_resp:
                        i = web_response_time.index(m)
                        max_resp = m
                        max_cpu = cpu_user[i]
                        max_req = web_request_rate[i]

                    #add the tuple to the list
                    WEB.append((IP, (timestamps, cpu_user, web_response_time, web_request_rate)))

        except Exception as e:
            print 'webServer Error: ' + e.message

    # Keep, for every WEB entry, only the samples whose response time violates
    # the SLO (first occurrence of each timestamp per IP) and accumulate
    # per-IP violation / served-page counters in self.web_vmes.
    def webAnalyse(self):
        global NotDuplicates

        # initialize the duplicate-tracker with the IPs of the WEB service
        # NotDuplicates['192.168.1.2'] = []
        NotDuplicates = {IP: [] for IP, constraints in WEB}
        try:
            for IP, (timestamps, cpu_user, web_response_time, web_request_rate) in WEB:
                positions = []
                web_slo_violations = web_pages_served = 0
                # collect the positions where the response time exceeds the SLO
                for pos in range(len(web_response_time)):
                    if web_response_time[pos] > SLO and self.checkForDuplicates(timestamps[pos], IP):
                        web_slo_violations += 1
                        positions.append(pos)
                    elif web_response_time[pos] > 1:
                        # a real request that stayed within the SLO
                        web_pages_served += 1

                # keep only the SLO-violating samples; slice-assign so the
                # tuples stored in WEB see the filtered lists
                timestamps[:] = [timestamps[pos] for pos in positions]
                cpu_user[:] = [cpu_user[pos] for pos in positions]
                web_response_time[:] = [web_response_time[pos] for pos in positions]
                web_request_rate[:] = [web_request_rate[pos] for pos in positions]

                # accumulate per-IP counters; the original relied on a bare
                # except around self.web_vmes[IP] - test membership instead
                if IP in self.web_vmes:
                    machine_aux = self.web_vmes[IP]
                    machine_aux.slo_violations = machine_aux.slo_violations + web_slo_violations
                    machine_aux.served_pages = machine_aux.served_pages + web_pages_served
                else:
                    self.web_vmes[IP] = Virtual_Machine(IP, web_slo_violations, web_pages_served)

            # remove tuples which contain no remaining samples
            WEB[:] = [(IP, CONSTRAINTS) for IP, CONSTRAINTS in WEB if CONSTRAINTS[2]]  # CONSTRAINTS[2] = resp_time

            #clear the dictionary so that it can be used by other services
            NotDuplicates.clear()
        except Exception as e:
            print 'webAnalyse Error: ' + e.message

    # Parse one line of backend-service monitoring data and append, per VM IP,
    # a tuple (IP, (timestamps, cpu_user, php_response_time, php_request_rate))
    # to the global BACKEND list.
    def backendServer(self, data):
        try:
            # if there is no data, just IPs, there is no need to analyse the service
            # ex: {u'ec2-23-20-71-31.compute-1.amazonaws.com': {}}
            if 'timestamps' in data:
                #extract IPs of the VMs which are running a backend server
                IPs_VM = self.IP_Match(data)
                for IP in IPs_VM:
                    # NOTE: each IP deliberately gets its own fresh lists -
                    # backendAnalyse() later mutates them in place per IP.

                    # extract timestamps
                    timestamps = self.Constraint_Match('timestamps', data)

                    # extract list cpu_user
                    cpu_user = self.Constraint_Match('cpu_user', data)

                    # extract list php_response_time
                    php_response_time = self.Constraint_Match('php_response_time', data)

                    # extract list php_request_rate
                    php_request_rate = self.Constraint_Match('php_request_rate', data)

                    #add the tuple to the list
                    BACKEND.append((IP, (timestamps, cpu_user, php_response_time, php_request_rate)))

        except Exception as e:
            print 'backendServer Error: ' + e.message


    # Keep, for every BACKEND entry, only the samples whose response time
    # violates the SLO (first occurrence of each timestamp per IP) and
    # accumulate per-IP violation / served-page counters in self.backend_vmes.
    def backendAnalyse(self):
        global NotDuplicates

        # initialize the duplicate-tracker with the IPs of the BACKEND service
        # NotDuplicates['192.168.1.2'] = []
        NotDuplicates = {IP: [] for IP, constraints in BACKEND}

        try:
            for IP, (timestamps, cpu_user, php_response_time, php_request_rate) in BACKEND:
                positions = []
                backend_slo_violations = backend_pages_served = 0
                # collect the positions where the response time exceeds the SLO
                for pos in range(len(php_response_time)):
                    if php_response_time[pos] > SLO and self.checkForDuplicates(timestamps[pos], IP):
                        backend_slo_violations += 1
                        positions.append(pos)
                    elif php_response_time[pos] > 1:
                        # a real request that stayed within the SLO
                        backend_pages_served += 1

                # keep only the SLO-violating samples; slice-assign so the
                # tuples stored in BACKEND see the filtered lists
                timestamps[:] = [timestamps[pos] for pos in positions]
                cpu_user[:] = [cpu_user[pos] for pos in positions]
                php_response_time[:] = [php_response_time[pos] for pos in positions]
                php_request_rate[:] = [php_request_rate[pos] for pos in positions]

                # accumulate per-IP counters; the original relied on a bare
                # except around self.backend_vmes[IP] - test membership instead
                if IP in self.backend_vmes:
                    machine_aux = self.backend_vmes[IP]
                    machine_aux.slo_violations = machine_aux.slo_violations + backend_slo_violations
                    machine_aux.served_pages = machine_aux.served_pages + backend_pages_served
                else:
                    self.backend_vmes[IP] = Virtual_Machine(IP, backend_slo_violations, backend_pages_served)

            # remove tuples which contain no remaining samples
            BACKEND[:] = [(IP, CONSTRAINTS) for IP, CONSTRAINTS in BACKEND if
                          CONSTRAINTS[2]]  # CONSTRAINTS[2] = resp_time

            #clear the dictionary so that it can be used by other services
            NotDuplicates.clear()
        except Exception as e:
            print 'backendAnalyse Error: ' + e.message

    # Parse one line of proxy-service monitoring data and append, per VM IP,
    # a tuple (IP, (timestamps, php_response_time_lb, web_response_time_lb,
    # php_request_rate_lb, web_request_rate_lb)) to the global PROXY list.
    def proxyServer(self, data):
        try:
            # if there is no data, just IPs, there is no need to analyse the service
            # ex: {u'ec2-23-20-71-31.compute-1.amazonaws.com': {}}
            if 'timestamps' in data:
                #extract IPs of the VMs which are running a proxy server
                IPs_VM = self.IP_Match(data)
                for IP in IPs_VM:
                    # NOTE: each IP deliberately gets its own fresh lists -
                    # proxyAnalyse() later mutates them in place per IP.

                    # extract timestamps
                    timestamps = self.Constraint_Match('timestamps', data)

                    # extract list php_request_rate_lb
                    php_request_rate_lb = self.Constraint_Match('php_request_rate_lb', data)

                    # extract list web_response_time_lb
                    web_response_time_lb = self.Constraint_Match('web_response_time_lb', data)

                    # extract list php_response_time_lb
                    php_response_time_lb = self.Constraint_Match('php_response_time_lb', data)

                    # extract list web_request_rate_lb
                    web_request_rate_lb = self.Constraint_Match('web_request_rate_lb', data)

                    #add the tuple to the list
                    PROXY.append(
                        (IP, (
                            timestamps, php_response_time_lb, web_response_time_lb, php_request_rate_lb,
                            web_request_rate_lb)))

        except Exception as e:
            print 'proxyServer Error: ' + e.message

    # Keep, for every PROXY entry, only the samples where either the php or
    # the web response time violates the SLO (first occurrence of each
    # timestamp per IP) and accumulate per-IP counters in self.proxy_vmes.
    def proxyAnalyse(self):
        global NotDuplicates

        # initialize the duplicate-tracker with the IPs of the PROXY service
        # NotDuplicates['192.168.1.2'] = []
        NotDuplicates = {IP: [] for IP, constraints in PROXY}

        try:
            for IP, (
                timestamps, php_response_time_lb, web_response_time_lb, php_request_rate_lb,
                web_request_rate_lb) in PROXY:

                positions = []
                proxy_slo_violations = proxy_pages_served = 0
                # collect the positions where either response time exceeds the
                # SLO; iterate only as far as every list reaches because the
                # web and php lists are sometimes not the same length
                for pos in range(min(len(web_response_time_lb), len(php_response_time_lb), len(timestamps))):
                    if (php_response_time_lb[pos] > SLO or web_response_time_lb[pos] > SLO) and \
                        self.checkForDuplicates(timestamps[pos], IP):
                        proxy_slo_violations += 1
                        positions.append(pos)
                    elif php_response_time_lb[pos] > 1 or web_response_time_lb[pos] > 1:
                        # a real request that stayed within the SLO
                        proxy_pages_served += 1

                # keep only the SLO-violating samples; slice-assign so the
                # tuples stored in PROXY see the filtered lists
                timestamps[:] = [timestamps[pos] for pos in positions]
                php_response_time_lb[:] = [php_response_time_lb[pos] for pos in positions]
                web_response_time_lb[:] = [web_response_time_lb[pos] for pos in positions]
                php_request_rate_lb[:] = [php_request_rate_lb[pos] for pos in positions]
                web_request_rate_lb[:] = [web_request_rate_lb[pos] for pos in positions]

                # accumulate per-IP counters; the original relied on a bare
                # except around self.proxy_vmes[IP] - test membership instead
                if IP in self.proxy_vmes:
                    machine_aux = self.proxy_vmes[IP]
                    machine_aux.slo_violations = machine_aux.slo_violations + proxy_slo_violations
                    machine_aux.served_pages = machine_aux.served_pages + proxy_pages_served
                else:
                    self.proxy_vmes[IP] = Virtual_Machine(IP, proxy_slo_violations, proxy_pages_served)

            # remove tuples which contain no remaining samples
            # (for the proxy tuples, CONSTRAINTS[2] = web_response_time_lb)
            PROXY[:] = [(IP, CONSTRAINTS) for IP, CONSTRAINTS in PROXY if CONSTRAINTS[2]]

            #clear the dictionary so that it can be used by other services
            NotDuplicates.clear()
        except Exception as e:
            print 'proxyAnalyse Error: ' + e.message


    # Write the step-1 analysis (the SLO-violating samples still held in the
    # WEB / BACKEND / PROXY lists) to the file 'analyse_Step1'.
    def W_analyse_Step1(self):
        try:
            with open('analyse_Step1', 'w') as f:
                f.write(web_desc + '\n')
                for IP, CONSTRAINTS in WEB:
                    f.write('VM IP addr: ' + IP + '\n')
                    f.write('timestamps: ' + str(CONSTRAINTS[0]) + '\n')
                    f.write('cpu_user: ' + str(CONSTRAINTS[1]) + '\n')
                    f.write('web_resp_time: ' + str(CONSTRAINTS[2]) + '\n')
                    f.write('web_req_rate: ' + str(CONSTRAINTS[3]) + '\n')
                    f.write('\n\n')

                f.write(backend_desc + '\n')
                for IP, CONSTRAINTS in BACKEND:
                    f.write('VM IP addr: ' + IP + '\n')
                    f.write('timestamps: ' + str(CONSTRAINTS[0]) + '\n')
                    f.write('cpu_user: ' + str(CONSTRAINTS[1]) + '\n')
                    f.write('php_resp_time: ' + str(CONSTRAINTS[2]) + '\n')
                    f.write('php_req_rate: ' + str(CONSTRAINTS[3]) + '\n')
                    f.write('\n\n')

                f.write(proxy_desc + '\n')
                for IP, CONSTRAINTS in PROXY:
                    f.write('VM IP addr: ' + IP + '\n')
                    f.write('timestamps: ' + str(CONSTRAINTS[0]) + '\n')
                    f.write('php_response_time_lb: ' + str(CONSTRAINTS[1]) + '\n')
                    f.write('web_response_time_lb: ' + str(CONSTRAINTS[2]) + '\n')
                    f.write('php_request_rate_lb: ' + str(CONSTRAINTS[3]) + '\n')
                    f.write('web_request_rate_lb: ' + str(CONSTRAINTS[4]) + '\n')
                    f.write('\n\n')
        except IOError as e:
            print 'I/O Error: ({0}): {1}'.format(e.errno, e.strerror)

    def W_analyse_StepX(selfs, file, SERVICE, description):
        try:
            f = open(file, 'a')
            try:
                f.write('\n\n' + description + '\n\n')       #type of Service printed(WEB, BACKEND)
                for IP in SERVICE:                           #iterate through the keys(IPs)
                    f.write(IP + ' :' + '\n')
                    for pos in range(len(SERVICE[IP].items())):  # iterate through the constraints(cpu_user, etc.)
                        f.write('\t' + str(SERVICE[IP].items()[pos]) + '\n')
            finally:
                f.close()
        except Exception as e:
            print 'W_analyse_Step2 Error: ' + e.message


    def mergeData(self, SERVICE, names):
        service_IPs = collections.defaultdict(lambda: collections.defaultdict(list))

        for IP, CONSTRAINTS in SERVICE:
            for i in range(len(CONSTRAINTS)):
                service_IPs[IP][names[i]].extend(CONSTRAINTS[i])

        return service_IPs


    # Step 2: merge the per-line tuples of each service into one dictionary
    # per IP and rebind the global service names (WEB, BACKEND, PROXY) to
    # those dictionaries; intermediate results go to 'analyse_Step2'.
    def gatherData(self):
        global WEB, BACKEND, PROXY
        file = 'analyse_Step2'

        try:
            # (dead initialization of an auxiliary dict removed)
            d = self.mergeData(WEB, ['timestamps', 'cpu_user', 'web_response_time', 'web_request_rate'])
            del WEB                             # release the list before rebinding
            WEB = d                             # from now on WEB is a multi-dictionary with merged data
            self.W_analyse_StepX(file, WEB, web_desc)  # write results of step 2

            d = self.mergeData(BACKEND, ['timestamps', 'cpu_user', 'php_response_time', 'php_request_rate'])
            del BACKEND
            BACKEND = d
            self.W_analyse_StepX(file, BACKEND, backend_desc)

            d = self.mergeData(PROXY,
                ['timestamps', 'php_response_time_lb', 'web_response_time_lb', 'php_request_rate_lb',
                 'web_request_rate_lb'])
            del PROXY
            PROXY = d
            self.W_analyse_StepX(file, PROXY, proxy_desc)

        except Exception as e:
            print 'gatherData Error: ' + e.message

    # Drop the first time_to_settle seconds of every IP's data: during that
    # warm-up window the measurements are not considered realistic.
    # Mutates SERVICE in place and also returns it; IPs left with no data are
    # removed entirely. Returns None when an unexpected error occurs.
    def filter(self, SERVICE):
        time_to_settle = 1800.0     # 1800 sec = 30 min until the services are
                                    # considered settled / realistic
        null_items = set()          # IPs with no data left after filtering

        try:
            for IP in SERVICE:      #iterate through the keys(IPs)

                # The first entry of items() is assumed to hold the timestamps
                # (matches the insertion order used by mergeData, which adds
                # 'timestamps' first). NOTE(review): this relies on dict item
                # order - confirm it holds for every SERVICE passed in.
                timestamps = SERVICE[IP].items()[0][1]

                starting_time = timestamps[0]  # first timestamp of this IP

                # find the first sample past the settle window; reset per IP
                # (BUGFIX: the index was previously carried over from the
                # previous IP, truncating later IPs by the wrong offset)
                i = 0
                while i < len(timestamps) and timestamps[i] < (starting_time + time_to_settle):
                    i = i + 1

                # drop the first "i" samples of every constraint, in place
                for pos in range(len(SERVICE[IP].items())):
                    constraint = SERVICE[IP].items()[pos][1]
                    constraint[:] = constraint[i:]

                if not timestamps:  # mark empty IPs for removal afterwards
                    null_items.add(IP)

            # remove IPs whose constraints are now empty
            for IP in null_items:
                del SERVICE[IP]

            return SERVICE
        except Exception as e:
            print 'filter Error: ' + e.message

    # Dump (cpu_user, response_time) pairs for one service to 'file', one
    # space-separated pair per line, for plotting.
    # NOTE(review): positions 2 and 3 of items() are assumed to hold cpu_user
    # and php_response_time - this relies on dict item order; confirm.
    def plotData(self, file, SERVICE):
        try:
            with open(file, 'w') as f:
                for IP in SERVICE:  # iterate through the keys (IPs)
                    cpu_user = SERVICE[IP].items()[2][1]
                    php_response_time = SERVICE[IP].items()[3][1]
                    for i, resp in enumerate(php_response_time):
                        f.write(str(cpu_user[i]) + ' ' + str(resp) + '\n')
        except Exception as e:
            print 'plotData Error: ' + e.message



    # remove first X minutes from the execution, it is considered not realistic because of the flash crowds
    def filterServices(self):
        file = 'analyse_Step3'
        inputFile = INPUT_F[INPUT_F.find('/')+1:]   # extract the name of the inputfile
        try:
            self.filter(WEB)
            self.W_analyse_StepX(file, WEB, web_desc)

            self.filter(BACKEND)
            self.W_analyse_StepX(file, BACKEND, backend_desc)
            self.plotData('plot/' + inputFile + '_Backend_Cpu_Resp.dat',BACKEND)

            self.filter(PROXY)
            self.W_analyse_StepX(file, PROXY, proxy_desc)
        except Exception as e:
            print 'filterData Error: ' + e.message


    # Build, for each IP of SERVICE, a histogram ("weight") of its cpu_user
    # samples and derive yellow/red cpu threshold areas from it; the results
    # are appended to 'analyse_Step4'.
    #   @SERVICE:     dict IP -> {constraint: values} (after gatherData/filter)
    #   @vm:          dict IP -> Virtual_Machine carrying slo_violations
    #   @description: service banner written before the data
    def weight(self, SERVICE, vm, description):
        # NOTE(review): 'average' is never used below - apparently a leftover
        # of the "weight average" mentioned in the original header comment.
        average = 0.0
        try:

            f = open('analyse_Step4', 'a')
            f.write('~~~~~~~~~~~~~~~~~~  FILE : ' + INPUT_F + '  ~~~~~~~~~~~~~~~~~~\n')
            f.write(description + '\n')
            try:
                for IP in SERVICE:      #iterate through the keys(IPs)
                    # histogram slots for the cpu percentages 0..100
                    w=[0 for x in range(101)]

                    # position 2 of items() is assumed to hold cpu_user
                    # (matches the insertion order used by mergeData for WEB
                    # and BACKEND). NOTE(review): relies on dict item order.
                    cpu_user = SERVICE[IP].items()[2][1]

                    # count how often each integer cpu percentage occurs
                    # (note: % 100 folds a value of exactly 100 into slot 0)
                    for i in range(len(cpu_user)):
                        w[int(cpu_user[i]) % 100] +=1

                    # We only analyze vmes that caused more than 20% of the total slo violations.
                    # NOTE(review): sum(w) is the total sample count, so this
                    # compares samples against 20% of this VM's violation
                    # count - confirm that is the intended condition.
                    if sum(w) > (20 * vm[IP].slo_violations/100):

                        # sample-count budgets used to place the area bounds
                        redArea_lower = 10*sum(w) / 100
                        redArea_upper = 60*sum(w) / 100
                        yellowArea_upper = 75*sum(w) / 100

                        # lowest / highest cpu percentage actually observed
                        minVal = 100;
                        maxVal = 0;
                        for it in range(len(w)):
                        # Maximum and Minimum metric values
                            if w[it] > 0 and maxVal < it:
                                maxVal = it
                            if w[it] > 0 and minVal > it:
                                minVal = it

                        #write result to file
                        f.write('IP: ' + IP + ' ----> ' + str(w) + '\n')
                        f.write('Min_cpu_value: ----> ' + str(minVal) + '\n')
                        f.write('Max_cpu_value: ----> ' + str(maxVal) + '\n')
                        # NOTE(review): the label says "RedArea_lower" but the
                        # value written is redArea_upper - suspected typo.
                        f.write('RedArea_lower '+ str(redArea_upper) + '\n')
                        f.write('Total weight values '+ str(sum(w)) + '\n')


                        temp = redArea_lower_bound = redArea_upper_bound = 0
                        if minVal > 20:
                            redArea_lower_bound = minVal - ((30*minVal)/100)
                        else:
                            redArea_lower_bound = 20

                        # walk the histogram from the top and find the cpu
                        # values below which the red/yellow sample budgets fit
                        temp=0
                        for it in reversed(range(len(w))):
                            temp += w[it]
                            if temp <= redArea_upper:
                                redArea_upper_bound = it
                            if temp <= yellowArea_upper:
                                yellowArea_upper_bound = it

                        # NOTE(review): yellowArea_upper_bound is only assigned
                        # inside the loop above; if its condition never holds
                        # the next write raises NameError (caught by the outer
                        # except) - confirm this cannot happen in practice.
                        f.write('yellowArea_lower_bound '+ str(minVal)+ ' yellowArea_Upper_bound '+ str(yellowArea_upper_bound)+'\n')
                        f.write('redArea_lower_bound '+ str(redArea_lower_bound)+ ' redArea_Upper_bound '+ str(redArea_upper_bound)+'\n')
            finally:
                f.close()
        except Exception as e:
            print 'weight Error: ' + e.message

    # Step 4: compute the cpu weight distributions (and hence the cpu
    # thresholds) for the WEB and BACKEND services.
    def computeThresholds(self):
        try:
            self.weight(WEB, self.web_vmes, web_desc)
            self.weight(BACKEND, self.backend_vmes, backend_desc)

            # not applicable to PROXY because it carries no cpu values
            # self.weight(PROXY, self.proxy_vmes, proxy_desc)

        except Exception as e:
            print 'computeThresholds Error: ' + e.message

    # Step 2 driver: keep only the SLO-violating samples of every service,
    # write the intermediate report, then merge the data per IP.
    def analyseServices(self):
        try:
            # keep only the data that violates the SLO
            self.webAnalyse()
            self.backendAnalyse()
            self.proxyAnalyse()

            # write the intermediate result to file
            self.W_analyse_Step1()

            # merge into per-IP dictionaries and write intermediate results
            self.gatherData()

        except Exception as e:
            print 'analyseData Error: ' + e.message


    # Read the FILE, line by line and then extract constraints at each iteration
    def parseFile(self):
        try:
            f = open(INPUT_F, 'rU')
            try:
                #read file line by line
                line = str(f.readline())
                while line:
                    # match the web server
                    if line.find(web_desc) != -1:
                        line = str(f.readline())
                        self.webServer(line)
                    #match the backend
                    elif line.find(backend_desc) != -1:
                        line = str(f.readline())
                        self.backendServer(line)
                    #match the proxy
                    elif line.find(proxy_desc) != -1:
                        line = str(f.readline())
                        self.proxyServer(line)

                    #go to the next line
                    line = str(f.readline())

                print 'Web Server max (resp_time, cpu_user, req_rate) :', max_resp, max_cpu, max_req
            finally:
                f.close()
        except IOError as e:
            print 'Error opening file: ({0}): {1}'.format(e.errno, e.strerror)

    # Make the results persistent through ProfileDataManager: recreate the
    # database and table, then exercise add / update / delete, printing the
    # table after each operation. (Currently commented out in main().)
    def databaseHandling(self):
        db = ProfileDataManager()

        # create database and drops it if already exists
        db.createDatabase()
        # create table and drops it if already exists
        db.createTable()

        #add entry to TABLE
        db.add('10.10.2.2', 10, 1, 15, 70, 'SMALL', 2, 2000)
        db.printTable()
        print

        #update the same entry (size changes from 'SMALL' to 'HUGE')
        db.update('10.10.2.2', 10, 1, 15, 70, 'HUGE', 2, 2000)
        print
        db.printTable()

        #delete entry
        db.delete('10.10.2.2')
        print
        db.printTable()



    def main(self):
        global BACKEND

        # 1 step - parse the input file and gather data as lists
        self.parseFile()

        #2 step - extract data for VMs that exceed the SLO
        self.analyseServices()

        #3 step - remove unrealistic data and create data for graphics
        self.filterServices()

        #4 step - compute cpu thresholds
        self.computeThresholds()

        #5 step - database handling mysqldb
#        self.databaseHandling()

if __name__ == '__main__':
    # run the full analysis pipeline
    Main().main()