# -*- coding: utf8 -*-

import os, sys, configparser, yara
import logging, logging.handlers
import hashlib, json, binascii
import syslog
import thread
import time
import Queue
import multiprocessing
from processconsumer import *
from yararulemysql import *

# Rule metadata provider; its job() loop runs in a daemon thread and
# presumably keeps the per-(ip, port) rule table refreshed from MySQL
# — TODO confirm against yararulemysql.
yara_rule = YaraRuleMysql()
thread.start_new_thread(yara_rule.job, ())

# One-time snapshot mapping rule name -> level, used when formatting alarms.
global_rule = yara_rule.ruleMysqlInfo()

def read(p):
    """Alarm aggregator loop, run in a dedicated child process.

    Pulls alarm objects off queue *p* and buckets them by epoch second and
    tuple signature; duplicate signatures within the same second only bump
    the existing record's ``hitcount``.  Each fully-elapsed second's buckets
    are written to syslog exactly once, and buckets older than 10 seconds
    are pruned.

    :param p: ``multiprocessing.Queue`` of alarm objects exposing ``ticks``
              (epoch second, int), ``tuplesig()`` and a mutable ``hitcount``.
    """
    timeseries = {}                    # epoch second -> {tuplesig: alarm}
    prevticks = int(time.time()) - 10  # oldest second not yet flushed

    def _flush(upto):
        # Emit aggregated alarms for seconds [prevticks, upto - 2].
        # FIXME: data arriving later than this window is silently dropped.
        for sec in range(prevticks, upto - 1):
            if sec in timeseries:
                for sig in timeseries[sec].keys():
                    alarm = timeseries[sec][sig]
                    syslog.syslog("LOG_YARA_CONTENT;{str_alarm};".format(str_alarm=str(alarm)))

    def _prune(now):
        # Drop buckets older than 10 seconds; they were already flushed.
        stale = [sec for sec in timeseries.keys() if sec < (now - 10)]
        for sec in stale:
            del timeseries[sec]

    while True:
        try:
            # Block up to 5 seconds waiting for the next alarm.
            value = p.get(True, 5)
            ticks = value.ticks
            sig = value.tuplesig()

            bucket = timeseries.setdefault(ticks, {})
            if sig in bucket:
                # Same signature in the same second: aggregate in place.
                bucket[sig].hitcount += 1
            else:
                bucket[sig] = value
        except Queue.Empty:
            # Queue idle: advance the clock anyway so buffered seconds
            # still get flushed and pruned while no traffic arrives.
            ticks = int(time.time())

        _flush(ticks)
        prevticks = ticks - 1
        _prune(ticks)

# Bounded inter-process queue feeding the aggregator; producers check
# full() and drop alarms when the consumer falls behind (load shedding).
processlog = multiprocessing.Queue(6400)
# Dedicated child process running the read() aggregation loop above.
processread = multiprocessing.Process(target=read,args=(processlog,))
processread.start()
                                  
class YaraScanner():
    """Matches intercepted HTTP traffic against compiled YARA rule sets.

    URLs are scanned with the ``url_rules`` set and response bodies with the
    ``content_rules`` set (both paths come from ``config.ini`` next to this
    file).  Matched samples are persisted as JSON, alarms are logged to
    syslog or queued on ``processlog``, and the caller-supplied ``action``
    list receives the configured verdict for the matched rule.
    """

    def __init__(self):
        # All paths are resolved relative to this source file's directory.
        self.maindir = os.path.dirname(os.path.abspath(__file__))
        self.log_path = os.path.join(self.maindir, 'yaraicap.log')
        self.config_path = os.path.join(self.maindir, 'config.ini')
        self.config = configparser.ConfigParser()
        self.config.read(self.config_path)
        self.initLogging()
        self.initYara()

    def initLogging(self):
        """Attach a rotating file handler and a console handler to 'YaraICAP'."""
        logging.captureWarnings(True)
        self.logger = logging.getLogger('YaraICAP')
        self.logger.setLevel(logging.DEBUG)

        # One formatter shared by both handlers.
        formatter = logging.Formatter('[%(asctime)s][%(name)s][%(levelname)s] %(message)s')

        fileHandler = logging.handlers.RotatingFileHandler(self.log_path, maxBytes=2000000, backupCount=5)
        fileHandler.setFormatter(formatter)
        self.logger.addHandler(fileHandler)

        consoleHandler = logging.StreamHandler()
        consoleHandler.setFormatter(formatter)
        self.logger.addHandler(consoleHandler)

    def initYara(self):
        """Compile the content and URL rule files named in config.ini.

        Terminates the process if either rule file is missing.
        """
        content_rules = self.config.get('config', 'content_rules')
        url_rules = self.config.get('config', 'url_rules')
        if not os.path.isfile(content_rules):
            self.logger.error('Content YARA rules not found')
            # sys.exit() instead of the site-provided exit() builtin, which
            # is not guaranteed to exist (e.g. under python -S).
            sys.exit()

        if not os.path.isfile(url_rules):
            self.logger.error('URL YARA rules not found')
            sys.exit()

        self.cyara = yara.compile(content_rules)
        self.uyara = yara.compile(url_rules)

    def SaveContent(self, chash, content, request_header, response_header, sig):
        """Persist a matched sample as <content_dir>/<chash>.json.

        Skipped when the file already exists, so samples are deduplicated
        by hash.  The body is stored hex-encoded.
        """
        writepath = "{path}/{hash}.json".format(path=self.config.get('config', 'content_dir'), hash=chash)
        if not os.path.isfile(writepath):
            data = {}
            data['rules'] = ','.join(str(x) for x in sig).split(',')
            data['request_header'] = request_header
            data['response_header'] = response_header
            data['content'] = binascii.hexlify(content)
            # 'with' guarantees the handle is closed even if the dump raises.
            with open(writepath, 'w') as f:
                f.write(json.dumps(data, indent=4, sort_keys=True))

    def Scan(self, content, request, request_header, response_header, clientip, action):
        """Run the URL scan (if enabled in config) and the content scan.

        The per-rule verdict, when a rule matches, is written to action[0].
        """
        if self.config.getboolean('config', 'scan_url'):
            self.__ScanUrl(content, request, request_header, response_header, clientip, action)

        self.__ScanContent(content, request, request_header, response_header, clientip, action)

    def __ScanUrl(self, content, request, request_header, response_header, clientip, action):
        """Match the request URL against the URL rule set and log hits."""
        url = str(request[1])
        murl = self.uyara.match(data=url)
        if len(murl) > 0:
            contentmd5 = hashlib.md5(url).hexdigest()
            self.SaveContent(contentmd5, content, request_header, response_header, murl)
            rule = murl[0].rule
            ip = str(request_header['host'][0])
            # NOTE(review): this slices everything before the first ':' of
            # the URL, not an actual port number — confirm request[1] format.
            port = str(request[1])[:str(request[1]).find(':')]
            index = next((i for i, d in enumerate(yara_rule.yara_rule[(ip, port)]) if rule in d), None)
            if index is None:
                # Rule matched but no policy entry exists for it; previously
                # this crashed with TypeError on the None subscript.
                return
            if (yara_rule.yara_rule[(ip, port)][index][rule][2] == 1) \
                and (yara_rule.yara_rule[(ip, port)][index][rule][1] == 1):
                self.logger.info("[URL][{hash}][{rules}] {clientip} - {url}".format(hash=contentmd5, clientip=str(clientip[0]), url=url, rules=','.join(str(x) for x in murl)))
                syslog.syslog("LOG_YARA_URL;[URL];{clientip};{serverip};{method};{url};{rule};{level};{act};".format(clientip=str(clientip[0]), serverip=str(request_header['host'][0]), method=str(request[0]), url=url, rule=rule, level=str(global_rule[rule]), act=str(yara_rule.yara_rule[(ip, port)][index][rule][0])))
            action[0] = yara_rule.yara_rule[(ip, port)][index][rule][0]

    def __ScanContent(self, content, request, request_header, response_header, clientip, action):
        """Match the response body against the content rule set; queue hits."""
        url = str(request[1])
        mcontent = self.cyara.match(data=content)
        if len(mcontent) > 0:
            contentmd5 = hashlib.md5(content).hexdigest()
            self.SaveContent(contentmd5, content, request_header, response_header, mcontent)
            rule = mcontent[0].rule
            ip = str(request_header['host'][0])
            # NOTE(review): same questionable "port" slice as in __ScanUrl.
            port = str(request[1])[:str(request[1]).find(':')]
            index = next((i for i, d in enumerate(yara_rule.yara_rule[(ip, port)]) if rule in d), None)
            if index is None:
                # No policy entry for the matched rule; skip instead of
                # crashing with TypeError on the None subscript.
                return
            if (yara_rule.yara_rule[(ip, port)][index][rule][2] == 1) \
                and (yara_rule.yara_rule[(ip, port)][index][rule][1] == 1):
                # ';' is the syslog field separator, so strip it from the URL.
                url = url.replace(';', ' ')
                value = Alarm(time.time(), str(clientip[0]), str(request_header['host'][0]), str(request[0]), url, rule, str(global_rule[rule]), str(yara_rule.yara_rule[(ip, port)][index][rule][0]))
                # Best-effort: drop the alarm when the aggregator queue is full.
                if not processlog.full():
                    processlog.put(value)
            action[0] = yara_rule.yara_rule[(ip, port)][index][rule][0]

if __name__ == '__main__':
    # All setup (reader process, rule-refresh thread) happens at import time
    # above; nothing extra to do when executed directly.
    pass
