from lib.utils.utils import *

import logging
import re
import threading
import time

# 'Iterable' moved to collections.abc in Python 3.3 and the old
# 'collections.Iterable' alias was removed in Python 3.10.
try:
    from collections.abc import Iterable
except ImportError:  # pragma: no cover - very old interpreters
    from collections import Iterable

class baseStn:
    """Base "station": periodically downloads a list of urls and runs the
    profile's scrap -> trig -> notify pipeline on each downloaded page.

    ``jsonCfg`` is expected to behave like the project's config object:
    subscriptable by profile name and exposing ``evalGet(section, key[, kind])``.
    The helpers ``getPage``, ``htmlEntityDecode``, ``getTrigger`` and
    ``getNotifier`` come from ``lib.utils.utils`` (star-imported at file top).
    Custom log levels ``DEBUG_01``/``DEBUG_02`` are presumably installed on the
    ``logging`` module by the project's logging setup — TODO confirm.
    """

    # number of shared string slots; slot 0 holds the raw page, the others
    # are scrap destinations addressed by the 'dest' config key
    BUFFER_SLOTS = 20

    def __init__(self, jsonCfg, profile_name, loggerName):
        self.jsonCfg = jsonCfg
        self.profile_name = profile_name

        # members populated later by run()
        self.thread = None
        self.userAgent = None
        self.defaultValue = None  # kept for backward compatibility

        # zero-filled shared buffer (see BUFFER_SLOTS)
        self.buffer = [''] * self.BUFFER_SLOTS

        # per-profile child logger
        self.loggerName = loggerName
        self.logger = logging.getLogger(self.loggerName + '.' + self.profile_name)
        self.logger.info('Read profile: %s' % profile_name)

    def run(self):
        """Read the profile and start the endless check loop on a worker thread."""
        profile = self.jsonCfg[self.profile_name]

        # gap time before re-check
        gap_check_second = int(self.jsonCfg.evalGet(profile, 'gap_check_second'))
        self.userAgent = self.jsonCfg.evalGet(profile, 'user_agent')

        # list of urls
        urls = self.jsonCfg.evalGet(profile, 'urls')

        self.logger.log(logging.DEBUG_01, 'Url(s) (%s) each %s seconds' % (urls, gap_check_second))

        # a single url is accepted too: normalize to a list
        if not isinstance(urls, list):
            urls = [urls]

        # Run on a Timer thread so run() can return immediately ("fork"-like).
        # BUGFIX: Timer.start() returns None, so the original code stored None
        # in self.thread and the timer could never be cancelled.
        self.thread = threading.Timer(0, self.__check, [profile, urls, gap_check_second])
        self.thread.start()

    def __check(self, item, urls, gap_check_second):
        """Endless loop: fetch every url, scrape it, then sleep and repeat."""
        while True:
            for url in urls:
                self.logger.log(logging.DEBUG, "Get url '%s'" % url)
                htmlPage = getPage(url, self.userAgent)

                if htmlPage is False:
                    self.logger.error("Can't get page: %s \n" % url)
                    # BUGFIX: skip scraping on download failure (the original
                    # stored False in the buffer and scraped it anyway)
                    continue

                # slot 0 holds the raw html of the current page by default
                self.buffer[0] = htmlPage

                # recursive deep visit through scrap
                self.__callScraper(item['scrap'])

            time.sleep(gap_check_second)

    # TODO: all of this should move into a base scraper class
    def __callScraper(self, scrap):
        """Run one scrap item or a list of them.

        Nested scrap items are evaluated from the outer ones to the inner ones.
        """
        # if not a list... make it one
        if not isinstance(scrap, list):
            scrap = [scrap]

        for item in scrap:
            self.__json2scrap(item)

    def __json2scrap(self, scrap):
        """Evaluate one scrap config dict and call the real __scrap function."""
        # optional child sections, executed per match by __scrap()
        notifySubItem = scrap['notify'] if 'notify' in scrap else None
        trigSubItem = scrap['trig'] if 'trig' in scrap else None
        scrapSubItem = scrap['scrap'] if 'scrap' in scrap else None

        destAppend = False
        destCleaned = self.jsonCfg.evalGet(scrap, 'dest')

        if 'dest' in scrap:
            # a trailing '+' on 'dest' means "append to dest" instead of replace
            if str(scrap['dest']).endswith('+'):
                destAppend = True
                # BUGFIX: strip the trailing '+' from the *evaluated* value;
                # the original sliced with the raw key's length, which is
                # wrong whenever evalGet() changes the string length
                destCleaned = str(destCleaned)[:-1]

        # init params with default value or from config file
        # (defaults themselves live in the jsonCfg class)
        regex = self.jsonCfg.evalGet(scrap, 'regex')
        fIn = self.jsonCfg.evalGet(scrap, 'in')
        inIsLink = self.jsonCfg.evalGet(scrap, 'in_is_link')
        fOut = self.jsonCfg.evalGet(scrap, 'out')
        dest = int(destCleaned)
        repeat = self.jsonCfg.evalGet(scrap, 'repeat')
        cs = self.jsonCfg.evalGet(scrap, 'cs')
        htmlEntities = self.jsonCfg.evalGet(scrap, 'html_entities')
        noclean = self.jsonCfg.evalGet(scrap, 'noclean')
        clear = self.jsonCfg.evalGet(scrap, 'clear')

        # call the real scrap function
        return self.__scrap(regex, fIn, inIsLink, fOut, dest, destAppend, repeat, cs, htmlEntities, noclean, clear, notifySubItem, trigSubItem, scrapSubItem)

    # default values here mirror the ones set in the jsonCfg class
    def __scrap(self, regex, fIn='{0}', inIsLink=False, fOut='\\1', dest=1, destAppend=False, repeat=False, cs=False, htmlEntities=True, noclean=False, clear=False, notify=None, trig=None, scrap=None):
        """Run `regex` over the buffer-interpolated input and store the result
        in buffer[dest]; run the scrap/trig/notify children for every match.

        Returns True if at least one match was found, False otherwise.
        """
        # TODO: move the scrap logic to another class (attach the logger there)
        logger = logging.getLogger(self.loggerName + '.' + self.profile_name + '.scraper')
        # BUGFIX: the 'cs' and 'repeat' values were swapped in the original
        # log call, so each was reported under the other's label
        logger.log(logging.DEBUG_01, "regex: %s, in: %s, in_is_link: %s, out: %s, dest: %s%s, cs: %s, repeat: %s, html_entities: %s, noclean: %s, clear: %s)"
                   % (regex, fIn, inIsLink, fOut, dest, '+' if destAppend is True else '', cs, repeat, htmlEntities, noclean, clear))

        # interpolate the input template with the shared buffer
        # (named 'text' so the builtin 'input' is not shadowed)
        text = fIn.format(*self.buffer)
        if inIsLink is True:
            # input is a link: download it first
            logger.log(logging.DEBUG_01, "Get url '%s'" % text)
            # BUGFIX: the class has no getPages() method; use the getPage()
            # helper exactly as __check() does
            text = getPage(text, self.userAgent)

        reFlags = re.DOTALL
        if cs is False:
            # not case sensitive
            reFlags |= re.IGNORECASE

        # Normalize both modes to a (possibly empty) list of match objects.
        # BUGFIX: the original truth-tested the finditer iterator, which is
        # always truthy, so an empty 'repeat' scan was reported as a match,
        # returned True and never honored 'clear'.
        if repeat is True:
            matchs = list(re.finditer(regex, text, reFlags))
        else:
            single = re.search(regex, text, reFlags)
            matchs = [single] if single else []

        if not matchs:
            logger.log(logging.DEBUG_01, "Not Matched! :(")
            # clear destination buffer on no match, if requested
            if clear is True:
                self.buffer[dest] = ''
            return False

        logger.log(logging.DEBUG_01, "Matched! :)")
        for match in matchs:
            if match.lastindex is not None:
                # there are sub-groups: backslash substitution via the template
                output = match.expand(fOut)
                logger.log(logging.DEBUG_02, "Matched > groups expanded output: %s" % output)
            else:
                output = match.group(0)
                logger.log(logging.DEBUG_02, "Matched > string: %s" % output)

            # html entities decode
            if htmlEntities is True:
                output = htmlEntityDecode(output)

            # strip html tags
            if noclean is False:
                output = re.sub('<[^>]*>', '', output)

            # set/append output buffer
            if destAppend is True:
                self.buffer[dest] += output
            else:
                self.buffer[dest] = output

            # call sub-children in order: scrap, trig and notify
            if scrap is not None: self.__callScraper(scrap)
            if trig is not None: self.__callTrigger(trig)
            if notify is not None: self.__callNotifier(notify)

        return True

    def __callTrigger(self, trig):
        """Instantiate and run every configured trigger."""
        # if not a list... make it one
        if not isinstance(trig, list):
            trig = [trig]

        # BUGFIX: the original returned inside the loop, so only the first
        # trigger of a list ever ran
        for item in trig:
            # get trig type, then instantiate and run it
            trigType = self.jsonCfg.evalGet(item, 'type', 'trig')
            triggerClass = getTrigger(trigType)
            trigger = triggerClass(self.jsonCfg, item, self.userAgent, self.buffer, self.loggerName + '.' + self.profile_name + '.scraper')
            trigger.run()

    def __callNotifier(self, notify):
        """Instantiate and run every configured notifier."""
        # if not a list... make it one
        if not isinstance(notify, list):
            notify = [notify]

        # BUGFIX: the original returned inside the loop, so only the first
        # notifier of a list ever ran
        for item in notify:
            # get notify type, then instantiate and run it
            notifyType = self.jsonCfg.evalGet(item, 'type', 'notify')
            notifierClass = getNotifier(notifyType)
            notifier = notifierClass(self.jsonCfg, item, self.buffer, self.loggerName + '.' + self.profile_name + '.scraper')
            notifier.run()
