# -*- coding: utf-8 -*-

import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
import commonlib
import re
import datetime
import time
import traceback
import threading
from MongoModel import MongoModel
from difflib import SequenceMatcher
from workerpool import WorkerPool
from termcolor import cprint
from CrawlerLib import Log, Http

MONGO_SERVER = 'beta.mana.vn'
#MONGO_SERVER = '27.0.12.106'
MONGO_PORT = 27017

class Crawler(MongoModel):
    
    listGiaiDau = {55: ['92', 'Anh'], 2: ['34', 'Ý'], 9: ['39', 'Đức'], 30: ['93', 'Pháp'], 3: ['85', 'Tây Ban Nha'], 6: ['74', 'Champions League'], 7: ['58', 'Europa League']}
    url = 'http://www.asianbookie.com/index.cfm?league={0}&tz=7'
    tYear = datetime.datetime.now().year
    
    def __init__(self, host, port):
        MongoModel.__init__(self, host, port)
        self.fixture = {}
        self.min_max_fixture = {}
        
    def standardizeTimeValue(self, timeString):
        elems = timeString.split(',')
        for i in range(len(elems)):
            elems[i] = int(elems[i])
        myTime = datetime.datetime(elems[0], elems[1], elems[2], elems[3], elems[4], elems[5]) - datetime.timedelta(hours=1)
        return myTime + datetime.timedelta(seconds=time.timezone)
    
    def standardizeTimeAsian(self, timeString):
        timeString = "{0}/{1}".format(self.tYear, timeString)
        try:
            return datetime.datetime.strptime(timeString, "%Y/%d/%b  %H:%M") + datetime.timedelta(seconds=time.timezone)
        except:
            print timeString
            raise Exception("ERROR: error occurred when standardize {timeStr=({0})}".format(timeString))
       
    def parent(self, node, tag, maxLevel=3):
        ilv = 0
        pnode = node
        while pnode.getparent() != None and ilv <= maxLevel:
            ilv += 1
            if pnode.tag == tag: break
            pnode = pnode.getparent()
        return pnode
    
    def getFixture2(self, g7mId):
        logger.debug('call getFixture2({0})'.format(g7mId))
        url = 'http://data.7m.cn/matches_data/{0}/vn/matches.js?nocache={1}'.format(g7mId, time.time())
        try:
            patterns = {'var\s*': '', '\[\]': '{}', 'new Array\(\)': '{}', ';': ';\n'}
            html = Http.getHtml(url)
            defaultord = 0; TeamB_arr = {}; TeamA_arr = {}
            Start_time_arr = {}; live_bh_arr = {}
            for i, v in patterns.items(): html = re.sub(i, v, html)
            for line in html.splitlines():
                exec(line)
            for i in range(len(live_bh_arr[defaultord])):
                self.fixture[live_bh_arr[defaultord][i]] = {'teamA': TeamA_arr[defaultord][i], 'teamB': TeamB_arr[defaultord][i], 'time': self.standardizeTimeValue(Start_time_arr[defaultord][i])}
        except:
            logger.error(traceback.format_exc())
            
    def getFixture(self, g7mId):
        logger.debug('start getFixture({0})'.format(g7mId))
        try:
            url = 'http://data.7m.cn/matches_data/{0}/vn/fixture.js?a={1}'.format(g7mId, time.time())
            html = Http.getHtml(url)
            html = html.replace(';',';\n')
            lines = re.findall(r".*var\s(.+);", html)
            Tmp_bh_Arr,  TeamA_Arr, TeamB_Arr, Time_Arr = [], [], [], []
            for line in lines:
                exec(line)
            soTranDau = len(Tmp_bh_Arr)
            for i in range(soTranDau):
                self.fixture[Tmp_bh_Arr[i]] = {'teamA': TeamA_Arr[i], 'teamB': TeamB_Arr[i], 'time': self.standardizeTimeValue(Time_Arr[i])}
        except:
            logger.error(traceback.format_exc())
    
    def compare(self, d1, d2):
        if d1.month == d2.month:
            if d1.day > d2.day: return -1
            elif d1.day < d2.day: return 1
            else: return 0
        elif d1.month > d2.month: return -1
        else: return 1
    
    def getRangeOfMatchDate(self, minDate, maxDate):
        for k, v in self.fixture.items():
            cdate = v['time']
            minc = self.compare(minDate, cdate)
            maxc = self.compare(cdate, maxDate)
            if ((minc==0 or minc==1) and (maxc==0 or maxc==1)):
                self.min_max_fixture[k] = v
    
    def detectIdOfMatch(self, timeOfMatch, teamVsteam):
        logger.debug('detectIdOfMatch({0}, {1})'.format(timeOfMatch, teamVsteam))
        if len(self.min_max_fixture) == 0: return
        data = []
        for k, v in self.min_max_fixture.items():
            if (v['time'].day == timeOfMatch.day) and (v['time'].month == timeOfMatch.month): data.append(k)
        sq = SequenceMatcher()
        cratio = 0.0
        fId = ''
        for i in data:
            v = self.min_max_fixture[i]
            teamVsteam7m = "{0} vs {1}".format(v['teamA'], v['teamB'])
#            print "{0} <> {1}".format(teamVsteam, teamVsteam7m)
            sq.set_seqs(teamVsteam, teamVsteam7m)
            iratio = sq.ratio()
            if cratio < iratio: 
                cratio = iratio
                fId = i
        return fId
    
    def tyLeDo(self, giaiDauId):
        logger.debug('---------- Giải đấu {0} -----------'.format(self.listGiaiDau[giaiDauId][1]))
        url = self.url.format(giaiDauId)
        try:
            if giaiDauId in [6, 7]:
                self.getFixture2(self.listGiaiDau[giaiDauId][0])
            else:
                self.getFixture(self.listGiaiDau[giaiDauId][0])
            tree = Http.getXMLTree(url)
            if tree == '' or tree == None: return
            # ----------------------------------
            # lay phan asian handicap, fixed odd
            # ----------------------------------
            print '------------ Lấy thông tin của từng trận của giải đấu {0} trên trang AsianBookie -------------'.format(self.listGiaiDau[giaiDauId][1])
            data = {}
            for item in tree.xpath("//div[@id='masterdiv']/div/table[1]/tr[position()>3]"):
                timeOfMatch = self.standardizeTimeAsian(commonlib.getElementText(item.xpath("./td[1]"), descendant=1))
                nameNode = item.xpath("./td[2]")
                if len(nameNode) == 0: continue
                commonlib.cleanElementWithTag(nameNode[0], ['table'])
                teamVsteam = re.sub(r'\[\d+\]', '', commonlib.getElementText(nameNode, descendant=1)).strip()
                teamvsArr = teamVsteam.split(' vs ')
                epTeam = commonlib.getElementText(item.xpath("./td[2]//font[@color='000080']"), descendant=1)
                expertsPick = ''
                if epTeam != '':
                    expertsPick = 'home' if epTeam == teamvsArr[0] else 'away'
                statLink = nameNode[0].xpath(".//a[contains(@href, 'asianbookie.com/h2h.cfm?id=')]")
                statLink = commonlib.urlJoin(url, commonlib.getAttribText(statLink[0], 'href'))
                statData = self.statistict(statLink)
                tkScript = nameNode[0].xpath(".//img[contains(@src, 'asianbookie.com/charticon.gif')]/..")
                tkScript = commonlib.getAttribText(tkScript, 'onclick')
                tkLink = commonlib.extractWithRegEx(r"(http://stats.asianbookie.com/oddschart.cfm\?id=\d+)", tkScript, 1)
                tkTyLeDo = self.thongkeTyLeDo(tkLink)
                handicapOdds = {}
                handicapOdds['home'] = commonlib.getElementText(item.xpath("./td[3]"), descendant=1)
                handicapOdds['handicap'] = commonlib.getElementText(item.xpath("./td[4]"), descendant=1)
                handicapOdds['away'] = commonlib.getElementText(item.xpath("./td[5]"), descendant=1)
                fixOdds = {}
                fixOdds['home'] = commonlib.getElementText(item.xpath("./td[6]"), descendant=1)
                fixOdds['draw'] = commonlib.getElementText(item.xpath("./td[7]"), descendant=1)
                fixOdds['away'] = commonlib.getElementText(item.xpath("./td[8]"), descendant=1)
                data[hash(teamVsteam)] = {'time': timeOfMatch, 'handicap': handicapOdds, 
                        'fixodds': fixOdds, 'name': teamVsteam, 
                        'statistics': statData, 'thongkedo': tkTyLeDo, 'expertsPick': expertsPick}
            # ----------------------------------
            # lay phan over/under
            # ----------------------------------
            for item in tree.xpath("//div[@align='center']/table/tr[position()>3]"):
                nameNode = item.xpath("./td[2]")
                if len(nameNode) == 0: continue
                commonlib.cleanElementWithTag(nameNode[0], ['table'])
                teamVsteam = re.sub(r'\[\d+\]', '', commonlib.getElementText(nameNode, descendant=1)).strip()
                if teamVsteam == '': continue
                hashKey = hash(teamVsteam)
                if not data.has_key(hashKey): continue
                overUnder = {}
                overUnder['over'] = commonlib.getElementText(item.xpath("./td[3]"), descendant=1)
                overUnder['handicap'] = commonlib.getElementText(item.xpath("./td[4]"), descendant=1)
                overUnder['under'] = commonlib.getElementText(item.xpath("./td[5]"), descendant=1)
                data[hashKey]['overUnder'] = overUnder
            
            if len(data) == 0:
                logger.info("Giải đấu {0} không có trận đấu nào".format(self.listGiaiDau[giaiDauId][1]))
                return
            
            minDate = ''
            maxDate = ''
            for v in data.values():
                if (minDate == ''): minDate = v['time']
                if (maxDate == ''): maxDate = v['time']
                if (v['time'] < minDate): minDate = v['time']
                if (v['time'] > maxDate): maxDate = v['time'] 
            print 'maxDate: {0}'.format(maxDate)
            print 'minDate: {0}'.format(minDate)
            self.getRangeOfMatchDate(minDate, maxDate)
            db = self.connection['bongda']
            collection = db['tyledo']
            for v in data.values():
                _id = self.detectIdOfMatch(v['time'], v['name'])
                if _id != '':
                    cprint('INFO: id of match: {0}'.format(_id), 'yellow')
                    collection.save({
                        '_id': _id,
                        'handicap': v['handicap'],
                        'overUnder': v['overUnder'],
                        'fixodds': v['fixodds'],
                        'time': v['time'],
                        'teamA': self.min_max_fixture[_id]['teamA'],
                        'teamB': self.min_max_fixture[_id]['teamB'],
                        'timestamp': time.time(),
                        'lastupdate': datetime.datetime.now(),
                        'giaidau': self.listGiaiDau[giaiDauId][1],
                        'statistics': v['statistics'],
                        'thongkedo': v['thongkedo'],
                        'expertsPick': v['expertsPick'] 
                    })
        except:
            logger.error(unicode(traceback.format_exc(), 'utf8'))
            return

    def thongkeTyLeDo(self, url):
        return []
#        logger.debug('start thongkeTyLeDo url={0}'.format(url))
#        id = commonlib.extractWithRegEx(r'\?id=(\d+)', url, 1)
#        data = []
#        try:
#            reqUrl = 'http://www.asianbookie.com/graphfeed.cfm?id={0}'.format(id)
#            script = commonlib.getHTML(reqUrl, headers={'Referer': url, 'X-Requested-With': 'XMLHttpRequest'})
#            for line in re.findall(r'"(.+)"', script):
#                lineArr = line.split(' ')
#                atTime = int(commonlib.extractWithRegEx(r'(\d+)', lineArr[1], 1))
#                atDate = re.sub(r'(\d{4})-(\d+)-(\d+)', lambda m: "{0}/{1}/{2}".format(m.group(3), m.group(2), m.group(1)), lineArr[0])
#                data.append({'date': atDate, 'time': lineArr[1], 'xtime': atTime, 'tyle': lineArr[3]})
#            print data
#        except:
#            print 'ERROR: thongkeTyLeDo with url='.format(url)
#            logger.error(traceback.format_exc())
#        finally:
#            return data

    def statistict(self, url):
        logger.debug('start statistict url={0}'.format(url))
        result = {'head2head': {}, 'last10result': {}}
        try:
            tree = Http.getXMLTree(url)
            if tree == None or tree == '': return
            lblItem = tree.xpath("//td/font/b//*[contains(., '10-Year Head-to-Head History')]")
            headToHead = []
            if len(lblItem) > 0:
                tableNode = self.parent(lblItem[0], 'table', 5)
                for item in tableNode.xpath("./following-sibling::*[1]/tr"):
                    timeOfMatch = commonlib.getElementText(item.xpath("./td[1]"), descendant=1)
                    timeOfMatch = re.sub(r'[^\w\s]', '', timeOfMatch).strip()
                    timeOfMatch = datetime.datetime.strptime(timeOfMatch, "%B %d %Y").strftime("%d/%m/%Y")
                    teamA = commonlib.getElementText(item.xpath("./td[2]"), descendant=1)
                    teamB = commonlib.getElementText(item.xpath("./td[3]"), descendant=1)
                    score = commonlib.getElementText(item.xpath("./td[4]"), descendant=1)
                    headToHead.append({'teamA': teamA, 'teamB': teamB, 'score': score, 'date': timeOfMatch})
            last10result = []
            for lblItem in tree.xpath("//td/font/b//*[contains(., 'Last 10 results')]"):
                teamName = commonlib.getElementText(lblItem, descendant=1).replace('Last 10 results', '').strip()
                tableNode = self.parent(lblItem, 'table', 5)
                data = []
                for item in tableNode.xpath("./following-sibling::*[1]/tr"):
                    timeOfMatch = commonlib.getElementText(item.xpath("./td[1]"), descendant=1)
                    timeOfMatch = re.sub(r'[^\w\s]', '', timeOfMatch).strip()
                    if timeOfMatch == '': continue
                    timeOfMatch = datetime.datetime.strptime(timeOfMatch, "%b %d %y").strftime("%d/%m/%Y")
                    teamA = commonlib.getElementText(item.xpath("./td[2]"), descendant=1)
                    teamB = commonlib.getElementText(item.xpath("./td[3]"), descendant=1)
                    score = commonlib.getElementText(item.xpath("./td[4]"), descendant=1)
                    status = commonlib.getElementText(item.xpath("./td[5]"), descendant=1)
                    data.append({'teamA': teamA, 'teamB': teamB, 'score': score, 'date': timeOfMatch, 'status': status})
                last10result.append({'team': teamName, 'data': data})
            result['head2head'] = headToHead
            result['last10result'] = last10result
        except:
            logger.error(traceback.format_exc())
        finally:
            return result

def quitIfTimeout():
    """Watchdog thread body: hard-kill the process when the crawl stalls.

    Polls every 10 seconds; if more than 900s (15 min) have passed since the
    module-global `lastaction` timestamp (set in __main__), the process is
    terminated.  `os._exit` is used instead of `sys.exit` because this runs in
    a thread and must take the whole interpreter down.
    """
    logger.debug('------------- check Timeout -------------')
    while True:
        delta = time.time() - lastaction
        if delta > 900:
            logger.info('process timeout {0}'.format(delta))
            # BUGFIX: 'os._exit' was a bare attribute reference (a no-op), so
            # the watchdog logged the timeout but never exited.  Call it with
            # status 1, matching the exit path at the end of __main__.
            os._exit(1)
        time.sleep(10)

       
if __name__ == '__main__':
    # 'logger' and 'lastaction' are module globals read by the Crawler
    # methods and by the quitIfTimeout watchdog thread.
    logger = Log.getLogger('AsianBookie')
    logger.info('start crawler asianbookie')
    crawler = Crawler(MONGO_SERVER, MONGO_PORT)
    lastaction = time.time()
    # watchdog: terminates the process if the crawl stalls for too long
    threading.Thread(target=quitIfTimeout).start()
#    crawler.statistict('http://stats.asianbookie.com/h2h.cfm?id=7631990')
#    crawler.tyLeDo(55)
#    crawler.thongkeTyLeDo('http://stats.asianbookie.com/oddschart.cfm?id=7632935')
    try:
        # crawl every configured league with two concurrent workers
        pool = WorkerPool(size=2)
        pool.map(crawler.tyLeDo, crawler.listGiaiDau.keys())
        pool.shutdown()
        pool.wait()
    except:
        logger.error(traceback.format_exc())
    logger.info('finished crawler asianbookie at {0}'.format(datetime.datetime.now()))
    # os._exit (not sys.exit): the non-daemon watchdog thread would otherwise
    # keep the interpreter alive forever
    os._exit(1)
