# -*- coding: UTF-8 -*-
import re
import time
import sys

from datetime import datetime, timedelta
from selenium import webdriver
from selenium.common.exceptions import TimeoutException, NoSuchElementException, WebDriverException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

from dash_lib.analyse_data import get_lastest_granded_pick
from dash_lib.mongodb_helper import bulk_insert
from config import config
from pyquery import PyQuery as pq


def is_not_finished(last_pick):
    """Decide whether more history pages still need to be loaded.

    Returns False once the newest already-stored pick is visible on the
    current page, or when no 'See older' link becomes visible within 10s;
    otherwise True (caller should page back further).
    """
    if last_pick and is_reach_last_fetch_page(last_pick):
        # The latest graded pick we already have is on screen - done.
        return False
    try:
        WebDriverWait(driver, 10).until(
            EC.visibility_of_element_located(
                (By.XPATH, "//a[contains(text(),'See older')]")))
    except TimeoutException:
        # No pagination link appeared - nothing older to fetch.
        return False
    return True


def loadNext():
    """Click the 'See older' link to append the next page of picks.

    Uses the ``find_element(By.XPATH, ...)`` API: the shorthand
    ``find_element_by_xpath`` helper was removed in Selenium 4 (the
    generic form also works on Selenium 3, so this stays compatible).
    """
    driver.find_element(By.XPATH, "//a[contains(text(),'See older')]").click()
    time.sleep(1)  # give the AJAX-appended content a moment to render


def not_empty(s):
    """Return ``s.strip()`` when *s* is truthy, else *s* unchanged.

    Doubles as a filter predicate: falsy inputs (None, '') and
    whitespace-only strings are falsy after stripping.
    """
    return s.strip() if s else s


def is_reach_last_fetch_page(last_pick):
    """Return True when the currently rendered picks include *last_pick*.

    *last_pick* is a dict with 'match', 'pick', 'stake' and 'kickoff_time'
    keys; rows from handling() are tuples where those fields sit at
    indices 0, 1, 3 and 10 respectively.
    """
    rows = handling()
    print(LOG_THREAD_TAG, curent_tipper, u'----- index:', len(rows))

    def _matches(row):
        return (row[0] == last_pick['match']
                and row[1] == last_pick['pick']
                and row[3] == last_pick['stake']
                and row[10] == last_pick['kickoff_time'])

    return any(_matches(row) for row in rows)


def get_last_updat_time(parent_view):
    """Parse a pick card's 'last updated' label into a datetime.

    Relative labels ('N secs/mins/hrs ago') are subtracted from now;
    anything else is treated as an absolute timestamp like
    'Wed, Aug 19th, 2020, 14:30' (ordinal suffixes are stripped first).
    """
    age_node = (parent_view
                .find('.title-name')
                .find('.col-sm-5.col-lg-6.title-age')
                .find('.bet-age.text-muted'))
    now = datetime.now()

    raw = age_node.text()
    # Drop ordinal day suffixes ('19th' -> '19') so strptime can cope.
    for suffix in ('st', 'nd', 'rd', 'th'):
        raw = raw.replace(suffix, '')

    if 'secs ago' in raw:
        return now - timedelta(seconds=int(raw.replace('secs ago', '')))
    if 'mins ago' in raw or 'min ago' in raw:
        return now - timedelta(
            minutes=int(raw.replace('mins ago', '').replace('min ago', '')))
    if 'hrs ago' in raw or 'hr ago' in raw:
        return now - timedelta(
            hours=int(raw.replace('hrs ago', '').replace('hr ago', '')))
    return datetime.strptime(raw, '%a, %b %d, %Y, %H:%M')


def handling():
    """Parse every pick card currently rendered on the page.

    Reads ``driver.page_source`` and returns a list of 13-tuples:
    (match, pick, odd, stake, pick_type, profit, result, sport, category,
    kickoff, kickoff_time, last_update, book_maker).
    Cards that fail to parse are logged and skipped.
    """
    html = driver.page_source
    resultList = []
    if not html:
        print(LOG_THREAD_TAG, curent_tipper, u'抓取网页失败，跳过')
        return resultList

    # BUGFIX: the original pattern r'\r\n|\n|\r/gm' leaked JavaScript regex
    # flags ('/gm') into the Python pattern, so a bare '\r' was never
    # normalised (only the literal text '\r/gm' matched). Compile the
    # corrected pattern once, outside the loop.
    newline_re = re.compile(r'\r\n|\r|\n')

    doc = pq(html)
    _last_kickoff_date = None  # combo cards inherit the previous kickoff
    for item in doc('.media-body').items():
        try:
            last_updat_time = get_last_updat_time(item)
            last_update = last_updat_time.strftime('%Y-%m-%d %H:%M')
            item_view = item.find('.feed-pick-title').find('.col-xs-12.no-padding')
            plain_item_view_txt = newline_re.sub(";", item_view.text())

            if item_view.find('h3').find('.combo-toggle') or 'Combo' in plain_item_view_txt:
                pick_type = match = sport = 'Combo'
                category = ''
                # Combo cards carry no kickoff of their own; fall back to
                # the card's last-update time if nothing better is known.
                if not _last_kickoff_date:
                    _last_kickoff_date = last_updat_time
            else:
                match = item_view.find('h3').find('a').text()
                if not match:
                    # Title link missing: use the first non-blank text line.
                    match = list(filter(not_empty, plain_item_view_txt.split(';')))[0]
                pick_type = 'LIVE' if item_view.find('.labels').find('.label.label-danger') else 'Single'
                sport_line = newline_re.sub(
                    "", item_view.find('.sport-line').text().replace('Kick off:', ''))
                sport_line_splits = sport_line.split('/')
                sport = sport_line_splits[0]
                category = sport_line_splits[1]
                _last_kickoff_date = datetime.strptime(
                    not_empty(sport_line_splits[2]), '%d %b %Y, %H:%M')

            kickoff = _last_kickoff_date.strftime('%Y-%m-%d')
            kickoff_time = _last_kickoff_date.strftime('%H:%M')
            pick_parts = item_view.find('.pick-line').text().split('@')
            pick = pick_parts[0]
            odd = float(pick_parts[1])  # IndexError if no '@' -> card skipped
            stake = int(item_view.find('.labels').find('.label.label-default').text().replace('/10', ''))
            book_maker = item_view.find('.labels').find('.label.label-primary').text()

            win = item_view.find('.labels').find('.enable-tooltip.text-green')
            lost = item_view.find('.labels').find('.enable-tooltip.text-danger')
            try:
                profit = float(win.text() if win else lost.text())
            except ValueError:
                profit = 0  # label text not numeric (e.g. still-pending pick)
            result = win.attr('data-original-title') if win else lost.attr('data-original-title')

            resultList.append((
                not_empty(match), not_empty(pick), odd, stake, pick_type, profit,
                not_empty(result), not_empty(sport), not_empty(category),
                not_empty(kickoff), kickoff_time, last_update, book_maker))

        except (IndexError, ValueError) as e:
            # One malformed card should not abort the page; log and move on.
            print(LOG_THREAD_TAG, str(e))

    return resultList


def save_db(blogname, rowlist):
    """Persist one tipster's scraped pick rows via the MongoDB bulk helper.

    :param blogname: tipster identifier, used as the insert target name.
    :param rowlist: list of pick tuples produced by ``handling()``.
    """
    bulk_insert(blogname, rowlist)


def fetchBlog(index, blog_array):
    """Scrape one tipster's picks page and store the parsed rows.

    Navigates to the tipster's blog, opens the 'picks' tab, pages back
    until the most recently stored pick is visible, then parses and saves
    everything on screen.

    Uses the generic ``find_element(By..., ...)`` locators: the
    ``find_element_by_*`` shorthands were removed in Selenium 4 (the
    generic form also works on Selenium 3).

    :param index: position of the tipster inside *blog_array* (for logging).
    :param blog_array: full list of tipster names handled by this worker.
    """
    blogname = blog_array[index]
    last_pick = get_lastest_granded_pick(blogname)
    time.sleep(2)
    driver.get(config.get_search_link(blogname))

    # A consent/submit dialog may or may not be present; dismiss best-effort.
    try:
        driver.find_element(By.XPATH, '//button[@id="impliedsubmit"]').click()
    except (NoSuchElementException, WebDriverException):
        pass
    time.sleep(2)

    driver.find_element(By.XPATH, '//a[@class="btn btn-default btn-blogmenu"]').click()
    time.sleep(2)
    WebDriverWait(driver, 30).until(
        EC.visibility_of_element_located((By.XPATH, '//a[@data-value="picks"]')))
    driver.find_element(By.XPATH, '//a[@data-value="picks"]').click()

    WebDriverWait(driver, 20).until(
        EC.visibility_of_element_located((By.XPATH, '//div[@id="_blogInnerContent"]')))

    # Paid tipsters have an extra filter row; toggle it when present.
    try:
        driver.find_element(By.XPATH, '//span[@data-original-title="Tipster offers paid service"]')
        driver.find_element(By.CLASS_NAME, 'tr-filter').click()
        time.sleep(2)
    except NoSuchElementException:
        pass

    # Keep paging back until the newest already-stored pick is visible.
    while is_not_finished(last_pick):
        loadNext()

    resultList = handling()
    save_db(blogname, resultList)

    print(LOG_THREAD_TAG, blogname + ' fetched and save ！! ', LOG_THREAD_TAG,
          'now , tippers index is ', index, ' , ', len(blog_array) - index - 1, ' left')

def fetch(blog_array):
    """Scrape every tipster in *blog_array* in order, retrying once on timeout.

    Updates the module-level ``curent_tipper`` so log lines from nested
    helpers can identify which tipster is being processed.
    """
    global curent_tipper
    for index, tipper in enumerate(blog_array):
        curent_tipper = tipper
        try:
            fetchBlog(index, blog_array)
        except TimeoutException as err:
            print(LOG_THREAD_TAG, str(err))
            print(LOG_THREAD_TAG, 'time out! retrying ... ', curent_tipper)
            # One retry; a second timeout propagates to the caller.
            fetchBlog(index, blog_array)
        except NoSuchElementException:
            pass


# --- Script entry point --------------------------------------------------
# Usage: python <script> <thread-tag> <tipper> [<tipper> ...]
# Guarded so that importing this module no longer launches a browser or
# reads sys.argv; the globals below are only bound when run as a script.
if __name__ == '__main__':
    driver = webdriver.Chrome()
    LOG_THREAD_TAG = 'Thread ' + sys.argv[1] + ' : '
    curent_tipper = ''
    print(LOG_THREAD_TAG, '参数个数为:', len(sys.argv), '个参数。')
    print(LOG_THREAD_TAG, '参数列表:', str(sys.argv))
    fetch(sys.argv[2:])
