import datetime
import queue
import re
import time

import pymysql as pymysql
from dateutil.relativedelta import relativedelta
from fake_useragent import UserAgent
from lxml import etree
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By

import spiderConfig


class footballFullFuture:
    """Scrape upcoming football matches from a titan007 listing page with a
    headless Chrome driver and persist them into the ``tp_game`` MySQL table.

    Typical use: ``footballFullFuture(url).main()``.
    """

    chrome_path = r'./chromedriver.exe'

    def __init__(self, url):
        """Start a headless Chrome instance and load DB settings.

        :param url: listing-page URL to scrape (stored as ``self.base_url``).
        """
        chrome_options = Options()
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--disable-gpu')
        self.q = queue.Queue()
        # Selenium 4 removed the deprecated ``chrome_options`` keyword; the
        # supported spelling is ``options`` (the rest of this class already
        # uses the Selenium 4 ``find_element(By.XPATH, ...)`` API).
        self.driver = webdriver.Chrome(options=chrome_options)
        self.base_url = url
        self.__host = spiderConfig.host
        self.__port = spiderConfig.port
        self.__user = spiderConfig.user
        self.__password = spiderConfig.password
        self.__db = spiderConfig.db
        self.__charset = spiderConfig.charset
        self.lists = []
        self.dicts = {}
        self.s = ""
        self.ua = UserAgent()

    def sql_operate(self, lists):
        """Delete previously stored ``tag = 1`` rows in the scraped window,
        then insert every match dict from *lists* into ``tp_game``.

        :param lists: iterable of 28-key match dicts built by assemble_data().
        """
        sql__conn = pymysql.connect(
            host=self.__host,
            port=self.__port,
            user=self.__user,
            password=self.__password,
            db=self.__db,
            charset=self.__charset
        )
        # Column order must match the tp_game column order used by the
        # original positional INSERT.
        columns = ('id', 'game_type', 'match_name', 'match_date', 'home_team',
                   'away_team', 'half_score', 'final_score', 'match_status',
                   'minutes', 'fx_id', 'hc', 'hc_home', 'hc_away', 'ou',
                   'ou_o', 'ou_u', 'hc_half', 'hc_home_half', 'hc_away_half',
                   'ou_half', 'ou_o_half', 'ou_u_half', 'style', 'sort',
                   'create_time', 'update_time', 'tag')
        try:
            # Context manager guarantees the cursor is closed even if an
            # insert raises (the original leaked cursor/connection on error).
            with sql__conn.cursor() as cursor:
                # Purge the tp_game rows for the affected time window first.
                start_dates, end_dates = self.create_yesterday()
                # Parameterized query instead of str.format into SQL text.
                cursor.execute(
                    "DELETE FROM tp_game where  tag = 1 and  create_time BETWEEN %s AND %s",
                    (start_dates, end_dates))
                sql__conn.commit()

                sql_insert = "insert into tp_game values({})".format(
                    ",".join(["%s"] * len(columns)))
                for single in lists:
                    data = [tuple(single[key] for key in columns)]
                    print(data)
                    print('正在插入中')
                    cursor.executemany(sql_insert, data)
                    sql__conn.commit()
        finally:
            sql__conn.close()

    def create_yesterday(self):
        """Return ``(start, end)`` Unix timestamps covering the fixed epoch
        date 1970-01-20 00:00:00 through yesterday 23:59:00 local time.

        NOTE(review): the 1970-01-20 lower bound effectively deletes all
        prior rows; presumably intentional — confirm with the table owner.
        """
        yesterday = str(datetime.date.today() - relativedelta(days=1))
        start_struct = time.strptime("1970-1-20 00:00:00", "%Y-%m-%d %H:%M:%S")
        end_struct = time.strptime(yesterday + " 23:59:00", "%Y-%m-%d %H:%M:%S")
        return int(time.mktime(start_struct)), int(time.mktime(end_struct))

    def get_html(self):
        """Load ``self.base_url`` and return its page source.

        On failure, quits the driver and re-runs the pipeline via main().
        NOTE(review): the retry path reuses the already-quit driver, so a
        persistent failure recurses until it crashes, and the caller then
        receives ``None`` — consider recreating the driver and bounding
        retries instead.
        """
        try:
            self.driver.get(self.base_url)
            return self.driver.page_source
        except Exception:
            self.driver.quit()
            time.sleep(1)
            self.main()

    # Parse the match rows out of the listing page.
    def parse_detail_url(self, complete_data):
        """Return every centered ``<tr>`` row from the listing-page HTML.

        :param complete_data: raw HTML string of the listing page.
        """
        complete_html = etree.HTML(complete_data)
        return complete_html.xpath('//tbody/tr[@align="center"]')

    # Assemble the per-match dicts.
    def assemble_data(self, total_tr):
        """Convert parsed ``<tr>`` rows into tp_game-shaped dicts.

        :param total_tr: list of lxml ``<tr>`` elements; the first row is a
            header and is skipped.
        :return: ``self.lists`` with one dict appended per match row.
        """
        for tr in total_tr[1:]:
            # Rows carrying an inline style attribute are separators, not
            # match rows — skip them.
            if not tr.xpath('@style'):
                tr_id = "".join(tr.xpath('@id'))
                match_name = "".join(tr.xpath('td[1]/font/text()'))
                match_date = "".join(tr.xpath('td[2]/text()'))
                # The site omits the year; prepend the current year and
                # convert "Y-m-d H:M" to a Unix timestamp.
                current_dates = str(datetime.datetime.now().year) + '-' + match_date
                timeArray = time.strptime(current_dates, "%Y-%m-%d %H:%M")
                match_date = int(time.mktime(timeArray))
                # Scraped but not stored below (dict hardcodes 0) — kept for
                # parity with the original; presumably a future field.
                match_status = "".join(tr.xpath('td[3]/text()'))
                home_team = "".join(tr.xpath('td[4]/text()'))
                final_score = "".join(tr.xpath('td[5]/b/text()'))
                away_team = "".join(tr.xpath('td[6]/text()'))
                half_score = "".join(tr.xpath('td[7]/text()'))
                is_hc_half_path = '//*[@id="{}"]/td[8]/a[1]'.format(tr_id)
                hc, hc_home, hc_away, hc_half, hc_home_half, hc_away_half = self.get_selenium_contorl(is_hc_half_path)
                # Second-resolution timestamp string used for both audit columns.
                times = str(time.time())[:10]
                self.dicts = {
                    "id": 0,
                    "game_type": 1,
                    "match_name": match_name,
                    "match_date": match_date,
                    "home_team": home_team,
                    "away_team": away_team,
                    "half_score": half_score,
                    "final_score": final_score,
                    "match_status": 0,
                    "minutes": 0,
                    "fx_id": 0,
                    "hc": hc,
                    "hc_home": hc_home,
                    "hc_away": hc_away,
                    "ou": 0,
                    "ou_o": "",
                    "ou_u": "",
                    "hc_half": hc_half,
                    "hc_home_half": hc_home_half,
                    "hc_away_half": hc_away_half,
                    "ou_half": 0,
                    "ou_o_half": "",
                    "ou_u_half": "",
                    "style": 0,
                    "sort": 1,
                    "create_time": times,
                    "update_time": times,
                    "tag": 1
                }
                self.lists.append(self.dicts)
                print(self.dicts)
        self.driver.close()
        return self.lists

    def get_selenium_contorl(self, is_hc_half_path):
        """Open the odds detail window for one match and scrape full-time and
        half-time handicap odds.

        :param is_hc_half_path: XPath of the link that opens the detail window.
        :return: ``(hc, hc_home, hc_away, hc_half, hc_home_half,
            hc_away_half)`` — empty strings for anything not scraped.  Always
            returns the 6-tuple (the original returned ``None`` when no second
            window opened, crashing the caller's unpack).
        """
        hc = hc_home = hc_away = ''
        hc_half = hc_home_half = hc_away_half = ''
        # Tracks whether a second window was opened so the except-branch can
        # close it; replaces the original module-level globals, which raised
        # NameError if the very first click failed.
        opened_window = False
        odds_pattern = re.compile(
            '''36\*.*?<td oddstype="wholeLastOdds">(.*?)</td>.*?<td goals=".*?" oddstype="wholeLastOdds">(.*?)</td>.*?<td oddstype="wholeLastOdds">(.*?)</td>''',
            re.S)

        def _clean(value):
            # The site renders a missing odds cell as a non-breaking space.
            value = str(value).strip()
            return '' if value == '&nbsp;' else value

        try:
            self.driver.find_element(By.XPATH, is_hc_half_path).click()
            if len(self.driver.window_handles) == 2:
                opened_window = True
                time.sleep(1)
                self.driver.implicitly_wait(10)
                self.driver.switch_to.window(self.driver.window_handles[1])
                self.driver.implicitly_wait(10)
                rets = odds_pattern.findall(self.driver.page_source)
                if rets:
                    hc = _clean(rets[0][0])
                    hc_home = _clean(rets[0][1])
                    hc_away = _clean(rets[0][2])
                # Switch to the half-time odds tab and re-scrape.
                self.driver.find_element(By.XPATH, '//*[@id="tabs"]/a[2]').click()
                self.driver.implicitly_wait(10)
                hc_ret = odds_pattern.findall(self.driver.page_source)
                if hc_ret:
                    # BUG FIX: the original read rets[0] here, silently
                    # copying the full-time odds into the half-time fields.
                    hc_home_half = _clean(hc_ret[0][0])
                    hc_half = _clean(hc_ret[0][1])
                    hc_away_half = _clean(hc_ret[0][2])
                self.driver.close()
                self.driver.switch_to.window(self.driver.window_handles[0])
        except Exception:
            print("操作太频繁了，请先歇一歇。")
            time.sleep(1)
            if opened_window:
                self.driver.close()
                self.driver.switch_to.window(self.driver.window_handles[0])
        return hc, hc_home, hc_away, hc_half, hc_home_half, hc_away_half

    def main(self):
        """Full pipeline: fetch listing page, parse rows, scrape per-match
        odds, persist to MySQL, then quit the driver."""
        complete_data = self.get_html()
        total_tr = self.parse_detail_url(complete_data)
        lists = self.assemble_data(total_tr)
        self.sql_operate(lists)
        self.driver.quit()


if __name__ == '__main__':
    # Build listing URLs for the next three days with real date arithmetic.
    # BUG FIX: the original did integer addition on a YYYYMMDD number
    # (int("20230131") + 1 -> 20230132), producing invalid dates at
    # month/year boundaries.
    today = datetime.date.today()
    urls = [
        "https://bf.titan007.com/football/Next_{0}.htm".format(
            (today + datetime.timedelta(days=offset)).strftime("%Y%m%d"))
        for offset in range(1, 4)
    ]
    for url in urls:
        run = footballFullFuture(url)
        run.main()
