import queue

import pymysql as pymysql
from fake_useragent import UserAgent
from lxml import etree
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
import spiderConfig

class footballSimple():
    """Scrape the titan007 live-score table and flag highlighted rows in MySQL.

    Workflow (see main()): load the page headlessly, click two filter
    controls, collect the fx_id of every highlighted <tr>, then set
    tp_game.style = 1 for each fx_id that already exists in the table.
    """

    # Path to a local chromedriver binary (not passed to webdriver.Chrome
    # here; Selenium falls back to the driver found on PATH).
    chrome_path = r'./chromedriver.exe'

    def __init__(self, url):
        """Start a headless Chrome session and load DB settings.

        Args:
            url: the live-score index page to scrape.
        """
        chrome_options = Options()
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--disable-gpu')
        # Selenium 4 removed the deprecated `chrome_options=` keyword;
        # the supported name is `options=`.
        self.driver = webdriver.Chrome(options=chrome_options)
        self.base_url = url
        # MySQL connection settings come from the project-level spiderConfig.
        self.__host = spiderConfig.host
        self.__port = spiderConfig.port
        self.__user = spiderConfig.user
        self.__password = spiderConfig.password
        self.__db = spiderConfig.db
        self.__charset = spiderConfig.charset
        self.lists = []
        self.dicts = {}

    def sql_operate(self, lists):
        """Set tp_game.style for every scraped row that already exists.

        Args:
            lists: iterable of dicts with keys 'fx_id' and 'style'.
        """
        sql__conn = pymysql.connect(
            host=self.__host,
            port=self.__port,
            user=self.__user,
            password=self.__password,
            db=self.__db,
            charset=self.__charset
        )
        # try/finally (and the cursor context manager) guarantee the
        # connection is released even when a query raises.
        try:
            with sql__conn.cursor() as cursor:
                for single in lists:
                    # Only update rows already present in tp_game.  Truthiness
                    # (rather than `== 1`) also covers duplicate fx_id rows.
                    found = cursor.execute(
                        "select * from tp_game where fx_id = %s",
                        (single['fx_id'],)
                    )
                    if found:
                        # Parameterized query instead of string .format():
                        # avoids SQL injection and quoting bugs.
                        cursor.execute(
                            "update tp_game set style = %s where fx_id = %s",
                            (single['style'], single['fx_id'])
                        )
                        print('正在更新{}的数据'.format(single['fx_id']))
                        sql__conn.commit()
        finally:
            sql__conn.close()

    def get_html(self):
        """Load the page, click the two filter controls, return the HTML.

        Returns:
            The page source string, or None when the site blocks the
            request / the controls cannot be clicked.
        """
        try:
            self.driver.get(self.base_url)
            self.driver.find_element(By.ID, 'button7').click()
            self.driver.find_element(By.XPATH, '//*[@id="tools"]/ul/li[1]').click()
            return self.driver.page_source
        except Exception:
            print("访问过于频繁，IP暂时被该网站Ban掉,请稍后再试")
            return None  # explicit: callers must handle the failure

    # Parse the detail-row elements out of the rendered page.
    def parse_detail_url(self, complete_data):
        """Return the centered <tr> elements of the #table_live table."""
        complete_html = etree.HTML(complete_data)
        return complete_html.xpath(
            '//table[@id="table_live"]/tbody/tr[@align="center"]')

    # Assemble the per-row data to persist.
    def assemble_data(self, total_tr):
        """Collect fx_id for rows rendered with the highlight background.

        Args:
            total_tr: list of lxml <tr> elements from parse_detail_url().

        Returns:
            self.lists, a list of {'fx_id': ..., 'style': 1} dicts.
        """
        for tr in total_tr:
            # Highlighted rows carry this inline style fragment.
            if 'height: 18px; background-color:' in str(tr.xpath('@style')):
                sid = "".join(tr.xpath('td[11]/@id'))
                self.dicts = {
                    'fx_id': sid,
                    'style': 1
                }
                self.lists.append(self.dicts)
        # quit() (not close()) so the chromedriver process terminates too,
        # instead of leaking after every run.
        self.driver.quit()
        return self.lists

    def main(self):
        """Run the full scrape-parse-persist pipeline."""
        complete_data = self.get_html()
        if complete_data is None:
            # get_html already reported the failure; shut the browser down
            # instead of crashing on etree.HTML(None).
            self.driver.quit()
            return
        total_tr = self.parse_detail_url(complete_data)
        lists = self.assemble_data(total_tr)
        self.sql_operate(lists)


# Script entry point: scrape the live-score index page once.
if __name__ == '__main__':
    footballSimple('http://live.titan007.com/indexall.aspx').main()
