# -*- coding:utf8 -*-
import re
import json
import copy
import time
import pyssdb
import traceback
from datetime import datetime
from lxml import etree
from dateutil.relativedelta import relativedelta
from scrapy import Request, FormRequest
from squirrel_core.commons.utils.get_config import get_config
from squirrel_core.commons.utils.tools import calc_str_md5
from squirrel_core.frame.spider_makaka import MakakaSpider


class itra_race(MakakaSpider):
    """Spider for itra.run.

    Crawls the race calendar for a date window, follows each race into its
    group pages and their course/results tabs, fetches the GPS trace JSON
    from tracedetrail.fr, and emits one assembled result item per race.
    """

    name = "itra_race"
    # Landing page — fetched first to obtain the anti-forgery token.
    start_url = "https://itra.run/"
    # POST target of the race-calendar search form.
    race_url = "https://itra.run/Races/RaceCalendar"
    # Login endpoint (only referenced by the commented-out login flow below).
    login_url = "https://itra.run/Account/Login"
    # Headers for the initial landing-page request.
    header = {
        "Host": "itra.run",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
        "Accept-Encoding": "gzip, deflate, br, zstd",
    }

    # Headers for the calendar-search POST and the race/group detail pages.
    race_header = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": "zh,zh-CN;q=0.9",
        "cache-control": "max-age=0",
        "content-type": "application/x-www-form-urlencoded",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
        "Host": "itra.run"
    }
    # Headers for the tracedetrail.fr trace-JSON endpoint.
    trace_header = {
        "Host": "tracedetrail.fr",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36",
        "Accept": "application/json, text/javascript, */*"
    }
    # Search-form payload template; the dates and CSRF token are filled in
    # per request (see parse()).  Country is hard-coded to CN.
    data = {
        "Input.SearchTerms": "",
        "Input.Country": "CN",
        "Input.DateStart": "",
        "Input.DateEnd": "",
        "Input.DateValue": "",
        "Input.resultcount": "",
        "Input.MinDistance": "",
        "Input.MaxDistance": "",
        "Input.RaceValue": "",
        "Input.MinElevationGain": "",
        "Input.MaxElevationGain": "",
        "Input.MinElevationLoss": "",
        "Input.MaxElevationLoss": "",
        "Input.ItraPtsValue": "",
        "Input.Longitude": "0",
        "Input.Latitude": "0",
        "ZoomLevel": "2",
        "type": "",
        "Input.NorthEastLat": "",
        "Input.NorthEastLng": "",
        "Input.SouthWestLat": "",
        "Input.SouthWestLng": "",
        "Input.MinItraPts": "",
        "Input.MaxItraPts": "",
        "countMap": "0",
        "Input.NationalLeagues": "false",
        "Input.NationalLeague": "false"
    }

    # 302s are handled in parse_race (session-expiry detection) instead of
    # being treated as errors.
    specific_settings = {
        'COOKIES_ENABLED': True,
        'HTTPERROR_ALLOWED_CODES': [302]
    }
    handle_httpstatus_list = [302]
    need_ssdbstore_dup = True
    serialNumber = ""  # task serial number, set per run in get_ext_requests_or_urls
    base_config = get_config(sections="base")
    ssdb_host = base_config.get('ssdb_host')
    ssdb_port = base_config.get('ssdb_port')
    conn = pyssdb.Client(host=ssdb_host, port=ssdb_port)
    # Pop a pre-stored login cookie off the SSDB queue "itra_cookie", retrying
    # up to 3 times.  This executes at class-definition (import) time.
    # NOTE(review): if every attempt fails, the class attribute `cookie` is
    # never bound, so later `self.cookie` accesses raise AttributeError —
    # confirm a cookie is always queued before the spider starts.
    # NOTE(review): the `finally` disconnects the client on every iteration,
    # so a retry reuses a disconnected client — presumably pyssdb reconnects
    # transparently; verify.
    i = 0
    while i < 3:
        try:
            cookie = conn.qfront("itra_cookie")
            if cookie:
                break
            else:
                i += 1
                continue
        except:  # bare except kept as-is: retry on any SSDB/network error
            i += 1
            time.sleep(1)
            continue
        finally:
            conn.disconnect()

    def get_ext_requests_or_urls(self, data=None):
        """Entry point: decode the task payload and kick off the crawl.

        :param data: task payload — a JSON string, a dict, or None.
        :yields: the initial Request for the itra.run landing page, carrying
            the credentials and date window in ``meta``.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if not isinstance(data, dict):
            # Bug fix: the original called data.get(...) unconditionally and
            # crashed with AttributeError when no payload was supplied.
            data = {}
        self.serialNumber = data.get("serialNumber", "")

        self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
        spider_config = data.get("spider_config", {})
        email = spider_config.get("email", "")
        pwd = spider_config.get("password", "")
        start_date = spider_config.get("start_date", "")
        end_date = spider_config.get("end_date", "")
        yield Request(url=self.start_url, callback=self.parse, dont_filter=True, errback=self.err_parse,
                      headers=self.header,
                      meta={"email": email, "pwd": pwd, "start_date": start_date, "end_date": end_date})

    def parse(self, response, **kwargs):
        """Parse the landing page and POST the race-calendar search.

        Reads the requested date window from ``response.meta``; when neither
        bound is supplied, defaults to one month back through three months
        ahead.  The anti-forgery token is scraped from the page and submitted
        with the search form.
        """
        start_date = response.meta.get("start_date", "")
        end_date = response.meta.get("end_date", "")
        if not start_date and not end_date:
            # Default window, formatted dd-mm-yyyy as the site expects.
            # (strftime replaces the original str(date).split("-") shuffle.)
            today = datetime.now().date()
            start_date = (today + relativedelta(months=-1)).strftime("%d-%m-%Y")
            end_date = (today + relativedelta(months=3)).strftime("%d-%m-%Y")
        try:
            self.logger.info(f"开始请求{start_date}到{end_date}的赛事信息")
            post_data = copy.deepcopy(self.data)
            post_data["Input.DateStart"] = start_date
            post_data["Input.DateEnd"] = end_date
            # CSRF token embedded in the landing page; required by the POST.
            post_data["__RequestVerificationToken"] = response.xpath(
                "//input[@name='__RequestVerificationToken']/@value").extract_first()
            # NOTE(review): self.cookie is the raw value popped from SSDB at
            # class-definition time (likely bytes) — confirm Scrapy accepts it
            # as a header value.  Mutating the class-level race_header dict is
            # shared across instances.
            self.race_header["Cookie"] = self.cookie
            yield FormRequest(url=self.race_url, headers=self.race_header, formdata=post_data, dont_filter=True,
                              errback=self.err_parse, callback=self.parse_race,
                              meta={"start_date": start_date, "end_date": end_date})
        except Exception:
            self.logger.info(f"请求{start_date}到{end_date}的赛事信息时出错：{traceback.format_exc()}")


    # def parse_login(self, response):
    #     start_date = response.meta.get("start_date", "")
    #     end_date = response.meta.get("end_date", "")
    #     if response.status == 302:
    #         cookie = "; ".join([i.decode().split(";")[0] for i in response.headers.getlist("Set-Cookie")])
    #         self.race_header["Cookie"] = cookie
    #         location_link = response.headers["Location"].decode("utf-8")
    #         location_url = response.urljoin(location_link.strip())
    #         yield Request(url=location_url, headers=self.race_header,
    #                       meta={"start_date": start_date, "end_date": end_date},
    #                       errback=self.err_parse, callback=self.parse_login, dont_filter=True)
    #     else:
    #         token = response.xpath("//input[@name='__RequestVerificationToken']/@value").extract_first()
    #
    #         # current_date = time.strftime("%Y-%m-%d", time.localtime())
    #         # year, m, d = current_date.split("-")
    #         # start_date = f"{d}-{m}-{year}"
    #         # today = datetime.now().date()
    #         # one_month = today + relativedelta(months=-1)
    #         # year, m, d = str(one_month).split("-")
    #         # start_date = f"{d}-{m}-{year}"
    #         # three_month = today + relativedelta(months=3)
    #         # year, m, d = str(three_month).split("-")
    #         # end_date = f"{d}-{m}-{year}"
    #
    #         post_data = copy.deepcopy(self.data)
    #         post_data["Input.DateStart"] = start_date
    #         post_data["Input.DateEnd"] = end_date
    #         post_data["__RequestVerificationToken"] = token
    #         yield FormRequest(url=self.race_url, headers=self.race_header, formdata=post_data,
    #                           errback=self.err_parse, callback=self.parse_race)


    def parse_race(self, response):
        """Handle the calendar-search result.

        On a 302 with a SessionExpired redirect, emit a notice item; otherwise
        pull the race cards out of the embedded JS variable and request each
        race's detail page.
        """
        start_date = response.meta.get("start_date", "")
        end_date = response.meta.get("end_date", "")
        try:
            if response.status == 302:
                redirect_target = response.headers["Location"].decode("utf-8")
                if "SessionExpired" in redirect_target:
                    self.logger.info(f"请求时会话过期，需重新登陆")
                    item = self.result_item_assembler(response)
                    item['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                           "crawlerType": self.name_second, "data": "会话过期，请重新登陆"}
                    yield item
            else:
                # The race cards are HTML snippets embedded in a JS array literal.
                blob = re.findall(r'var raceSearchJsonSidePopupNew\s*=\s*(\[[\s\S]*?\]);', response.text)[0]
                race_list = re.findall(r'\[(.*)\]', blob)
                self.logger.info(f"{start_date}到{end_date}共有{len(race_list)}条比赛记录")
                for card in race_list:
                    doc = etree.HTML(card)
                    href = doc.xpath("//div[@class='event_name']/a/@href")[0]
                    yield Request(url=response.urljoin(href), headers=self.race_header, dont_filter=True,
                                  errback=self.err_parse, callback=self.parse_race_detail, priority=100)
        except Exception:
            self.logger.info(f"{start_date}到{end_date}比赛记录出错：{traceback.format_exc()}")

    def parse_race_detail(self, response, **kwargs):
        """Scrape the race overview page and chain into its first group page."""
        try:
            self.logger.info(f"开始获取比赛详情：{response.url}")

            def text_of(xp):
                # Joined, stripped text of every node matched by the xpath.
                return "".join(response.xpath(xp).extract()).strip()

            race_data = {
                "race_name": text_of("//div[@class='col-lg-7']/h1//text()"),
                "address_country": text_of("//div[@class='col-lg-3']/text()"),
                "race_date": text_of("//div[@class='col-lg-3']/div//text()"),
                "race_introduce": text_of("//div[@class='col-lg-9']//text()"),
            }
            group_links = [u for u in response.xpath(
                "//div[@class='row pt-2']//div[@class='btn-group']/a/@href").extract() if u != ""]

            collected_groups = response.meta.get("group_data_list", [])
            if group_links:
                first_group = response.urljoin(group_links.pop(0))
                yield Request(url=first_group, headers=self.race_header,
                              meta={"race_data": race_data,
                                    "group_list": group_links,
                                    "group_data_list": collected_groups},
                              errback=self.err_parse, callback=self.parse_group_detail,
                              dont_filter=True, priority=200)
        except Exception:
            self.logger.info(f"获取比赛详情出错{response.url}：{traceback.format_exc()}")

    @staticmethod
    def _cookie_header_to_dict(cookie_str):
        """Turn a raw Cookie header string into the dict Scrapy's ``cookies=`` takes.

        Mirrors the previous inline parsing exactly: split on ';', then on the
        first '=', with an empty value for bare names; names are deliberately
        not stripped of surrounding whitespace.
        """
        cookie_dict = {}
        for part in cookie_str.split(";"):
            name, _, value = part.partition("=")
            cookie_dict[name] = value
        return cookie_dict

    def parse_group_detail(self, response, **kwargs):
        """Scrape one race group (points/levels/league + race details) and
        chain into the group's sub-tab pages (course / results).

        Improvements over the original: repetitive per-field xpath extraction
        is table-driven, cookie parsing is factored into a helper, and the
        dead ``else: pass`` branch is removed.  Dict insertion order is kept
        identical because ``str(race_data)`` later feeds the dedup md5.
        """
        try:
            self.logger.info(f"开始获取组别信息：{response.url}")
            race_data = response.meta.get("race_data", {})
            group_list = response.meta.get("group_list", [])
            group_data_list = response.meta.get("group_data_list", [])
            group_name = "".join(response.xpath(
                "//div[@class='row mt-4 mb-4']//div[@class='col-lg-5']//text()").extract()).strip()
            # The four summary columns: points badge, mountain level,
            # finisher level, national league.
            cols = response.xpath("//div[@class='row mt-4 mb-4']/div[@class='col']")
            points_link = "".join(cols[0].xpath(".//img/@src").extract()).strip()
            itra_points = response.urljoin(points_link)
            mountain_level = "".join(cols[1].xpath(
                ".//span[@class='input-group-text']//text()").extract()).strip()
            finisher_level = "".join(cols[2].xpath(
                ".//span[@class='input-group-text']//text()").extract()).strip()
            league = "".join(cols[3].xpath(".//img/@src|.//span//text()").extract()).strip()
            # A league rendered as an image src needs resolving to an absolute
            # URL; plain text passes through unchanged.
            national_league = response.urljoin(league) if "images" in league else league
            group_data = {"group_name": group_name, "itra_points": itra_points, "mountain_level": mountain_level,
                          "finisher_level": finisher_level, "national_league": national_league}
            # (output key, keyword matched inside the label div) — order matters.
            detail_fields = (
                ("race_date", "Date"), ("start_time", "Start"), ("participation", "Participation"),
                ("distance", "Distance"), ("elevation_gain", "Gain"), ("elevation_loss", "Loss"),
                ("time_limit", "Limit"), ("number_of_aid_stations", "Stations"),
                ("number_of_participants", "Participants"),
            )
            race_detail = response.xpath("//div[@id='rdetails']")
            group_data["race_details"] = {
                key: "".join(race_detail.xpath(
                    f".//div[contains(string(), '{kw}')]/span/text()").extract()).strip()
                for key, kw in detail_fields
            }

            btn_urls = response.xpath(
                "//div[@class='row mt-4 mb-4 text-left']//div[@class='btn-group']//a/@href").extract()
            btn_urls = [u for u in btn_urls if u != '']
            if btn_urls:
                btn_url = response.urljoin(btn_urls.pop(0))
                meta = {"race_data": race_data, "group_list": group_list, "group_data": group_data,
                        "btn_urls": btn_urls, "group_data_list": group_data_list}
                if "RaceResults" in btn_url:
                    # The results tab sits behind the login session, so the
                    # stored cookie is replayed explicitly.
                    yield Request(url=btn_url, headers=self.race_header,
                                  cookies=self._cookie_header_to_dict(self.cookie.decode()),
                                  meta=meta, errback=self.err_parse,
                                  callback=self.parse_race_btn, dont_filter=True)
                else:
                    yield Request(url=btn_url, headers=self.race_header, meta=meta,
                                  errback=self.err_parse, callback=self.parse_race_btn, dont_filter=True)
        except Exception:
            self.logger.info(f"获取组别信息出错{response.url}：{traceback.format_exc()}")

    def parse_race_btn(self, response, **kwargs):
        """Parse one sub-tab of a group page (RaceCourse or RaceResults).

        Acts as a meta-driven state machine: it drains ``btn_urls`` for the
        current group, then ``group_list`` for the race, and finally chains
        into ``parse_trace`` for every collected group.
        """
        try:
            self.logger.info(f"开始获取子标签信息:{response.url}")
            race_data = response.meta.get("race_data", {})
            group_list = response.meta.get("group_list", [])
            group_data = response.meta.get("group_data", {})
            btn_urls = response.meta.get("btn_urls", [])
            group_data_list = response.meta.get("group_data_list", [])
            if "RaceCourse" in response.url:
                # Course description headings plus the tracedetrail.fr trace id.
                txt_list = response.xpath("//div[@id='rresults']//h4//text()").extract()
                txt_list = [txt.strip() for txt in txt_list]
                trace_id = "".join(response.xpath("//div[@id='widgetTdt']/@data-traceid").extract())
                trace_url = f"https://tracedetrail.fr/trace/getTraceItra/{trace_id}"
                group_data["race_course"] = {"content": txt_list, "trace_url": trace_url}
            if "RaceResults" in response.url:
                # Results table: header row gives the column names; an extra
                # "Index" column is prepended for the leading rank cell.
                table = response.xpath("//table[@id='RunnerRaceResults']")
                title_list = table.xpath("./thead/tr//text()").extract()
                title_list = [t.strip() for t in title_list]
                title_list = list(filter(lambda t: t != "", title_list))
                title_list.insert(0, "Index")
                race_result_list = []
                # NOTE(review): rows are read as direct <tr> children of the
                # table — confirm the live page has no <tbody> wrapper.
                tr_list = table.xpath("./tr")
                for tr in tr_list:
                    val_list = []
                    td_list = tr.xpath("./td")
                    for td in td_list:
                        val_list.append("".join(td.xpath("string()").extract()).strip())
                    score_dict = dict(zip(title_list, val_list))
                    race_result_list.append(score_dict)
                # txt_list = [txt.strip() for txt in txt_list]
                group_data["race_results"] = race_result_list
            if btn_urls:
                # More tabs remain for this group; the RaceResults tab needs
                # the stored login cookie replayed explicitly.
                btn_url = response.urljoin(btn_urls.pop(0))
                if "RaceResults" in btn_url:
                    cookie_dict = {}
                    cookie_str = self.cookie.decode()
                    cookies = cookie_str.split(";")
                    for c in cookies:
                        if "=" in c:
                            name, value = c.split("=", 1)
                        else:
                            name = c
                            value = ""
                        cookie_dict.update({name: value})
                    yield Request(url=btn_url, headers=self.race_header, cookies=cookie_dict,
                                  meta={"race_data": race_data, "group_list": group_list, "btn_urls": btn_urls, "group_data": group_data, "group_data_list": group_data_list},
                                  errback=self.err_parse, callback=self.parse_race_btn, dont_filter=True)
                else:
                    yield Request(url=btn_url, headers=self.race_header,
                                  meta={"race_data": race_data, "group_list": group_list, "btn_urls": btn_urls, "group_data": group_data, "group_data_list": group_data_list},
                                  errback=self.err_parse, callback=self.parse_race_btn, dont_filter=True)
            else:
                # Current group finished: either continue with the next group,
                # or start fetching the traces for all collected groups.
                group_data_list.append(group_data)
                if group_list:
                    group_url = response.urljoin(group_list.pop(0))
                    yield Request(url=group_url, headers=self.race_header, meta={"race_data": race_data,
                                                                                 "group_list": group_list, "group_data_list": group_data_list},
                                  errback=self.err_parse, callback=self.parse_group_detail, dont_filter=True)
                else:
                    # NOTE(review): group_info aliases group_data_list, which is
                    # drained below and rebuilt in parse_trace — confirm intended.
                    race_data["group_info"] = group_data_list
                    if group_data_list:
                        group = group_data_list.pop(0)
                        # NOTE(review): a group with no RaceCourse tab yields an
                        # empty trace_url, which will fail the FormRequest.
                        trace_url = group.get("race_course", {}).get("trace_url", "")
                        yield FormRequest(url=trace_url, headers=self.trace_header,
                                          meta={"race_data": race_data, "group_data": group, "group_data_list": group_data_list},
                                          formdata={}, errback=self.err_parse, callback=self.parse_trace, dont_filter=True)
        except Exception:
            self.logger.info(f"获取获取子标签信息出错{response.url}：{traceback.format_exc()}")

    def parse_trace(self, response, **kwargs):
        """Attach the fetched trace JSON to its group; once every group has a
        trace, assemble and emit the final result item for the race."""
        try:
            self.logger.info(f"开始获取轨迹信息：{response.url}")
            meta = response.meta
            race_data = meta.get("race_data", {})
            group_data = meta.get("group_data", {})
            pending = meta.get("group_data_list", [])
            finished = meta.get("new_group_data_list", [])
            group_data["race_course"].update({"trace_data": json.loads(response.text)})
            finished.append(group_data)
            if pending:
                next_group = pending.pop(0)
                next_trace = next_group.get("race_course", {}).get("trace_url", "")
                yield FormRequest(url=next_trace, headers=self.trace_header,
                                  meta={"race_data": race_data, "group_data": next_group,
                                        "group_data_list": pending,
                                        "new_group_data_list": finished},
                                  formdata={}, errback=self.err_parse,
                                  callback=self.parse_trace, dont_filter=True)
            else:
                self.logger.info(f'准备发送数据：{race_data.get("race_name", "")}')
                race_data["group_info"] = finished
                result = self.result_item_assembler(response)
                result['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                         "crawlerType": self.name_second, "data": str(race_data)}
                result["_dup_str"] = calc_str_md5(str(race_data))
                yield result
        except Exception:
            self.logger.info(f"获取轨迹信息出错{response.url}：{traceback.format_exc()}")

    def err_parse(self, failure):
        """Errback for failed requests: log the URL and the failure reason.

        Bug fix: ``traceback.format_exc()`` only works inside an active
        ``except`` block; in a Twisted errback there is no in-flight
        exception, so the original always logged ``"NoneType: None"``.
        Log the Failure's wrapped exception instead.
        """
        reason = getattr(failure, "value", failure)
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{reason!r}")
