# -*- coding:utf8 -*-
import json
import traceback
import os
import re
import typing
from scrapy import Request
from squirrel_core.commons.utils.tools import calc_str_md5, upload_file
from squirrel_core.frame.spider_makaka import MakakaSpider

# Root directory for locally saved pictures; falls back to "/" when the
# FILE_PATH environment variable is not set (typically injected by deployment).
file_path = os.environ.get("FILE_PATH", "/")


class luojiweiye_pic(MakakaSpider):
    """Spider that fetches a runner's race photos from sport.luojiweiye.com.

    Workflow:
      1. ``get_ext_requests_or_urls`` validates the task payload and issues the
         photo-list API request for the runner (by bib number or name).
      2. ``parse`` walks the returned photo list, de-duplicates by photo name
         (via a Redis-backed set, presumably — ``self.server`` comes from the
         framework base class) and requests each picture.
      3. ``parse_pic`` saves the image under ``file_path``, uploads it with
         :func:`upload_file`, and emits a result item through ``send_data``.
    """

    name = "luojiweiye_pic"
    serialNumber = ""   # task serial number, filled in from the incoming payload
    all_flag = False    # True -> crawl everything again, bypassing de-duplication

    header = {
        "Host": "sport.luojiweiye.com",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
        "Accept": "*/*",
        "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8"
    }
    # Maps a race name (with all spaces stripped) to the site's internal game_id.
    game_dict = {
        "胶马联赛-莱州站暨2025莱州（石都）半程马拉松": "4511"
    }

    def get_ext_requests_or_urls(self, data=None):
        """Validate the task payload and yield the initial photo-list request.

        ``data`` may arrive as a JSON string or a mapping. At least one of
        ``user_name`` / ``race_no`` must be present; otherwise the task is
        reported as failed (code 101) and the spider is shut down.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if data and isinstance(data, typing.MutableMapping):
            self.serialNumber = data.get("serialNumber", "")
            # Read the nested config once instead of re-fetching it per field.
            spider_config = data.get("spider_config", {})
            self.all_flag = spider_config.get("all", False)
            race_id = spider_config.get("race_id", "")
            user_id = spider_config.get("user_id", "")
            race_no = spider_config.get("race_no", "")
            user_name = spider_config.get("user_name", "")
            race_name = spider_config.get("race_name", "").replace(" ", "")
            if not user_name and not race_no:
                self.logger.info("user_name和race_no必须存在一个")
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 101, "message": "user_name和race_no必须存在一个"})
                self.close_after_idle = True
                self.force_to_close_spider = True
            else:
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
                # Bib number takes priority over the runner's name when both exist.
                keyword = race_no or user_name
                game_id = self.game_dict.get(race_name, "")
                url = f"https://sport.luojiweiye.com/api/photo/216/applist?game_id={game_id}&keywords={keyword}&album_id=0&page_size=200&page=1&app_id=wx924e0b1059481b25&appid=wx924e0b1059481b25&api_token=2j8MtqUoEVUQvZrtSJSBVch4nPrBM9eUwwwyXwlCrm5XGlalpD&platform=&lang=&web_version=1.1.0"

                yield Request(url=url, callback=self.parse, dont_filter=True, errback=self.err_parse, headers=self.header,
                              meta={"race_id": race_id, "race_no": race_no, "user_id": user_id, "user_name": user_name,
                                    "race_name": race_name, "keyword": keyword})

    def parse(self, response, **kwargs):
        """Parse the photo-list API response and request each new picture.

        Photos whose ``local_name`` was already seen (tracked in the
        ``<spider>:storedupefilter`` set) are skipped unless they have no name.
        When the list is empty, a "no photos" result item is emitted instead.
        """
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_name = response.meta.get("race_name", "")
        keyword = response.meta.get("keyword", "")
        try:
            self.logger.info(f"开始解析{keyword}的照片")
            data_list = json.loads(response.text).get("data", {}).get("data", [])
            if data_list:
                for data in data_list:
                    pic_name = data.get("local_name", "")
                    status = True
                    if pic_name:
                        # set_add returns falsy when the md5 was already stored,
                        # i.e. this picture was downloaded in an earlier run.
                        status = self.server.set_add(f'{self.name}:storedupefilter', calc_str_md5(pic_name))
                        if not status:
                            self.logger.info("重复照片")
                    if status:
                        pic_url = data.get("logo_name", "")
                        if pic_url:
                            # dont_filter follows all_flag: a full re-crawl must not
                            # let Scrapy's request dedup drop repeated URLs.
                            yield Request(url=pic_url, callback=self.parse_pic, errback=self.err_parse, dont_filter=self.all_flag,
                                          meta={"race_id": race_id, "pic_name": pic_name, "user_id": user_id, "race_name": race_name, "keyword": keyword})
            else:
                result = self.send_data(resp=response, serial_number=self.serialNumber,
                                        user_id=user_id, race_id=race_id)
                yield result
        except Exception:
            self.logger.info(f"解析{keyword}的照片时出错{response.url}：{traceback.format_exc()}")

    def parse_pic(self, response):
        """Save a downloaded picture to disk, upload it, and emit a result item.

        Pictures already present on disk are skipped entirely (no re-upload,
        no result item).
        """
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_name = response.meta.get("race_name", "")
        keyword = response.meta.get("keyword", "")
        pic_name = response.meta.get("pic_name", "")
        try:
            self.logger.info(f"开始下载{keyword}的照片：{response.url}")
            pic_name = f"{pic_name}.jpg"
            dir_path = os.path.join(file_path, "picture", self.name_first)
            # exist_ok avoids the check-then-create race when several picture
            # callbacks hit a fresh directory concurrently.
            os.makedirs(dir_path, exist_ok=True)
            save_path = os.path.join(dir_path, pic_name)
            if not os.path.exists(save_path):
                with open(save_path, "wb") as f:
                    f.write(response.body)
                upload_path = f"flow/{race_id}/{user_id}/pic/{pic_name}"
                upload_flag = upload_file(save_path, upload_path)
                if upload_flag:
                    self.logger.info(f"{save_path}上传成功：{upload_path}")
                else:
                    self.logger.info(f"{save_path}上传失败：{upload_path}")
                result_dict = {"pic_name": pic_name, "pic_type": "jpg", "url_address": upload_path, "race_id": race_id, "race_name": race_name}

                result = self.send_data(resp=response, serial_number=self.serialNumber, result_data=result_dict,
                                        user_id=user_id, race_id=race_id, dup_str=str(result_dict))
                yield result
        except Exception:
            self.logger.info(f"下载{keyword}照片时出错{response.url}：{traceback.format_exc()}")

    def send_data(self, resp=None, serial_number=None, result_data=None, user_id=None, race_id=None, dup_str=None):
        """Assemble the framework result item for one photo (or a "not found" notice).

        ``result_data`` defaults to a "no photos found" message when omitted.
        ``dup_str`` feeds the item-level de-dup key (skipped when ``all_flag``
        forces a full re-crawl).
        """
        if result_data is None:
            result_data = {"msg": "未查到照片信息"}
        result_dict = {"serialNumber": serial_number, "webType": self.name_first,
                       "userId": user_id, "raceId": race_id, "code": 200,
                       "crawlerType": self.name_second, "data": str(result_data)}
        result = self.result_item_assembler(resp)
        result['result_data'] = result_dict
        if not self.all_flag and dup_str is not None:
            result['_dup_str'] = calc_str_md5(dup_str)
        return result

    def err_parse(self, failure):
        """Errback for failed requests.

        Bug fix: ``traceback.format_exc()`` inside a Twisted errback logs
        ``"NoneType: None"`` because no Python exception is active there — the
        error lives on the Failure object, so log its own traceback instead.
        """
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{failure.getTraceback()}")

