# -*- coding:utf8 -*-
import json
import traceback
import os
import re
import typing
from scrapy import Request
from urllib.parse import urlparse
from squirrel_core.commons.utils.tools import calc_str_md5, upload_file
from squirrel_core.frame.spider_makaka import MakakaSpider

# Root directory for downloaded media; overridable via the FILE_PATH env var.
file_path = os.getenv("FILE_PATH", "/")


class paoye_pic(MakakaSpider):
    """Spider that fetches a runner's race photos and videos by bib number.

    Workflow: validate the incoming task payload, request the photo
    ("recognize") and video listing endpoints, download each media file,
    upload it via ``upload_file`` and emit a result item through
    ``send_data``.
    """

    name = "paoye_pic"
    serialNumber = ""            # task serial number from the incoming job payload
    all_flag = False             # True -> re-crawl everything, bypass dedup checks
    need_ssdbstore_dup = True
    flag = True
    specific_settings = {
        'DOWNLOAD_MAXSIZE': 0,   # 0 = no download size cap; media files can be large
        'DOWNLOAD_TIMEOUT': 60
    }

    def get_ext_requests_or_urls(self, data=None):
        """Validate the job payload and yield the photo/video listing requests.

        :param data: JSON string or mapping with ``serialNumber`` and a
            ``spider_config`` mapping holding ``race_id`` / ``user_id`` /
            ``race_no`` / ``url`` (and optionally ``all``).
        :yields: two ``Request`` objects (photo listing, video listing).
            On missing parameters the failure is reported (code 101) and
            the spider is asked to shut down instead.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if data and isinstance(data, typing.MutableMapping):
            config = data.get("spider_config", {})
            self.serialNumber = data.get("serialNumber", "")
            self.all_flag = config.get("all", False)
            race_id = config.get("race_id", "")
            user_id = config.get("user_id", "")
            race_no = config.get("race_no", "")
            source_url = config.get("url", "")
            if not all([race_id, user_id, race_no, source_url]):
                self.logger.info("参数不全，请仔细核对")
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 101, "message": "参数不全，请仔细核对"})
                self.close_after_idle = True
                self.force_to_close_spider = True
            else:
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
                domain = urlparse(source_url).netloc
                # Race code is the numeric id in ".../live/123" or ".../live/pc/123".
                race_code = "".join(re.findall(r"/live/(?:pc/)?(\d+)", source_url))
                pic_url = f"https://{domain}/home/pic/self/recognize?activityNo={race_code}&number={race_no}&faceUrl=&faceHash=&ppSign="
                video_url = f"https://{domain}/home/find/me/video/num?activityNo={race_code}&number={race_no}"
                for url in [pic_url, video_url]:
                    yield Request(url=url, callback=self.parse, dont_filter=True, errback=self.err_parse,
                                  meta={"race_id": race_id, "race_no": race_no, "user_id": user_id})

    def parse(self, response, **kwargs):
        """Parse a listing response and schedule the media downloads.

        The photo endpoint (URL contains "recognize") is deduplicated
        through an SSDB set keyed on each photo's ``pic_hash``; the dedup
        is bypassed when ``all_flag`` is set. Empty listings emit a
        "no photos"/"no videos" result item instead.
        """
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_no = response.meta.get("race_no", "")
        self.flag = False
        try:
            self.logger.info(f"开始解析：{response.url}")
            content = json.loads(response.text).get("result", {})
            if "recognize" in response.url:
                data_list = content.get("pics_array", [])
                if data_list:
                    for data in data_list:
                        pic_link = data.get("big_img", "")
                        pic_hash = data.get("pic_hash", "")
                        if pic_hash:
                            # set_add is truthy only for a not-yet-seen hash.
                            status = self.server.set_add(f'{self.name}:storedupefilter', calc_str_md5(pic_hash))
                        else:
                            status = False
                        if self.all_flag:
                            status = True
                        if pic_link:
                            if not status:
                                self.logger.info("重复照片")
                            else:
                                # big_img is protocol-relative ("//host/...").
                                url = f'https:{pic_link}'
                                yield Request(url=url, callback=self.parse_pic, errback=self.err_parse, dont_filter=self.all_flag,
                                              meta={"race_id": race_id, "race_no": race_no, "user_id": user_id, "pic_hash": pic_hash})
                else:
                    result_dict = {"race_id": race_id, "msg": "无照片"}
                    result = self.send_data(resp=response, serial_number=self.serialNumber, result_data=result_dict,
                                            user_id=user_id, race_id=race_id, dup_str=str(result_dict))
                    yield result
            else:
                data_list = content.get("list", [])
                if data_list:
                    for data in data_list:
                        video_link = data.get("url", "")
                        video_id = data.get("id", "")
                        if video_link and "mp4" in video_link:
                            # Strip any query/fragment after the first "mp4".
                            video_url = f'https:{video_link.split("mp4")[0]}mp4'
                            yield Request(url=video_url, callback=self.parse_video, errback=self.err_parse, dont_filter=self.all_flag,
                                          meta={"race_id": race_id, "race_no": race_no, "user_id": user_id, "video_id": video_id, "use_proxy": False})
                else:
                    result_dict = {"race_id": race_id, "msg": "无视频"}
                    result = self.send_data(resp=response, serial_number=self.serialNumber, result_data=result_dict,
                                            user_id=user_id, race_id=race_id, dup_str=str(result_dict))
                    yield result
        except Exception:
            self.logger.info(f"解析{response.url}时出错：{traceback.format_exc()}")

    def _save_and_upload(self, response, file_name, file_type, race_id, user_id):
        """Save the response body to disk, upload it, and build a result item.

        Returns the assembled result item, or ``None`` when the file
        already exists locally (already handled earlier -> emit nothing).
        """
        dir_path = os.path.join(file_path, "picture", self.name_first)
        # exist_ok avoids a race between an exists() check and makedirs().
        os.makedirs(dir_path, exist_ok=True)
        save_path = os.path.join(dir_path, file_name)
        if os.path.exists(save_path):
            return None
        with open(save_path, "wb") as f:
            f.write(response.body)
        upload_path = f"flow/{race_id}/{user_id}/pic/{file_name}"
        upload_flag = upload_file(save_path, upload_path)
        if upload_flag:
            self.logger.info(f"{save_path}上传成功：{upload_path}")
        else:
            self.logger.info(f"{save_path}上传失败：{upload_path}")
        result_dict = {"pic_name": file_name, "pic_type": file_type, "url_address": upload_path, "race_id": race_id}
        return self.send_data(resp=response, serial_number=self.serialNumber, result_data=result_dict,
                              user_id=user_id, race_id=race_id, dup_str=file_name)

    def parse_pic(self, response):
        """Persist one downloaded photo and emit its result item."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_no = response.meta.get("race_no", "")
        pic_hash = response.meta.get("pic_hash", "")
        try:
            self.logger.info(f"开始下载照片：{response.url}")
            # Stable name: prefer the site-provided hash, fall back to the URL.
            pic_md5 = calc_str_md5(pic_hash) if pic_hash else calc_str_md5(response.url)
            result = self._save_and_upload(response, f"{race_no}_{pic_md5}.jpg", "jpg", race_id, user_id)
            if result is not None:
                yield result
        except Exception:
            self.logger.info(f"下载照片时出错{response.url}：{traceback.format_exc()}")

    def parse_video(self, response):
        """Persist one downloaded video and emit its result item."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_no = response.meta.get("race_no", "")
        video_id = response.meta.get("video_id", "")
        try:
            self.logger.info(f"开始下载视频：{response.url}")
            # Stable name: prefer the site-provided id, fall back to the URL.
            pic_md5 = calc_str_md5(str(video_id)) if video_id else calc_str_md5(response.url)
            result = self._save_and_upload(response, f"{race_no}_{pic_md5}.mp4", "mp4", race_id, user_id)
            if result is not None:
                yield result
        except Exception:
            self.logger.info(f"下载视频时出错{response.url}：{traceback.format_exc()}")

    def send_data(self, resp=None, serial_number=None, result_data=None, user_id=None, race_id=None, dup_str=None):
        """Assemble the standard result item for the upstream consumer.

        :param dup_str: string hashed into ``_dup_str`` for dedup of
            emitted items; skipped entirely when ``all_flag`` forces a
            full re-crawl.
        """
        if result_data is None:
            result_data = {"msg": "未查到照片信息"}
        result_dict = {"serialNumber": serial_number, "webType": self.name_first,
                       "userId": user_id, "raceId": race_id, "code": 200,
                       "crawlerType": self.name_second, "data": str(result_data)}
        result = self.result_item_assembler(resp)
        result['result_data'] = result_dict
        if not self.all_flag and dup_str is not None:
            result['_dup_str'] = calc_str_md5(dup_str)
        return result

    def err_parse(self, failure):
        """Errback: log the failure and retry the request up to 3 extra times.

        Each retry increments ``r_time`` in meta and flags a proxy change.
        """
        request = failure.request
        change = request.meta
        r_time = change.get("r_time", 1)
        # traceback.format_exc() is useless in an errback (no active Python
        # exception -> it prints "NoneType: None"); use the twisted
        # Failure's own traceback instead.
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{failure.getTraceback()},准备第{r_time}次重试")
        if r_time <= 3:
            r_time += 1
            try:
                change.update({"change_proxy": True, "r_time": r_time})
                yield request.replace(meta=change)
            except Exception:
                # Log instead of silently swallowing a failed retry build.
                self.logger.info(f"重试请求构造失败：{traceback.format_exc()}")

