import datetime
import json
import logging
import random
import traceback
from urllib.parse import quote

import requests
from requests import JSONDecodeError
from urllib3.exceptions import ReadTimeoutError

from config import user_agents


class Spider(object):
    """Scrapes one page of Douyin's general-search endpoint for a keyword
    task and flattens each hit into a plain result dict."""

    def __init__(self, task, cookies):
        """
        :param task: DB row tuple; indexes used:
            0 = task_id, 1 = keyword, 2 = product, 3 = JSON params
            (object with "offset" and "count"), 8 = keyword_id, 9 = product_id
        :param cookies: raw Cookie header value sent with every request
        """
        self._url = "https://www.douyin.com/aweme/v1/web/general/search/single/"
        self._task = task
        self._keyword = task[1]
        self._product = task[2]
        self._product_id = task[9]
        self._keyword_id = task[8]
        self._params = json.loads(task[3])
        self._cookies = cookies
        self._task_id = task[0]

    def _gen_url(self, offset=0, count=10):
        """Build the search URL for the current keyword.

        The keyword is percent-encoded so characters such as '&', '=' or
        spaces cannot corrupt the query string; already-safe characters
        (e.g. CJK text that requests would have requoted anyway) encode to
        the same bytes as before.
        """
        return f'{self._url}?device_platform=webapp&aid=6383&channel=channel_pc_web&search_channel=aweme_general&sort_type=0&publish_time=0&query_correct_type=1&is_filter_search=0&from_group_id=&pc_client_type=1&version_code=170400&version_name=17.4.0&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Firefox&browser_version=107.0&browser_online=true&engine_name=Gecko&engine_version=107.0&os_version=10.15&cpu_core_num=10&device_memory=&platform=PC&webid=7166456458761389575&keyword={quote(self._keyword)}&search_source=search_sug&offset={offset}&count={count}&os_name=Mac OS'

    def process(self):
        """Fetch one result page and return a list of flattened item dicts.

        Returns [] on any transport/HTTP/JSON failure, on a non-zero API
        status_code, or when the page carries no data.
        """
        offset = self._params["offset"]
        url = self._gen_url(offset, self._params["count"])
        headers = {
            "Host": "www.douyin.com",
            # pick a random UA per request to reduce fingerprinting
            "User-Agent": random.choice(user_agents),
            'cookie': self._cookies,
            'referer': 'https://www.douyin.com/search/%E5%85%B1%E5%92%8C%E5%9B%BD%E4%B9%8B%E8%BE%89?aid=d95a0147-9d14-46d5-b1c3-3fd680c9329d&publish_time=0&sort_type=0&source=normal_search&type=general'
        }
        logging.info("%s,%s", self._params['offset'], self._params['count'])
        logging.info("agent %s", headers["User-Agent"])
        try:
            # note: the original passed data={} on a GET, which is meaningless
            r = requests.get(url, timeout=10, headers=headers)
            now_time_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            if r.status_code != 200:
                logging.error(f"{self._keyword},http code is {r.status_code}")
                return []
            if not r.text:
                logging.error(f"{self._keyword} response is empty")
                return []
            response_json = r.json()
        except requests.exceptions.Timeout:
            # requests wraps urllib3's ReadTimeoutError; catching the urllib3
            # class directly (as before) never matched.
            logging.error(f"{self._keyword} http time out")
            return []
        except JSONDecodeError:
            logging.error(f"{self._keyword} json format err")
            return []
        except Exception as e:
            # narrowed from BaseException so Ctrl-C / SystemExit propagate
            logging.error(f"{e}")
            logging.error(traceback.format_exc())
            return []
        if response_json["status_code"] != 0:
            logging.error(f"{self._keyword} status code is not 0")
            return []
        if len(response_json["data"]) == 0:
            logging.warning(f"{self._keyword} data is empty")
            return []

        data = []
        skipped = 0
        for index, item in enumerate(response_json['data']):
            # type 1 = plain video, type 16 = mix/collection; skip the rest
            if item["type"] not in (1, 16):
                skipped += 1
                continue
            if item["type"] == 16:
                info = item["aweme_mix_info"]["mix_items"][0]
                video_type = '16'
            else:
                info = item["aweme_info"]
                video_type = '1'
            if "desc" not in info:
                skipped += 1
                continue
            if info["text_extra"] is None:
                info["text_extra"] = []
            # rank = 1-based position within the page offset, not counting skips
            data.append(self._build_item(info, offset + index + 1 - skipped,
                                         now_time_str, video_type))
        return data

    def _build_item(self, info, rank, now_time_str, video_type):
        """Flatten one aweme/mix info payload into a result row.

        :param info: the "aweme_info" (or first mix item) dict from the API
        :param rank: computed rank of the item within the search results
        :param now_time_str: crawl timestamp, "%Y-%m-%d %H:%M:%S"
        :param video_type: '1' for plain videos, '16' for mixes
        """
        return {
            "product": self._product,
            "keyword": self._keyword,
            "rank": rank,
            "desc": info["desc"],
            "video_create_time": info["create_time"],
            "video_url": info["video"]["play_addr"]["url_list"][0],
            "video_duration": info["video"]["duration"],
            "author": info["author"]["nickname"],
            "statistics_digg_count": info["statistics"]["digg_count"],
            "statistics_play_count": info["statistics"]["play_count"],
            "statistics_share_count": info["statistics"]["share_count"],
            "statistics_collect_count": info["statistics"]["collect_count"],
            # comma-joined hashtag names; entries without a name contribute ""
            "text_extra": ",".join(o.get("hashtag_name", "")
                                   for o in info["text_extra"]),
            "create_time": now_time_str,
            "task_id": self._task_id,
            "video_type": video_type,
            "dy_product_id": self._product_id,
            "dy_keyword_id": self._keyword_id
        }

    def fix_heji_item(self, mix_info, rank, now_time_str):
        """Backward-compatible wrapper: flatten a mix (合集, type 16) item."""
        return self._build_item(mix_info, rank, now_time_str, '16')
