# -*- coding: utf-8 -*-
"""
Created on 2025-06-21 12:06:11
---------
@summary:
---------
@author: M
"""
import time

import feapder
import requests
from items.fanqie_ip_book_info_item import FanqieIpBookInfoItem
from items.fanqie_short_drama_detail_item import FanqieShortDramaDetailItem, FanqieShortDramaFlowAnalysisItem
from items.fanqie_short_drama_info_item import FanqieShortDramaInfoItem
from utils.upload_file_tos import upload
from utils.utils import book_detail_date


def parse_thumb_url(thumb_url):
    """Download a thumbnail image and re-upload it to TOS object storage.

    Args:
        thumb_url: URL of the source thumbnail image.

    Returns:
        Whatever ``upload`` returns for the fetched bytes (presumably the
        hosted URL/key -- confirm against ``utils.upload_file_tos``), stored
        with a 'jpg' extension.

    Raises:
        requests.HTTPError: if the download returns an error status.
        requests.Timeout: if the download exceeds the timeout.
    """
    # Bound the request so one dead CDN host cannot hang the whole spider,
    # and fail loudly on HTTP errors instead of uploading an error page
    # body as an image.
    response = requests.get(thumb_url, timeout=30)
    response.raise_for_status()
    return upload(response.content, 'jpg')


class FanqieNeirongCenter(feapder.Spider):
    """Crawl Fanqie content-center APIs on novelcp.toutiao.com.

    Two pipelines are defined (the second is currently commented out in
    ``start_requests``):

    * IP list -> IP detail, yielding ``FanqieIpBookInfoItem``;
    * book-stats "data center" list -> per-day book-detail and
      flow-analysis requests, yielding ``FanqieShortDramaInfoItem``,
      ``FanqieShortDramaDetailItem`` and ``FanqieShortDramaFlowAnalysisItem``.
    """

    # Page size used when paginating the IP list endpoint.
    ip_page_size = 60
    # Page size used when paginating the book-stats (data center) endpoint.
    data_center_page_size = 20

    def start_requests(self):
        """Seed the crawl with page 1 of the playlet IP list.

        NOTE(review): the cookies below are a hard-coded logged-in session
        (``sessionid``/``sid_tt`` etc.) with an expiry baked into
        ``sid_guard`` -- they presumably must be refreshed manually before
        each run; verify before deploying.
        """
        cookies = {
            "biz_trace_id": "4c9d15e3",
            "passport_csrf_token": "b6994315b9da8e43e703eac5b0e7cbc9",
            "passport_csrf_token_default": "b6994315b9da8e43e703eac5b0e7cbc9",
            "n_mh": "XKutLHrFWXxvtsRAOgO42oBvB_2oj9w7eU134_3lnFI",
            "sid_guard": "fa2d65ddb0e4dd8344fbec8c5a54e21e%7C1760583847%7C5184000%7CMon%2C+15-Dec-2025+03%3A04%3A07+GMT",
            "uid_tt": "2aecd7bdbbba72d39203a860c22e8bc6",
            "uid_tt_ss": "2aecd7bdbbba72d39203a860c22e8bc6",
            "sid_tt": "fa2d65ddb0e4dd8344fbec8c5a54e21e",
            "sessionid": "fa2d65ddb0e4dd8344fbec8c5a54e21e",
            "sessionid_ss": "fa2d65ddb0e4dd8344fbec8c5a54e21e",
            "session_tlb_tag": "sttt%7C15%7C-i1l3bDk3YNE--yMWlTiHv_________AC6qTjUjpUh7LZq58H_GpGWqJAkfBRPBzAF5OtSHzWDg%3D",
            "is_staff_user": "false",
            "sid_ucp_v1": "1.0.0-KDE2NTVmM2ZmN2RlN2Q5OGExMDdkNjI4ZDYyZmMyOGEwZWI3ZTBiNjgKFwjHoZD-qoyTAhCnwcHHBhibFjgCQO8HGgJsZiIgZmEyZDY1ZGRiMGU0ZGQ4MzQ0ZmJlYzhjNWE1NGUyMWU",
            "ssid_ucp_v1": "1.0.0-KDE2NTVmM2ZmN2RlN2Q5OGExMDdkNjI4ZDYyZmMyOGEwZWI3ZTBiNjgKFwjHoZD-qoyTAhCnwcHHBhibFjgCQO8HGgJsZiIgZmEyZDY1ZGRiMGU0ZGQ4MzQ0ZmJlYzhjNWE1NGUyMWU"
        }
        IP_url = 'https://novelcp.toutiao.com/api/origin/cp/playlet/ip/list/v:version/'
        IP_params = {
            "page_no": 1,
            "page_count": self.ip_page_size,
            "image_fmt": "180x240",
        }
        yield feapder.Request(IP_url, params=IP_params, cookies=cookies, callback=self.parse_ip)

        # Disabled second pipeline: seed for the book-stats "data center"
        # listing (handled by parse_book_list when enabled).
        # data_center_url = 'https://novelcp.toutiao.com/api/novel/provider/book-stats/list/v:version'
        # data_center_params = {
        #     "genre": 12,
        #     "need_right_groups": '1',
        #     "page_index": 0,
        #     "page_size": self.data_center_page_size,
        #     "sort_type": 2,
        # }
        # yield feapder.Request(data_center_url, params=data_center_params, cookies=cookies,
        #                       callback=self.parse_book_list)

    def parse_ip(self, request, response):
        """Parse one page of the IP list.

        Fans out one detail request per IP on the page, then yields a
        request for the next page while ``page_no`` is below the computed
        total page count.
        """
        data = response.json
        IP_url = request.url
        # Reuse the cookies carried on the incoming request for follow-ups.
        cookies = request.cookies
        data_info = data['data']
        for item in data_info['ip_list']:
            ip_detail_params = {
                "ip_id": item['ip_id']
            }
            ip_detail_url = 'https://novelcp.toutiao.com/api/origin/cp/playlet/ip/detail/v:version/'
            yield feapder.Request(ip_detail_url, params=ip_detail_params, cookies=cookies,
                                  callback=self.parse_ip_detail)

        total = data_info['total']
        # Ceiling division: pages needed to cover `total` records.
        total_page = (total + self.ip_page_size - 1) // self.ip_page_size
        if request.params['page_no'] < total_page:
            ip_copy_params = request.params.copy()
            ip_copy_params['page_no'] = request.params['page_no'] + 1
            yield feapder.Request(IP_url, params=ip_copy_params, cookies=cookies, callback=self.parse_ip)

    def parse_ip_detail(self, request, response):
        """Map one IP detail response onto a ``FanqieIpBookInfoItem``.

        NOTE(review): the ``round(..., 2)`` calls will raise TypeError if
        the API ever returns null for a rate field -- confirm the fields
        are always numeric.
        """
        data = response.json
        data_info = data['data']
        item = data_info['ip_data']

        # Field-for-field copy of the API payload; rates rounded to 2 dp.
        to_dict = {
            "author_name": item['author_name'],
            "book_desc": item['book_desc'],
            "book_score": item['book_score'],
            "category_list": item['category_list'],
            "creation_status": item['creation_status'],
            "first_online_month": item['first_online_month'],
            "gender": item['gender'],
            "ip_id": item['ip_id'],
            "ip_name": item['ip_name'],
            "ip_selected_count": item['ip_selected_count'],
            "ip_shelved_count": item['ip_shelved_count'],
            "ip_type": item['ip_type'],
            "is_history_peak_rank": item['is_history_peak_rank'],
            "middle_read_duration_rate": round(item['middle_read_duration_rate'], 2),
            "old_read_duration_rate": round(item['old_read_duration_rate'], 2),
            "read_listen_dcnt_14d": item['read_listen_dcnt_14d'],
            "thumb_url": item['thumb_url'],
            "word_num": item['word_num'],
            "young_read_duration_rate": round(item['young_read_duration_rate'], 2)
        }
        yield FanqieIpBookInfoItem(**to_dict)

    def parse_book_list(self, request, response):
        """Parse one page of the book-stats ("data center") listing.

        For every book: yield a ``FanqieShortDramaInfoItem``, then -- when
        the entry has details -- fan out one book-detail request and one
        flow-analysis request per day since the book became visible.
        Finally, page forward while ``page_index`` has not reached the
        last page.

        NOTE(review): the ``X or None`` pattern below also converts a
        legitimate 0 / '' value to None -- confirm that is intended.
        """
        data_center_url = request.url
        cookies = request.cookies
        data = response.json
        # code == 0 appears to signal success for this endpoint.
        if data.get('code') == 0:
            data_info = data['data']
            for content in data_info['contents']:
                real_content = content['content']
                list_content = content['list_content']
                # Extract fields from the API response (keys are the
                # Chinese column names returned by the endpoint).
                to_short_drama_info_dic = {
                    "book_id": real_content.get('BookId'),
                    "cp_book_id": real_content.get('cp_book_id', ''),
                    "creation_status": real_content.get('creation_status'),
                    "thumb_uri": real_content.get('thumb_uri'),
                    # Re-host the thumbnail via TOS before storing.
                    "thumb_url": parse_thumb_url(real_content.get('thumb_url')),
                    "anchor_name": real_content.get('主播名'),
                    "avg_watch_duration": real_content.get('人均播放时长') or None,
                    "avg_watch_episodes": real_content.get('人均播放集数') or None,
                    "drama_name": real_content.get('作品名称'),
                    "author_name": real_content.get('作者名'),
                    "data_create_time": real_content.get('创建时间'),
                    "contract_id": real_content.get('合同ID'),
                    "post_rating": real_content.get('后验评级'),
                    "sub_account_name": real_content.get('子账号名称'),
                    "today_play_count": real_content.get('当日播放量') or None,
                    # "today_play_change_rate": real_content.get('当日播放量比前日'),
                    "full_play_rate": real_content.get('整剧完播率（%）') or None,
                    # "traffic_analysis": real_content.get('流量分析'),
                    "click_rate": real_content.get('点击率（%）') or None,
                    "drama_category": real_content.get('短剧分类'),
                    "total_duration": real_content.get('短剧总时长') or None,
                    "total_episodes": real_content.get('短剧集数') or None,
                    "chapter_count": real_content.get('章节数量') or None,
                    # "cumulative_ctr": real_content.get('累计CTR'),
                    # "cumulative_ctr_change": real_content.get('累计CTR比前日'),
                    # "cumulative_fav": real_content.get('累计收藏量'),
                    # "cumulative_fav_change": real_content.get('累计收藏量比前日'),
                    "cumulative_click_rate": real_content.get('累计点击率') or None,
                    "first_visible_time": real_content.get('首次可见时间') or None,
                    "first_episode_finish_rate": real_content.get('首集完播进度') or None,
                    "finish_rate_list": list_content.get('完播率') or None
                }
                yield FanqieShortDramaInfoItem(**to_short_drama_info_dic)
                if content.get('has_detail'):
                    # book_detail_date(2) presumably yields the last 2
                    # days' date strings -- verify against utils.utils.
                    # time_list = book_detail_date(real_content.get('首次可见时间'))
                    time_list = book_detail_date(2)
                    for t in time_list:
                        # Only request dates on/after the book's first
                        # visible date (string comparison of ISO dates).
                        if real_content.get('首次可见时间') and t >= real_content.get('首次可见时间'):
                            # Carry the date in meta so the callbacks can
                            # stamp the resulting items with it.
                            detail_params = {
                                "genre": 12,
                                "need_right_groups": "1,2",
                                "page_size": 1,
                                "page_index": 0,
                                "book_id": real_content.get('BookId'),
                                # Endpoint expects YYYYMMDD.
                                "date": t.replace('-', '')
                            }
                            book_detail_url = 'https://novelcp.toutiao.com/api/novel/provider/book-stats/list/v:version'
                            meta_1 = {'data_time': t}
                            yield feapder.Request(book_detail_url, params=detail_params, cookies=cookies, meta=meta_1,
                                                  callback=self.parse_book_detail)
                            # Flow-analysis request for the same day.
                            flow_params = {
                                "book_id": real_content.get('BookId'),
                                "genre": 12,
                                "start_date": t,
                                "end_date": t

                            }
                            meta_2 = {'data_time': t}
                            flow_url = 'https://novelcp.toutiao.com/api/novel/provider/flow-analysis/get/v:version'
                            yield feapder.Request(flow_url, params=flow_params, cookies=cookies, meta=meta_2,
                                                  callback=self.parse_flow)

            total = data_info['total']
            # Ceiling division: pages needed to cover `total` records.
            total_page = (total + self.data_center_page_size - 1) // self.data_center_page_size

            # page_index is 0-based, hence the `- 1`.
            if request.params['page_index'] < total_page - 1:
                data_center_copy_params = request.params.copy()
                data_center_copy_params['page_index'] = request.params['page_index'] + 1
                yield feapder.Request(data_center_url, params=data_center_copy_params, cookies=cookies,
                                      callback=self.parse_book_list)

    def parse_flow(self, request, response):
        """Map one day's flow-analysis response onto a flow-analysis item.

        The three list fields are stored as returned by the API; the day
        being described travels in ``request.meta['data_time']``.
        """
        data = response.json
        data_info = data['data']
        data_index_list = data_info.get('data_index_list')
        date_item_list = data_info.get('date_item_list')
        date_item_rate = data_info.get('date_item_rate')

        to_db_dic = {
            'book_id': request.params['book_id'],
            'data_index': data_index_list,
            'date_item': date_item_list,
            'date_item_rate': date_item_rate,
            "data_time": request.meta.get('data_time'),
        }
        yield FanqieShortDramaFlowAnalysisItem(**to_db_dic)

    def parse_book_detail(self, request, response):
        """Map one day's book-stats detail onto ``FanqieShortDramaDetailItem``.

        Yields one detail item per content entry (page_size is 1 in the
        originating request, so normally a single entry). The day being
        described travels in ``request.meta['data_time']``.

        NOTE(review): ``X or None`` also converts a legitimate 0 / ''
        value to None -- confirm that is intended.
        """
        data = response.json
        data_info = data['data']
        for content in data_info['contents']:
            detail_real_content = content['content']
            detail_list_content = content['list_content']

            # Build the detail item directly from the day's stats (keys
            # are the Chinese column names returned by the endpoint).
            to_list_content_dic = {
                "book_id": detail_real_content.get('BookId'),
                "today_play_change": detail_real_content.get('当日播放量') or None,
                "today_play_change_rate": detail_real_content.get('当日播放量比前日') or None,
                "traffic_analysis": detail_real_content.get('流量分析') or None,
                "cumulative_ctr": detail_real_content.get('累计CTR') or None,
                "cumulative_ctr_change": detail_real_content.get('累计CTR比前日') or None,
                "cumulative_fav": detail_real_content.get('累计收藏量') or None,
                "cumulative_fav_change": detail_real_content.get('累计收藏量比前日') or None,
                "finish_rate_list": detail_list_content.get('完播率') or None,
                "age_distribution": detail_list_content.get('年龄分布') or None,
                "gender_distribution": detail_list_content.get('性别分布') or None,
                "continue_watch_rate_list": detail_list_content.get('本集续看率') or None,
                "episode_watch_progress_list": detail_list_content.get('本集观看进度') or None,
                "data_time": request.meta.get('data_time'),
            }
            yield FanqieShortDramaDetailItem(**to_list_content_dic)


if __name__ == "__main__":
    # Entry point: build the spider and run it. delete_keys=True presumably
    # clears stale task keys under the redis_key prefix before starting.
    spider = FanqieNeirongCenter(redis_key="feapder:fanqie_nr_center:data_ip", delete_keys=True)
    spider.start()
