import sys
import os
# Make the project root (the parent of this file's directory) importable so
# the `common.*` modules imported below resolve when run as a script.
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if project_root not in sys.path:
    sys.path.append(project_root)

import ast
import json
import queue
import threading
import time
import uuid
import concurrent.futures
import requests
from requests.exceptions import ProxyError, Timeout, SSLError
from datetime import datetime, timedelta
from concurrent.futures import ThreadPoolExecutor, as_completed
import common.feishu_api as feishu_api
from common.oss import OSSManager, get_token
from common.start import STOP_EVENT, bilibili_logger, config, add_proxy_to_blacklist, get_random_proxy, THIS_UPDATE_TIME, \
    THIS_REQUEST_TIME
from common.kafka_client import send_bilibili_user_detail_message_to_kafka, send_bilibili_video_details_message_to_kafka
from common.trino_client import get_creators_by_dt_bilibili, get_previous_day_update_creators_by_media, get_tags_by_id_and_media, get_this_day_update_creators_by_media, insert_bilibili_user_detail,  \
    insert_bilibili_video_details, insert_success_log_by_media, update_tags_by_id_and_media
import urllib
from functools import reduce
from http.client import responses
from urllib.parse import urlencode
from openpyxl import Workbook
import concurrent.futures
import hashlib
from openpyxl.reader.excel import load_workbook
from requests.exceptions import ProxyError, Timeout, SSLError,ConnectionError



# Sort-order values accepted by bilibili's search API `order` parameter.
order_by_views = 'click'    # sort results by view count
order_by_danmaku = 'dm'     # sort results by danmaku (bullet-comment) count




# NOTE(review): hardcoded login credentials (session cookie + CSRF token).
# These expire and grant account access — move to config/env and rotate.
SESSDATA = "e47e5c5a%2C1758012514%2C226c4%2A32CjCgKpONGrwaR3G7XDPoRYhMKB2Jk6uMD-0y1rh50AP8D8y0vJmPGOyi9Gg_0Lt7nxoSVmhJN1RIUWlDS3lIZjVPQnVFNDFoZi1CRlRpU3ZEV01JSk1pZTVxY25DUzQzS3FjUzBlTDNKRm5IY2oxa09RM3l6cVUzY3lhdnhnc0JDSGdNWnQ3SVdnIIEC"
bili_jct = "6d7e78fd47604f1e9a00516db22acdc9"

# Index permutation used by bilibili's WBI signing scheme: the concatenated
# img_key + sub_key string is shuffled through this table before hashing.
mixinKeyEncTab = [
    46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49,
    33, 9, 42, 19, 29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40,
    61, 26, 17, 0, 1, 60, 51, 30, 4, 22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11,
    36, 20, 34, 44, 52
]

def getMixinKey(orig: str):
    """Shuffle the characters of img_key+sub_key per mixinKeyEncTab and keep the first 32."""
    shuffled = ''.join(orig[idx] for idx in mixinKeyEncTab)
    return shuffled[:32]

def encWbi(params: dict, img_key: str, sub_key: str):
    """Sign request parameters with bilibili's WBI scheme.

    Adds `wts` (current unix time) to *params*, sorts keys, strips the
    characters "!'()*" from every value, and appends the MD5 `w_rid`
    signature.  Returns the signed parameter dict.
    """
    mixin_key = getMixinKey(img_key + sub_key)
    # The signature covers the request timestamp as well.
    params['wts'] = round(time.time())
    banned = "!'()*"
    cleaned = {}
    for key in sorted(params):
        cleaned[key] = ''.join(ch for ch in str(params[key]) if ch not in banned)
    query = urllib.parse.urlencode(cleaned)
    # w_rid = md5(serialized query + mixin key)
    cleaned['w_rid'] = hashlib.md5((query + mixin_key).encode()).hexdigest()
    return cleaned

def get_wbi_keys(session):
    """Fetch the current WBI img_key and sub_key from the nav endpoint using *session*."""
    resp = session.get(
        "https://api.bilibili.com/x/web-interface/nav",
        headers={
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3',
            'Referer': 'https://www.bilibili.com/'
        },
    )
    resp.raise_for_status()
    wbi_img = resp.json()['data']['wbi_img']

    def _stem(url: str) -> str:
        # The key is the file name of the image URL, minus its extension.
        return url.rsplit('/', 1)[1].split('.')[0]

    return _stem(wbi_img['img_url']), _stem(wbi_img['sub_url'])

def getWbiKeys() -> tuple[str, str]:
    """Fetch the latest WBI img_key and sub_key.

    Fixed: this was a line-for-line duplicate of get_wbi_keys(); it now
    delegates to it.  The `requests` module itself satisfies the session
    interface (it exposes .get()), so behavior is unchanged — same URL,
    headers, raise_for_status, and key parsing.
    """
    return get_wbi_keys(requests)

def generate_wbi_sign(params, img_key, sub_key):
    """Generate the WBI signature for *params*; returns (w_rid, wts).

    Fixed two defects relative to encWbi (the reference implementation in
    this file): the mixin key must be img_key+sub_key shuffled through
    mixinKeyEncTab (plain concatenation yields signatures bilibili rejects),
    and `wts` must participate in the sorted query rather than being
    appended after serialization.  Does not mutate the caller's dict.
    """
    mixin_key = getMixinKey(img_key + sub_key)
    wts = str(int(time.time()))
    signed = dict(params)          # copy so the caller's dict is untouched
    signed['wts'] = wts
    query = urlencode(dict(sorted(signed.items())))
    w_rid = hashlib.md5((query + mixin_key).encode()).hexdigest()
    return w_rid, wts

def get_bvid_by_mid_wbi(mid):
    """Query a user's video list via the WBI-signed space search endpoint.

    Fixed: the signed parameters were computed but the request was issued
    with the original unsigned `params`, so `w_rid` was never sent and the
    signature was pointless.  The request now carries `signed_params`.
    Returns the parsed JSON response.
    """
    session = requests.Session()
    session.cookies.update({
        "SESSDATA": SESSDATA,
        "bili_jct": bili_jct,
    })
    img_key, sub_key = getWbiKeys()
    bilibili_logger.info(f"img_key: {img_key}, sub_key: {sub_key}")
    params = {
        'mid': mid,
        'ps': 1,
        'pn': 2,
    }
    signed_params = encWbi(params, img_key, sub_key)
    bilibili_logger.info(f"Signed params: {signed_params}")

    proxy = get_random_proxy()
    bilibili_logger.info(f"Using proxy: {proxy}")
    proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
    proxies = {
        'http': proxy_url,
        'https': proxy_url
    }
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "X-UA": "V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC",
        "Referer": f"https://space.bilibili.com/{mid}/video",
    }

    # Send the signed params (they include both wts and w_rid).
    response = session.get("https://api.bilibili.com/x/space/wbi/arc/search",
                           headers=headers, params=signed_params, proxies=proxies)
    bilibili_logger.info(f"Response status code: {response.status_code}")
    return response.json()

def save_videos(mid, video_details):
    """Dump *video_details* to ./data/videos/<today>/<mid>.json (UTF-8, pretty-printed)."""
    today = str(datetime.now().date())
    out_dir = "./data/videos" + "/" + today
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_path = out_dir + "/" + str(mid) + ".json"
    with open(out_path, "w", encoding="utf-8") as fh:
        fh.write(json.dumps(video_details, indent=4, ensure_ascii=False))

def save_video_response(bvid, response):
    """Archive a raw video-API response to ./data/responses/videos/<today>/<bvid>.json."""
    today = str(datetime.now().date())
    out_dir = "./data/responses" + "/" + "videos" + "/" + today
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_path = out_dir + "/" + str(bvid) + ".json"
    with open(out_path, "w", encoding="utf-8") as fh:
        fh.write(json.dumps(response, indent=4, ensure_ascii=False))

def save_video_response_to_oss(bvid, response, request_id):
    """Upload a raw video-API response to OSS under the current request window."""
    # NOTE(review): oss_lock / oss_manager are module-level globals defined
    # elsewhere in this file — confirm they are initialized before use.
    target = f"bilibili/posts/{THIS_REQUEST_TIME}/{request_id}/{bvid}.json"
    with oss_lock:
        oss_manager.upload_video_response(THIS_REQUEST_TIME, request_id, bvid, response)
    bilibili_logger.info("文件保存到:" + target)


def save_up_response(mid, response):
    """Archive a raw user-API response to ./data/responses/ups/<today>/<mid>.json."""
    today = str(datetime.now().date())
    out_dir = "./data/responses" + "/" + "ups" + "/" + today
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_path = out_dir + "/" + str(mid) + ".json"
    with open(out_path, "w", encoding="utf-8") as fh:
        fh.write(json.dumps(response, indent=4, ensure_ascii=False))

def save_up_response_to_oss(mid, response, request_id):
    """Upload a raw user-API response to OSS under the current request window."""
    # NOTE(review): oss_lock / oss_manager are module-level globals defined
    # elsewhere in this file — confirm they are initialized before use.
    target = f"bilibili/users/{THIS_REQUEST_TIME}/{request_id}/{mid}.json"
    with oss_lock:
        oss_manager.upload_user_response(THIS_REQUEST_TIME, request_id, mid, response)
    bilibili_logger.info("文件保存到:" + target)


def get_video_tag_names_by_bvid(bvid):
    """Fetch the tag names of a video plus the raw API envelope.

    Returns (tag_names, res): the list of tag names and a dict wrapping the
    raw JSON for archiving; ([], {}) on any failure.  Retries up to 3 times
    through fresh proxies on proxy/connection errors.

    Fixed: the response body was parsed with response.json() three times;
    it is now parsed once and reused.
    """
    max_retries = 3
    retries = 0
    while retries < max_retries:
        proxy = get_random_proxy()

        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            headers = {
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
                'X-UA': 'V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC',
                'Referer': "https://www.bilibili.com",
            }
            params = {
                "bvid": bvid,
            }
            response = requests.get("https://api.bilibili.com/x/tag/archive/tags", headers=headers,
                                    params=params, proxies=proxies, timeout=4)

            if response.status_code == 200:
                data = response.json()  # parse once, reuse below
                tags = data.get("data")
                if tags:
                    res = {
                        "code": response.status_code,
                        "router": "https://api.bilibili.com/x/tag/archive/tags",
                        "params": params,
                        "data": data,
                        "created_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    }
                    tag_names = [tag.get("tag_name") for tag in tags]
                    # NOTE(review): the last tag is deliberately dropped —
                    # presumably an auto-generated trailing tag; confirm intent.
                    return tag_names[:-1], res
                bilibili_logger.warning("API response structure unexpected")
                return [], {}

            bilibili_logger.warning(f"Non-200 status code: {response.status_code}")
            return [], {}

        except (ProxyError, ConnectionError, Timeout, SSLError) as e:
            bilibili_logger.warning(f"Proxy connection failed ({type(e).__name__}): {str(e)}")
            retries += 1
            add_proxy_to_blacklist(proxy["ip"])
            time.sleep(1)  # back off before retrying with a new proxy

        except Exception as e:
            bilibili_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            break

    bilibili_logger.error(f"Failed after {max_retries} attempts")
    return [], {}

def get_top100_videos_by_keyword_order(keyword, order):
    """Collect the top ~100 search results for *keyword* (2 pages x 50, given *order*).

    Fixed: replaced the bare print of the result count with the module
    logger used everywhere else in this file.
    """
    videos = []
    for page in range(1, 3):
        videos.extend(get_videos_by_keyword_page_order(keyword, page, order))
    bilibili_logger.info(f"Collected {len(videos)} videos for keyword {keyword}")
    return videos

def get_videos_by_keyword_page_order(keyword, page, order):
    """Fetch one page of WBI-signed keyword search results.

    Returns the "result" list from the payload, or [] on failure.

    Fixed: the loop was `while True` — `retries` was incremented but never
    checked, so persistent non-200 responses or proxy failures looped
    forever and the trailing "Failed after" log was unreachable.  The loop
    now stops after max_retries attempts.
    """
    max_retries = 3
    retries = 0
    while retries < max_retries:
        proxy = get_random_proxy()
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}

            headers = {
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
                'X-UA': 'V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC',
                'Referer': "https://www.bilibili.com",
                "Cookie": "buvid3=CF1D2375-5A25-04F7-A134-09A5ED231D0669802infoc; b_nut=1742454569; _uuid=8ED5E56E-DB910-10523-9DFF-3D99102A9645D69718infoc; enable_web_push=DISABLE; enable_feed_channel=ENABLE; buvid4=9E1A028F-4954-44E6-19E4-55798AD4506970258-025032007-f8lIvECHhcHCtGbJq6TCqA%3D%3D; SESSDATA=e47e5c5a%2C1758012514%2C226c4%2A32CjCgKpONGrwaR3G7XDPoRYhMKB2Jk6uMD-0y1rh50AP8D8y0vJmPGOyi9Gg_0Lt7nxoSVmhJN1RIUWlDS3lIZjVPQnVFNDFoZi1CRlRpU3ZEV01JSk1pZTVxY25DUzQzS3FjUzBlTDNKRm5IY2oxa09RM3l6cVUzY3lhdnhnc0JDSGdNWnQ3SVdnIIEC; bili_jct=6d7e78fd47604f1e9a00516db22acdc9; DedeUserID=31555843; DedeUserID__ckMd5=30381648deb37a25; buvid_fp_plain=undefined; header_theme_version=CLOSE; rpdid=|(umYuuJk)l)0J'u~RkRYRmk); home_feed_column=4; browser_resolution=1264-754; fingerprint=7f196f359a713617dfbf9312790eda46; buvid_fp=7f196f359a713617dfbf9312790eda46; bili_ticket=eyJhbGciOiJIUzI1NiIsImtpZCI6InMwMyIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3NDk3MTg0OTEsImlhdCI6MTc0OTQ1OTIzMSwicGx0IjotMX0.lBdVGzFak3RwaBTDIZXH4B_iHFL-WnjJMP90YJ2xrec; bili_ticket_expires=1749718431; CURRENT_FNVAL=2000; sid=fv549ylg"
            }

            params = {'search_type': "video", 'keyword': keyword, 'page': page, 'page_size': 50, 'order': order}
            img_key, sub_key = getWbiKeys()
            signed_params = encWbi(params, img_key, sub_key)

            with requests.Session() as session:
                response = session.get(
                    "https://api.bilibili.com/x/web-interface/wbi/search/type",
                    headers=headers,
                    params=signed_params,
                    proxies=proxies,
                )

            bilibili_logger.info(f"Response status code: {response.status_code}")

            if response.status_code == 200:
                data = response.json()
                if data.get("data") and data["data"].get("result"):
                    return data["data"]["result"]
                bilibili_logger.warning("API response structure unexpected")
                return []

            bilibili_logger.warning(f"Non-200 status code: {response.status_code}")
            retries += 1

        except (ProxyError, ConnectionError, Timeout, SSLError) as e:
            bilibili_logger.warning(f"Proxy connection failed ({type(e).__name__}): {str(e)}")
            add_proxy_to_blacklist(proxy["ip"])
            retries += 1

        except Exception as e:
            bilibili_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            break

    bilibili_logger.error(f"Failed after {max_retries} attempts")
    return []


def get_top100_videos_owner_mid_by_keyword_order(keyword, order):
    """Return the unique owners (index/mid/name) of the top search results.

    Duplicate mids keep only the most recent entry; the running index still
    advances for every stored video, mirroring the original ranking walk.
    """
    authors_dict = {}
    rank = 1
    for video in get_top100_videos_by_keyword_order(keyword, order):
        # Skip empty/malformed entries lacking an owner.
        if not video:
            continue
        mid = video.get("mid")
        name = video.get("author")
        if mid is None or name is None:
            continue
        authors_dict[mid] = {
            "index": rank,
            "mid": mid,
            "name": name,
        }
        rank += 1
    return list(authors_dict.values())

def get_up_card_data_by_mid(mid, request_id):
    """Fetch the bilibili user-card payload for *mid*.

    Returns the "data" dict from /x/web-interface/card, or {} after 3
    failed attempts.  *request_id* is kept for the (currently disabled)
    OSS archiving call.

    Fixed: the retry counter was misspelled (`retires` vs `retries`) — the
    generic-exception branch incremented an undefined `retries`, raising
    NameError instead of retrying.
    """
    retries = 0
    while retries < 3:
        proxy = get_random_proxy()
        bilibili_logger.info(f"Using proxy: {proxy}")
        try:
            params = {
                'mid': mid,
            }
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {
                'http': proxy_url,
                'https': proxy_url
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                "X-UA": "V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC",
            }
            response = requests.get("https://api.bilibili.com/x/web-interface/card", headers=headers, params=params,
                                    proxies=proxies)
            bilibili_logger.info(f"Response status code: {response.status_code}")
            res = {
                "code": response.status_code,
                "router": "https://api.bilibili.com/x/web-interface/card",
                "params": params,
                "data": response.json(),
                "created_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            }
            # save_up_response_to_oss(mid,res,request_id)
            return response.json().get("data")

        except (ProxyError, ConnectionError, Timeout, SSLError) as e:
            bilibili_logger.warning(f"Proxy connection failed ({type(e).__name__}): {str(e)}")
            add_proxy_to_blacklist(proxy["ip"])
            retries += 1
            time.sleep(1)  # back off before retrying with a new proxy

        except Exception as e:
            bilibili_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            retries += 1
    return {}

def get_videos_count_by_mid(mid):
    """Return the total number of videos posted by *mid*; 0 on failure.

    Fixed: the function previously fell off the end (implicit None) after
    exhausting retries or hitting an unexpected payload, which crashed
    callers that compare the result numerically.  The payload walk is now
    guarded against missing keys, a request timeout was added (matching the
    sibling fetchers), and failure returns an explicit 0.
    """
    retries = 0
    max_retries = 3
    while retries < max_retries:
        proxy = get_random_proxy()
        bilibili_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {
                'http': proxy_url,
                'https': proxy_url
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                "X-UA": "V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC",
                "Referer": f"https://space.bilibili.com/{mid}/video",
            }
            params = {
                'mid': mid,
                'ps': 1,
                'pn': 1,
                "keywords": "",
            }
            response = requests.get("https://api.bilibili.com/x/series/recArchivesByKeywords", headers=headers,
                                    params=params, proxies=proxies, timeout=4)
            bilibili_logger.info(f"Response status code: {response.status_code}")

            data = response.json().get("data") or {}
            page = data.get("page") or {}
            return page.get("total", 0)

        except (ProxyError, ConnectionError, Timeout, SSLError) as e:
            bilibili_logger.warning(f"Proxy connection failed ({type(e).__name__}): {str(e)}")
            add_proxy_to_blacklist(proxy["ip"])
            retries += 1
            time.sleep(1)  # back off before retrying with a new proxy

        except Exception as e:
            bilibili_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            break

    bilibili_logger.error(f"Failed to fetch video count after {max_retries} attempts")
    return 0

def get_seasons_series_by_mid(mid):
    """Collect the names of a user's seasons ("合集") and series.

    Walks up to 24 pages of the seasons_series_list endpoint, stopping
    early at the first empty page, and returns the collected names as a
    stringified list (callers store the value as text).

    Fixed: the retry counter was shared across pages — after 3 transient
    failures anywhere, every remaining page was silently skipped while the
    outer loop kept spinning.  Retries now reset per page, and a page that
    still fails after max_retries aborts the walk.  Also replaced the bare
    print of a possibly-unbound `response` in the generic except branch.
    """
    max_retries = 3
    series = []
    for pn in range(1, 25):
        retries = 0
        page_done = False
        while retries < max_retries:
            proxy = get_random_proxy()
            bilibili_logger.info(f"Using proxy: {proxy}")
            try:
                proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
                proxies = {
                    'http': proxy_url,
                    'https': proxy_url
                }
                headers = {
                    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
                    'X-UA': 'V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC',
                    'Referer': "https://www.bilibili.com",
                    "Cookie": "buvid3=CF1D2375-5A25-04F7-A134-09A5ED231D0669802infoc; b_nut=1742454569; _uuid=8ED5E56E-DB910-10523-9DFF-3D99102A9645D69718infoc; enable_web_push=DISABLE; enable_feed_channel=ENABLE; buvid4=9E1A028F-4954-44E6-19E4-55798AD4506970258-025032007-f8lIvECHhcHCtGbJq6TCqA%3D%3D; SESSDATA=e47e5c5a%2C1758012514%2C226c4%2A32CjCgKpONGrwaR3G7XDPoRYhMKB2Jk6uMD-0y1rh50AP8D8y0vJmPGOyi9Gg_0Lt7nxoSVmhJN1RIUWlDS3lIZjVPQnVFNDFoZi1CRlRpU3ZEV01JSk1pZTVxY25DUzQzS3FjUzBlTDNKRm5IY2oxa09RM3l6cVUzY3lhdnhnc0JDSGdNWnQ3SVdnIIEC; bili_jct=6d7e78fd47604f1e9a00516db22acdc9; DedeUserID=31555843; DedeUserID__ckMd5=30381648deb37a25; buvid_fp_plain=undefined; header_theme_version=CLOSE; rpdid=|(umYuuJk)l)0J'u~RkRYRmk); home_feed_column=4; browser_resolution=1264-754; fingerprint=7f196f359a713617dfbf9312790eda46; buvid_fp=7f196f359a713617dfbf9312790eda46; bili_ticket=eyJhbGciOiJIUzI1NiIsImtpZCI6InMwMyIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3NDk3MTg0OTEsImlhdCI6MTc0OTQ1OTIzMSwicGx0IjotMX0.lBdVGzFak3RwaBTDIZXH4B_iHFL-WnjJMP90YJ2xrec; bili_ticket_expires=1749718431; CURRENT_FNVAL=2000; sid=fv549ylg"
                }
                url = "https://api.bilibili.com/x/polymer/web-space/seasons_series_list"
                params = {
                    "mid": mid,
                    "page_size": 20,
                    "page_num": pn,
                    "web_location": "333.1387"
                }
                response = requests.get(url, headers=headers, proxies=proxies, params=params)
                bilibili_logger.info(f"Response status code: {response.status_code}")

                items = response.json()["data"]["items_lists"]
                seasons = items["seasons_list"]
                series_items = items["series_list"]

                # An entirely empty page means the listing is exhausted.
                if len(seasons) == 0 and len(series_items) == 0:
                    return str(series)

                for season in seasons:
                    # Strip the "合集·" prefix bilibili prepends to season names.
                    series.append(season["meta"]["name"].replace("合集·", "", 1).strip())
                for item in series_items:
                    series.append(item["meta"]["name"])
                page_done = True
                break

            except (ProxyError, ConnectionError, Timeout, SSLError) as e:
                bilibili_logger.warning(f"Proxy connection failed ({type(e).__name__}): {str(e)}")
                add_proxy_to_blacklist(proxy["ip"])
                retries += 1
                time.sleep(1)  # back off before retrying with a new proxy

            except Exception as e:
                bilibili_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
                retries += 1
                time.sleep(1)

        if not page_done:
            # Give up on persistent failure rather than spin through the
            # remaining pages doing nothing.
            break

    return str(series)

def get_video_bvid_by_number_and_mid(mid, number):
    """Return the bvid of the *number*-th video slot of user *mid*.

    Returns "" when the slot is empty or the lookup fails.  Fixed: the
    function previously returned implicit None after exhausting retries or
    on an unexpected error, which broke callers that test for the ""
    sentinel; it now always returns a string.  The payload walk is also
    guarded against a missing "data" key, and the stray print of the raw
    JSON was downgraded to a debug log.
    """
    retries = 0
    max_retries = 3
    while retries < max_retries:
        proxy = get_random_proxy()
        bilibili_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {
                'http': proxy_url,
                'https': proxy_url
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                "X-UA": "V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC",
                "Referer": f"https://space.bilibili.com/{mid}/video",
            }
            params = {
                'mid': mid,
                'ps': 1,
                'pn': number,
                "keywords": "",
            }
            response = requests.get("https://api.bilibili.com/x/series/recArchivesByKeywords", headers=headers,
                                    params=params, proxies=proxies, timeout=4)
            bilibili_logger.info(f"Response status code: {response.status_code}")

            payload = response.json()
            bilibili_logger.debug(f"recArchivesByKeywords payload: {payload}")
            archives = (payload.get("data") or {}).get("archives", [])
            response.close()

            if len(archives) > 0:
                return archives[0].get("bvid")
            return ""

        except (ProxyError, ConnectionError, Timeout, SSLError) as e:
            bilibili_logger.warning(f"Proxy connection failed ({type(e).__name__}): {str(e)}")
            add_proxy_to_blacklist(proxy["ip"])
            retries += 1
            time.sleep(1)  # back off before retrying with a new proxy

        except Exception as e:
            bilibili_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            break

    # All retries exhausted — return the "" sentinel callers expect.
    return ""


def get_video_data_by_bvid(bvid):
    """Fetch /x/web-interface/view for *bvid*.

    Returns (data, res): the payload's "data" dict plus an archival
    envelope wrapping the full raw JSON; ({}, {}) on any failure.  Retries
    up to 3 times through fresh proxies on proxy/connection errors.

    Fixed: the response body was parsed with response.json() twice; it is
    now parsed once and reused for both return values.
    """
    retries = 0
    max_retries = 3
    while retries < max_retries:
        proxy = get_random_proxy()

        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            headers = {
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
                'X-UA': 'V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC',
                'Referer': "https://www.bilibili.com",
            }

            params = {
                "bvid": bvid,
            }

            response = requests.get("https://api.bilibili.com/x/web-interface/view", headers=headers,
                                    params=params, proxies=proxies, timeout=4)

            if response.status_code == 200:
                payload = response.json()  # parse once, reuse below
                if payload.get("data"):
                    res = {
                        "code": response.status_code,
                        "router": "https://api.bilibili.com/x/web-interface/view",
                        "params": params,
                        "data": payload,
                        "created_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    }
                    return payload["data"], res
                bilibili_logger.warning("API response structure unexpected")
                return {}, {}

            bilibili_logger.warning(f"Non-200 status code: {response.status_code}")
            return {}, {}

        except (ProxyError, ConnectionError, Timeout, SSLError) as e:
            bilibili_logger.warning(f"Proxy connection failed ({type(e).__name__}): {str(e)}")
            add_proxy_to_blacklist(proxy["ip"])
            retries += 1
            time.sleep(1)  # back off before retrying with a new proxy

        except Exception as e:
            bilibili_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            break

    bilibili_logger.error(f"Failed after {max_retries} attempts")
    return {}, {}

def time_stamp_to_datetime(time_stamp):
    """Format a unix timestamp as 'YYYY-MM-DD HH:MM:SS' in local time."""
    local = time.localtime(time_stamp)
    return time.strftime("%Y-%m-%d %H:%M:%S", local)


def get_video_link_by_bvid(bvid):
    """Build the public watch-page URL for a bvid."""
    return "https://www.bilibili.com/video/" + str(bvid)



def get_video_detail_by_bvid(bvid):
    """Assemble a flat video-detail dict for *bvid*; {} when the data is unusable."""
    request_id = uuid.uuid4()
    data, video_response = get_video_data_by_bvid(bvid)
    # Bail out unless the payload and every field we rely on are present.
    if not data:
        return {}
    if any(data.get(field) is None for field in ("title", "stat", "pubdate", "duration")):
        return {}
    stat = data.get("stat")
    tags, res = get_video_tag_names_by_bvid(bvid)
    video_detail = {
        "bvid": bvid,
        "title": data.get("title"),
        "pubdate": time_stamp_to_datetime(data.get("pubdate")),
        "duration": data.get("duration"),
        "view": stat.get("view"),
        "like": stat.get("like"),
        "danmaku": stat.get("danmaku"),
        "reply": stat.get("reply"),
        "favorite": stat.get("favorite"),
        "coin": stat.get("coin"),
        "share": stat.get("share"),
        "good": 0,      # not provided by the API
        "dislike": 0,   # always 0 upstream
        "video_link": get_video_link_by_bvid(bvid),
        "tags": str(tags),
    }
    response = {
        "video_response": video_response,
    }
    # save_video_response_to_oss(bvid,response,request_id)
    return video_detail


def get_register_timedate_by_mid(mid):
    """Approximate the account registration date via the user's oldest video.

    Walks backwards from the last video slot until one yields a usable
    pubdate; returns "" when no video is reachable.

    Fixed: get_videos_count_by_mid can fail and return a non-numeric
    count, which previously crashed the `count < 1` comparison; a None
    bvid from the slot lookup is now treated like the "" sentinel.
    """
    count = get_videos_count_by_mid(mid)
    if not count:  # None or 0 -> no videos reachable
        return ""

    first_video_detail = {}
    retries = 0
    while first_video_detail == {}:
        bilibili_logger.info(f"count:{count}")
        if count < 1:
            return ""
        bvid = get_video_bvid_by_number_and_mid(mid, count)
        if not bvid:
            # Skip unfetchable slots; after 10 misses jump in larger steps.
            if retries > 10:
                count -= 10
            else:
                count -= 1
                retries += 1
            continue
        first_video_detail = get_video_detail_by_bvid(bvid)
        if first_video_detail == {}:
            count -= 1
        else:
            return first_video_detail.get("pubdate")


def get_up_link_by_mid(mid):
    """Build the public space-page URL for a user mid."""
    return "https://space.bilibili.com/" + str(mid)


def get_up_detail_by_mid(mid):
    """Assemble the full UP (creator) profile dict for *mid*.

    Returns {} when the card payload is missing.  Fixed: the card data was
    dereferenced without guarding against a None payload or a missing
    "card" key (previously an AttributeError), and the bare print was
    replaced with the module logger.
    """
    request_id = uuid.uuid4()
    data = get_up_card_data_by_mid(mid, request_id)
    if not data or data.get("card") is None:
        return {}
    card = data.get("card")
    bilibili_logger.info(f"card: {card}")
    up_detail = {
        "uid": mid,
        "avatar": card.get("face"),
        "sign": card.get("sign"),
        "nick_name": card.get("name"),
        "ip": "",       # not exposed by this endpoint
        "sex": card.get("sex"),
        "fans": card.get("fans"),
        "mcn": "",      # not exposed by this endpoint
        "email": "",    # not exposed by this endpoint
        "register_date": get_register_timedate_by_mid(mid),
        "link": get_up_link_by_mid(mid),
        "seasons_series": get_seasons_series_by_mid(mid),
    }
    return up_detail

def get_all_bvid_list_by_mid_v2(mid):
    """Concurrently page through a user's video archive and collect every bvid.

    Worker threads pull page numbers off a queue; the first empty page sets
    a stop event that winds the others down.  Proxy failures requeue the
    page for retry through a fresh proxy.

    Fixed a deadlock: the previous page_queue.join() could hang forever —
    when the stop event fired while pages were still queued, workers exited
    without calling task_done() on those entries, so join() never returned.
    Exiting the ThreadPoolExecutor context already waits for all workers,
    so the join (and task_done bookkeeping) is unnecessary.
    """
    bvid_list = []
    page_queue = queue.Queue()
    page_queue.put(1)
    lock = threading.Lock()
    stop_event = threading.Event()

    def worker():
        while not stop_event.is_set():
            try:
                page = page_queue.get_nowait()
            except queue.Empty:
                break  # no pages pending — this worker is done

            proxy = get_random_proxy()
            bilibili_logger.info(f"Using proxy: {proxy} for page {page}")
            try:
                proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
                proxies = {'http': proxy_url, 'https': proxy_url}
                headers = {
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                    "X-UA": "V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC",
                    "Referer": f"https://space.bilibili.com/{mid}/video",
                }
                params = {
                    'mid': mid,
                    'ps': 100,
                    'pn': page,
                    "keywords": "",
                }
                response = requests.get(
                    "https://api.bilibili.com/x/series/recArchivesByKeywords",
                    headers=headers,
                    params=params,
                    proxies=proxies,
                    timeout=3
                )
                bilibili_logger.info(f"Response status code for page {page}: {response.status_code}")

                data = response.json().get("data", {})
                archives = data.get("archives", [])

                if not archives:
                    bilibili_logger.info(f"Page {page} is empty, stopping.")
                    stop_event.set()  # wind down all workers
                    continue

                # Collect this page's bvids under the shared-list lock.
                with lock:
                    bvid_list.extend(archive.get("bvid") for archive in archives)

                next_page = page + 1
                page_queue.put(next_page)
                bilibili_logger.debug(f"Added page {next_page} to queue")

            except (ProxyError, ConnectionError, Timeout, SSLError):
                bilibili_logger.error(f"Proxy error on page {page}, retrying...")
                add_proxy_to_blacklist(proxy["ip"])
                page_queue.put(page)  # requeue for retry with another proxy
            except Exception as e:
                bilibili_logger.error(f"Error processing page {page}: {str(e)}")
            finally:
                time.sleep(1)  # throttle between requests

    with ThreadPoolExecutor(max_workers=10) as executor:
        for _ in range(5):  # five concurrent pagers
            executor.submit(worker)
        # Leaving the with-block waits for every worker to finish.

    return bvid_list


def get_video_details_by_bvid_list_v2(bvid_list):
    """Fetch the detail dict for every bvid concurrently.

    Lookups that came back as an empty dict (failed fetches) are dropped
    from the result.
    """
    details = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=55) as pool:
        for detail in pool.map(get_video_detail_by_bvid, bvid_list):
            if detail != {}:
                details.append(detail)
    return details



def get_500_bvid_list_by_mid(mid):
    """Collect up to 500 video bvids for uploader `mid`.

    Worker threads pull page numbers from a shared queue and hit the
    recArchivesByKeywords endpoint (100 items per page) through random
    proxies.  Collection stops when an empty page comes back or MAX_ITEMS
    bvids have been gathered.

    Returns a list of at most MAX_ITEMS bvid strings.
    """
    bvid_list = []
    page_queue = queue.Queue()
    page_queue.put(1)
    lock = threading.Lock()
    stop_event = threading.Event()
    MAX_ITEMS = 500  # maximum number of bvids to collect

    def worker():
        while True:
            try:
                page = page_queue.get_nowait()
            except queue.Empty:
                break  # queue drained, exit the loop

            # BUGFIX: once the stop event fires, drain any pages still in the
            # queue and acknowledge them.  Previously workers simply exited on
            # stop_event, leaving queued pages without task_done(), which made
            # page_queue.join() block forever.
            if stop_event.is_set():
                page_queue.task_done()
                continue

            proxy = get_random_proxy()
            bilibili_logger.info(f"Using proxy: {proxy} for page {page}")
            try:
                proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
                proxies = {'http': proxy_url, 'https': proxy_url}
                headers = {
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
                    "X-UA": "V=1&PN=WebApp&LANG=zh_CN&VN_CODE=102&LOC=CN&PLT=PC&DS=Android&UID=3a96137e-5959-4314-b75c-720b7a719718&OS=Mac+OS&OSV=10.15.7&DT=PC",
                    "Referer": f"https://space.bilibili.com/{mid}/video",
                }
                params = {
                    'mid': mid,
                    'ps': 100,
                    'pn': page,
                    "keywords": "",
                }
                response = requests.get(
                    "https://api.bilibili.com/x/series/recArchivesByKeywords",
                    headers=headers,
                    params=params,
                    proxies=proxies,
                    timeout=3
                )
                bilibili_logger.info(f"Response status code for page {page}: {response.status_code}")

                data = response.json().get("data", {})
                archives = data.get("archives", [])

                if not archives:
                    bilibili_logger.info(f"Page {page} is empty, stopping.")
                    stop_event.set()  # Event.set() is thread-safe; no lock needed
                    continue

                # Collect bvids under the lock; stop once MAX_ITEMS is reached.
                with lock:
                    for archive in archives:
                        bvid = archive.get("bvid")
                        if bvid:
                            bvid_list.append(bvid)
                            if len(bvid_list) >= MAX_ITEMS:
                                stop_event.set()
                                break  # stop processing this page

                # If we already hit the cap, do not schedule more pages.
                if stop_event.is_set():
                    continue

                # Queue the next page for any worker to pick up.
                next_page = page + 1
                page_queue.put(next_page)
                bilibili_logger.debug(f"Added page {next_page} to queue")

            except (ProxyError, ConnectionError, Timeout, SSLError):
                bilibili_logger.error(f"Proxy error on page {page}, retrying...")
                add_proxy_to_blacklist(proxy["ip"])
                page_queue.put(page)  # re-queue the page for retry
            except Exception as e:
                bilibili_logger.error(f"Error processing page {page}: {str(e)}")
            finally:
                page_queue.task_done()
                time.sleep(1)  # keep the original request pacing

    # Concurrent page fetching.
    with ThreadPoolExecutor(max_workers=10) as executor:  # pool sized above worker count
        for _ in range(5):  # five concurrent page workers
            executor.submit(worker)
        # Block until every queued page is processed or drained.
        page_queue.join()

    # Return at most MAX_ITEMS bvids.
    return bvid_list[:MAX_ITEMS]


def get_500_videos_by_mid(mid):
    """Return up to 500 video detail dicts for the uploader, each tagged
    with its uid."""
    details = get_video_details_by_bvid_list_v2(get_500_bvid_list_by_mid(mid))
    for detail in details:
        detail["uid"] = mid
    return details

def get_all_videos_by_mid(mid):
    """Return every video detail dict for the uploader, each tagged with
    its uid."""
    details = get_video_details_by_bvid_list_v2(get_all_bvid_list_by_mid_v2(mid))
    for detail in details:
        detail["uid"] = mid
    return details


# def task_up_list():
#     sheet_up_list = config.get("sheet_up_list")
#     spreadsheet_token = config.get("spreadsheet_token")
#     app_id = config.get("app_id")
#     app_serect = config.get("app_serect")
#     access_token = feishu_api.get_aenant_access_token(app_id,app_serect)
#     start = 2
#     end = 100
#     up_list = feishu_api.get_up_list_by_range(access_token,spreadsheet_token,sheet_up_list,start,end)
#     while len(up_list) > 0:
#         print(up_list)
#         for row in up_list:
#             mid = row["mid"]
#             if row["state"] == "ok":
#                 bilibili_logger.info(f"Up {mid} has been scraped, skipping...")
#                 continue
#             bilibili_logger.info(f"Starting to get up detail,mid:{mid}")
#             up_detail = get_up_detail_by_mid(mid)
#             insert_bilibili_user_detail(up_detail)
#             bilibili_logger.info(f"Starting to get videos for up {mid}")
#             video_details = get_all_videos_by_mid(mid)
#             insert_bilibili_video_details(video_details,up_detail)
#             feishu_api.update_state_by_rowId(access_token,spreadsheet_token,sheet_up_list,row["row_id"],THIS_REQUEST_TIME)
#             time.sleep(3)
#         start = end + 1
#         end = end + 100
#         up_list = feishu_api.get_up_list_by_range(access_token,spreadsheet_token,sheet_up_list,start, end)


# Per-run statistics and bookkeeping, reported to Feishu when task_bilibili finishes.
THIS_UPDATE_CREATORS = []  # str mids of creators updated during the full keyword pass
THIS_UPDATE_KEYWORD_NUM = 0  # number of keywords processed this run
THIS_UPDATE_KEYWORDS = []  # the keyword strings processed this run
THIS_UPDATE_CREATOR_NUM = 1  # creators updated (NOTE(review): seeded at 1, not 0 — confirm this is intentional)
THIS_UPDATE_CREATOR_NUM_INCR = 0  # creators touched by the incremental pass
THIS_UPDATE_VIDEO_NUM = 0  # total videos uploaded this run
ALL_TRINO_TIME_BILIBILI = 10.0  # accumulated upload seconds (NOTE(review): seeded with 10.0 — confirm why)

def get_authors_bilibili(authors):
    """Partition `authors` into (not yet crawled, already crawled).

    An author counts as already crawled when its stringified mid appears
    in today's creator-id list from Trino.
    """
    known_ids = get_creators_by_dt_bilibili()
    need_author_list = []
    not_need_author_list = []
    for author in authors:
        bucket = not_need_author_list if str(author["mid"]) in known_ids else need_author_list
        bucket.append(author)
    return need_author_list, not_need_author_list



def task_keyword_list_bilibili_incremental_update():
    """Incrementally refresh creators updated yesterday but not touched by
    today's full keyword pass: re-fetch each profile and up to 500 videos
    and push both to Kafka."""
    bilibili_logger.info("开始增量更新bilibili关键词作者")
    global ALL_TRINO_TIME_BILIBILI
    global THIS_UPDATE_CREATORS
    global THIS_UPDATE_CREATOR_NUM
    global THIS_UPDATE_CREATOR_NUM_INCR
    global THIS_UPDATE_VIDEO_NUM
    # Use a set for O(1) membership tests instead of the original O(n)
    # list scan per previous creator.
    this_day_creators = set(THIS_UPDATE_CREATORS)
    previous_creators = get_previous_day_update_creators_by_media("bilibili")
    need_update_creators = [mid for mid in previous_creators if mid not in this_day_creators]

    bilibili_logger.info(f"需要增量更新的作者数量为:{len(need_update_creators)}")

    THIS_UPDATE_CREATOR_NUM += len(need_update_creators)
    THIS_UPDATE_CREATOR_NUM_INCR = len(need_update_creators)

    for mid in need_update_creators:
        bilibili_logger.info(f"正在增量更新作者mid:{mid}")
        up_detail = get_up_detail_by_mid(mid)
        # Skip failed profile fetches (consistent with the full-update pass,
        # which checks for an empty dict before uploading).
        if up_detail == {}:
            bilibili_logger.info(f"用户信息查询失败，mid:{mid}")
            continue
        up_detail["tags"] = str(get_tags_by_id_and_media(mid, "bilibili"))
        bilibili_logger.info("用户信息查询完毕，上传数据库......")
        start = time.time()
        send_bilibili_user_detail_message_to_kafka(up_detail)
        ALL_TRINO_TIME_BILIBILI += time.time() - start
        bilibili_logger.info(f"Starting to get videos for up {mid}")
        video_details = get_500_videos_by_mid(mid)
        bilibili_logger.info("视频列表查询完毕，上传数据库......")
        start = time.time()
        send_bilibili_video_details_message_to_kafka(video_details, up_detail)
        THIS_UPDATE_VIDEO_NUM += len(video_details)
        ALL_TRINO_TIME_BILIBILI += time.time() - start


def task_keyword_list_bilibili_full_update():
    """Full keyword pass over the Feishu sheet.

    For each keyword: find the top-100 video owners, crawl new creators
    (profile + up to 500 videos), and append the keyword to the tags of
    creators already in the database.  A trailing sentinel row whose
    keyword is "T" switches uploads from Kafka to direct Trino inserts.
    """
    global ALL_TRINO_TIME_BILIBILI
    global THIS_UPDATE_KEYWORD_NUM
    global THIS_UPDATE_CREATOR_NUM
    global THIS_UPDATE_VIDEO_NUM
    global THIS_UPDATE_CREATORS
    global THIS_UPDATE_KEYWORDS
    INSERT_TRINO = False
    start = 2
    end = 100
    keyword_list = feishu_api.get_keyword_list_by_range_bilibili(access_token, app_token, sheet_keyword_list_bilibili, start, end)
    # BUGFIX: guard against an empty sheet BEFORE peeking at keyword_list[-1];
    # the original indexed [-1] first and raised IndexError on an empty fetch.
    if len(keyword_list) == 0:
        bilibili_logger.info("没有需要查询的关键词")
        return
    if keyword_list[-1]["keyword"] == "T":
        keyword_list = keyword_list[:-1]
        INSERT_TRINO = True
        # The sheet may have contained only the sentinel row.
        if len(keyword_list) == 0:
            bilibili_logger.info("没有需要查询的关键词")
            return
    while len(keyword_list) > 0:
        THIS_UPDATE_KEYWORD_NUM += len(keyword_list)
        print(keyword_list)
        for row in keyword_list:
            keyword = row["keyword"]
            THIS_UPDATE_KEYWORDS.append(keyword)
            bilibili_logger.info(f"正在查询: {keyword}")
            author_list = get_top100_videos_owner_mid_by_keyword_order(keyword, order_by_views)
            print(author_list)
            need_author_list, not_need_author_list = get_authors_bilibili(author_list)

            print(f"need:{need_author_list}")
            print(f"not need:{not_need_author_list}")

            bilibili_logger.info(f"需要查询 {len(need_author_list)} 个作者")
            print(need_author_list)
            index = 1
            # Known creators: just make sure this keyword is in their tags.
            for author in not_need_author_list:
                mid = author["mid"]
                bilibili_logger.info(f"更新作者标签，mid:{mid}")
                tags = get_tags_by_id_and_media(mid, "bilibili")
                if keyword in tags:
                    continue
                tags.append(keyword)
                update_tags_by_id_and_media(tags, mid, "bilibili")

            # New creators: crawl profile and videos, then upload.
            for author in need_author_list:
                mid = author["mid"]
                bilibili_logger.info(f"正在查询第{index}个作者...... {index}/{len(need_author_list)}")
                index = index + 1
                # Skip creators whose latest retrievable video is too old.
                if not is_valid_up(mid):
                    continue

                bilibili_logger.info(f"Starting to get up detail,mid:{mid}")
                up_detail = get_up_detail_by_mid(mid)
                if up_detail == {}:
                    bilibili_logger.info(f"用户信息查询失败，mid:{mid}")
                    continue
                bilibili_logger.info("用户信息查询完毕，上传数据库......")
                up_detail["tags"] = str([keyword])
                start = time.time()
                if INSERT_TRINO:
                    insert_bilibili_user_detail(up_detail)
                else:
                    send_bilibili_user_detail_message_to_kafka(up_detail)
                ALL_TRINO_TIME_BILIBILI += time.time() - start
                bilibili_logger.info(f"Starting to get videos for up {mid}")
                video_details = get_500_videos_by_mid(mid)
                bilibili_logger.info("视频列表查询完毕，上传数据库......")
                start = time.time()
                if INSERT_TRINO:
                    insert_bilibili_video_details(video_details, up_detail)
                else:
                    send_bilibili_video_details_message_to_kafka(video_details, up_detail)

                THIS_UPDATE_CREATORS.append(str(mid))
                THIS_UPDATE_CREATOR_NUM += 1
                THIS_UPDATE_VIDEO_NUM += len(video_details)
                ALL_TRINO_TIME_BILIBILI += time.time() - start
            bilibili_logger.info(f"Keyword : {keyword} Done.")
            feishu_api.update_time_by_rowId(access_token, app_token, sheet_keyword_list_bilibili, row["row_id"], THIS_UPDATE_TIME)
        start = end + 1
        end = end + 100
        # NOTE(review): this calls get_key_list_by_range while the first fetch
        # above uses get_keyword_list_by_range_bilibili — confirm both exist
        # and return the same row shape.
        keyword_list = feishu_api.get_key_list_by_range(access_token, app_token, sheet_keyword_list_bilibili, start, end)

def is_more_than_180_days(time_str, time_format="%Y-%m-%d %H:%M:%S"):
    """Return True if `time_str` lies more than 180 days before now.

    `time_str` is parsed with `time_format`; an unparseable string is
    reported and treated as not-older-than-180-days (False).
    """
    try:
        given_time = datetime.strptime(time_str, time_format)
    except ValueError as e:
        print(f"时间字符串格式错误: {e}")
        return False
    return datetime.now() - given_time > timedelta(days=180)

def is_valid_up(mid):
    """Return True if the uploader has a retrievable video (within the
    first 10 slots) published no more than 180 days ago."""
    attempt = 1
    bvid = get_video_bvid_by_number_and_mid(mid, attempt)
    while bvid in ("", None):
        attempt += 1
        if attempt > 10:
            return False
        bvid = get_video_bvid_by_number_and_mid(mid, attempt)
    detail = get_video_detail_by_bvid(bvid)
    if detail == {}:
        return False
    if is_more_than_180_days(detail["pubdate"]):
        bilibili_logger.info(f"Up {mid} is more than 180 days, skipping...")
        return False
    return True

STOP_THREAD = False

def thread_update():
    """Start the daemon threads that periodically refresh the OSS manager
    and the Feishu tokens."""
    for refresh in (update_oss_manager, update_feishu_token):
        threading.Thread(target=refresh, daemon=True).start()


# Feishu sheet / app configuration; tokens are fetched once at import time
# and refreshed in the background by update_feishu_token.
sheet_keyword_list_bilibili = config.get("bilibili_sheet_keyword_list")
sheet_keyword_list_douyin = config.get("douyin_sheet_keyword_list")
node_token = config.get("node_token")
app_id = config.get("app_id")
app_serect = config.get("app_serect")
access_token = feishu_api.get_aenant_access_token(app_id,app_serect)
app_token = feishu_api.get_app_token(access_token, node_token)
FEISHU_UPDATE_INTERVAL = 5 * 60  # seconds between token refreshes
FEISHU_LOCK = threading.Lock()  # guards updates to access_token/app_token

def update_feishu_token():
    """Background loop: refresh the Feishu tenant/app tokens every
    FEISHU_UPDATE_INTERVAL seconds until STOP_THREAD is set."""
    global access_token
    global app_token
    while not STOP_THREAD:
        time.sleep(FEISHU_UPDATE_INTERVAL)
        # Re-check after sleeping so a shutdown requested during the sleep
        # does not trigger one more refresh (consistent with
        # update_oss_manager).
        if STOP_THREAD:
            break
        try:
            with FEISHU_LOCK:
                access_token = feishu_api.get_aenant_access_token(app_id, app_serect)
                app_token = feishu_api.get_app_token(access_token, node_token)
            bilibili_logger.info("Feishu Token updated")
        except Exception as e:
            bilibili_logger.error(f"更新Feishu token时出错: {e}")


# Global OSS manager, rebuilt periodically by update_oss_manager.
oss_manager = OSSManager(get_token())
oss_lock = threading.Lock()  # guards replacement of oss_manager
OSS_MANAGER_UPDATE_INTERVAL = 5*60  # seconds between refreshes

def update_oss_manager():
    """Background loop: rebuild the global OSSManager with a fresh token
    every OSS_MANAGER_UPDATE_INTERVAL seconds until STOP_THREAD is set."""
    global oss_manager
    while not STOP_THREAD:
        time.sleep(OSS_MANAGER_UPDATE_INTERVAL)
        # Re-check after sleeping so shutdown is not delayed by one more refresh.
        if STOP_THREAD:
            break
        try:
            with oss_lock:
                oss_manager = OSSManager(get_token())
            bilibili_logger.info("OSS manager updated")
        except Exception as e:
            bilibili_logger.error(f"更新OSS manager时出错: {e}")
    bilibili_logger.info("OSS manager update thread stopped")

def task_bilibili(now):
    """Run one full bilibili crawl pass and report the result to Feishu.

    now: a time.perf_counter() timestamp taken at startup, used to compute
    total elapsed time for the completion message.
    """
    global THIS_UPDATE_CREATORS
    global STOP_THREAD
    try:
        STOP_THREAD = False
        # Start the background daemon threads that keep OSS/Feishu tokens fresh.
        thread_update()

        # Full keyword pass, then refresh creators updated yesterday but
        # not touched today.
        task_keyword_list_bilibili_full_update()
        task_keyword_list_bilibili_incremental_update()

        insert_success_log_by_media("bilibili")

        # Summary message sent to Feishu with this run's statistics.
        message = f"✅ BiliBili脚本执行完成\n开始时间：{THIS_UPDATE_TIME}\n当前时间：{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n本次执行更新了{THIS_UPDATE_KEYWORD_NUM}个关键词:{THIS_UPDATE_KEYWORDS}\n更新了{THIS_UPDATE_CREATOR_NUM}个作者，{THIS_UPDATE_VIDEO_NUM}个作品\n增量更新了{THIS_UPDATE_CREATOR_NUM_INCR}个作者\n耗时：{time.perf_counter() - now}秒,上传数据库总用时：{ALL_TRINO_TIME_BILIBILI}秒"
        print(message)
        feishu_api.sendToFeishu(message)
    finally:
        bilibili_logger.info("停止后台更新线程")
        STOP_THREAD = True  # signal the background update threads to exit





if __name__ == "__main__":
    # The commented lines below are ad-hoc debug/utility invocations kept
    # for reference; the live entry point is the single task_bilibili call.
    # print(feishu_api.get_keyword_list_by_range_bilibili(access_token,app_token,sheet_keyword_list_bilibili,2,100))
    # get_seasons_series_by_mid("10330740")
    task_bilibili(time.perf_counter())
    # up_detail = get_up_detail_by_mid("258878535")
    # up_detail["tags"] = "['Apex']"
    # tags = "['Apex']"
    # print(tags.replace("'", "'''"))
    # send_bilibili_user_detail_message_to_kafka(up_detail)
    # video_details = get_500_videos_by_mid("258878535")
    # send_bilibili_video_details_message_to_kafka(video_details,up_detail)