import sys
import os

# Resolve the parent of this file's directory (i.e. the project root)
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if project_root not in sys.path:
    sys.path.append(project_root)

from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
import random
import threading
import time
from urllib.parse import urlparse, parse_qs

import requests
from DrissionPage._configs.chromium_options import ChromiumOptions
from DrissionPage._pages.chromium_page import ChromiumPage
from requests.exceptions import ProxyError, Timeout, SSLError

import common.feishu_api as feishu_api
from common.feishu_api import get_keyword_list_by_range_kuaishou
from kuaishou.kuaishou_did import FIFOProcessor, add_did_to_cd, get_dids, get_new_did, get_new_did_by_page, get_random_did, co
from common.start import INSERT_TRINO_LOCK, THIS_UPDATE_TIME, kuaishou_logger, config, get_random_proxy_ks
from common.trino_client import get_creators_by_dt_media, get_tags_by_id_and_media, insert_kuaishou_user_detail, insert_kuaishou_video_details, insert_success_log_by_media, update_tags_by_id_and_media


def get_top100_authors_by_keyword(keyword):
    posts_list = get_top100_posts_by_keyword(keyword)
    authors_dict = {}
    for post in posts_list:
        kuaishou_id = post.get("kuaishou_user_id")
        authors_dict[kuaishou_id] = {
            'kuaishou_id': kuaishou_id,
            "kuaishou_user_name": post.get("kuaishou_user_name")
        }
    print(authors_dict.values())
    return list(authors_dict.values())

def get_top100_posts_by_keyword(keyword):
    did = DID_QUEUE.pop_did()
    post_list = []
    pcursor = 0
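    # NOTE: this search endpoint is paged here by incrementing an integer pcursor
    # per request; visionProfilePhotoList below instead pages with the cursor
    # string returned in each response.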
    while len(post_list) < 50:
        retries = 0
        while retries < 6:
            proxy = get_random_proxy_ks()
            kuaishou_logger.info(f"Using proxy: {proxy}")
            try:
                proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
                proxies = {'http': proxy_url, 'https': proxy_url}
                url = "https://www.kuaishou.com/graphql"
                json = {
                    "operationName": "visionSearchPhoto",
                    "query": "fragment photoContent on PhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment feedContent on Feed {\n  type\n  author {\n    id\n    name\n    headerUrl\n    following\n    headerUrls {\n      url\n      __typename\n    }\n    __typename\n  }\n  photo {\n    ...photoContent\n    ...recoPhotoFragment\n    __typename\n  }\n  canAddComment\n  llsid\n  status\n  currentPcursor\n  tags {\n    type\n    name\n    __typename\n  }\n  __typename\n}\n\nquery visionSearchPhoto($keyword: String, $pcursor: String, $searchSessionId: String, $page: String, $webPageArea: String) {\n  visionSearchPhoto(keyword: $keyword, pcursor: $pcursor, searchSessionId: $searchSessionId, page: $page, webPageArea: $webPageArea) {\n    result\n    llsid\n    webPageArea\n    feeds {\n      ...feedContent\n      __typename\n    }\n    searchSessionId\n    pcursor\n    aladdinBanner {\n      imgUrl\n      link\n      __typename\n    }\n    __typename\n  }\n}\n",
                    "variables": {
                        "keyword": keyword,
                        "page": "search",
                        "pcursor": f"{pcursor}",
                    }
                }
                
                headers = {
                    "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
                    "Origin": "https://www.kuaishou.com",
                }
                headers["Cookie"] = f"kpf=PC_WEB; clientid=3; didv=1746698858104; kwpsecproductname=PCLive; kwfv1=PnGU+9+Y8008S+nH0U+0mjPf8fP08f+98f+nLlwnrIP9P9G98YPf8jPBQSweS0+nr9G0mD8B+fP/L98/qlPe4f8ecM8ecA+/pYPn+0G9Hl+9P7P/8jw/zDwBcEwnGMwePA8/SS8/8jw/ql+fLEPeGhw/r9+ApDP0SfPnHhGnbYPnG=; _bl_uid=8tm4OaUgu08wn5gaFkXtq3hlqbdn; did={did}; ktrace-context=1|MS43NjQ1ODM2OTgyODY2OTgyLjY5NzcyNzU1LjE3NDg1MDI3MzUzMzUuMTE3MjkxOQ==|MS43NjQ1ODM2OTgyODY2OTgyLjM2NDQxNzEyLjE3NDg1MDI3MzUzMzUuMTE3MjkyMA==|0|graphql-server|webservice|false|NA; userId=3707378086; kuaishou.server.webday7_st=ChprdWFpc2hvdS5zZXJ2ZXIud2ViZGF5Ny5zdBKwAQBRz8pRbabg8jhqQqKIh_jKvzz-YvYTGAtJSxXM2P5jjQT1FHlEqv0LT9O4mm_ast4AvKTqc_8rvLz1bL_snQdy6bIxbhnWMji4tUSEsAZSaJqumPWlkhOpMoLccqp7fHFKDrB-LH4A8Knx6tQU6RcAT90dP7RDooJEdV0hqJU6u38LNI9GEDI7G5ND_nshPfIhMnFRV9A3slQ_hxwKfH7lviRplVgJrc32MZ6RqIz0GhI8tsmuBlpRuNU4fo_gMPQVxSUiILD6-LsUTedWYmR7uxRXqzE3qGS6VL0eWBixXbYAjlGUKAUwAQ; kuaishou.server.webday7_ph=f5d218fd523f5ecec36006056ada4eb69898; kpn=KUAISHOU_VISION"
                # headers["Cookie"] = "kpf=PC_WEB; clientid=3; didv=1746698858104; kwpsecproductname=PCLive; kwfv1=PnGU+9+Y8008S+nH0U+0mjPf8fP08f+98f+nLlwnrIP9P9G98YPf8jPBQSweS0+nr9G0mD8B+fP/L98/qlPe4f8ecM8ecA+/pYPn+0G9Hl+9P7P/8jw/zDwBcEwnGMwePA8/SS8/8jw/ql+fLEPeGhw/r9+ApDP0SfPnHhGnbYPnG=; _bl_uid=8tm4OaUgu08wn5gaFkXtq3hlqbdn; did=web_8256600ab812572492e8d1e8b28ae55b; ktrace-context=1|MS43NjQ1ODM2OTgyODY2OTgyLjY5NzcyNzU1LjE3NDg1MDI3MzUzMzUuMTE3MjkxOQ==|MS43NjQ1ODM2OTgyODY2OTgyLjM2NDQxNzEyLjE3NDg1MDI3MzUzMzUuMTE3MjkyMA==|0|graphql-server|webservice|false|NA; userId=3707378086; kuaishou.server.webday7_st=ChprdWFpc2hvdS5zZXJ2ZXIud2ViZGF5Ny5zdBKwAQBRz8pRbabg8jhqQqKIh_jKvzz-YvYTGAtJSxXM2P5jjQT1FHlEqv0LT9O4mm_ast4AvKTqc_8rvLz1bL_snQdy6bIxbhnWMji4tUSEsAZSaJqumPWlkhOpMoLccqp7fHFKDrB-LH4A8Knx6tQU6RcAT90dP7RDooJEdV0hqJU6u38LNI9GEDI7G5ND_nshPfIhMnFRV9A3slQ_hxwKfH7lviRplVgJrc32MZ6RqIz0GhI8tsmuBlpRuNU4fo_gMPQVxSUiILD6-LsUTedWYmR7uxRXqzE3qGS6VL0eWBixXbYAjlGUKAUwAQ; kuaishou.server.webday7_ph=f5d218fd523f5ecec36006056ada4eb69898; kpn=KUAISHOU_VISION"

                response = requests.post(url, json=json, proxies=proxies,  headers=headers, timeout=4)
                kuaishou_logger.info(f"Response status code: {response.status_code}")
                for post in response.json()["data"]["visionSearchPhoto"]["feeds"]:
                    cur_post = {
                        "kuaishou_user_id": post["author"]["id"],
                        "kuaishou_user_name": post["author"]["name"],
                        "title": post["photo"]["caption"],
                    }
                    post_list.append(cur_post)
                print(len(post_list))
                pcursor += 1
                time.sleep(2)
                break
            except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
                kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
                retries += 1
            except Exception as e:
                kuaishou_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
                print(response.json())
                retries += 1
                time.sleep(3)
        if retries >= 6:
            kuaishou_logger.warning(f"Keyword search retries exhausted, returning the {len(post_list)} posts collected so far")
            break
    DID_QUEUE.add_did(did)
    return post_list

def ms_timestamp_to_datetime(timestamp_ms: int) -> str:
    """
    Convert a millisecond timestamp to a local date-time string.

    Args:
        timestamp_ms: timestamp in milliseconds (e.g. 1633516800000)

    Returns:
        Local-time string formatted as "%Y-%m-%d %H:%M:%S" (e.g. "2021-10-06 00:00:00")
    """
    return datetime.fromtimestamp(timestamp_ms / 1000.0).strftime("%Y-%m-%d %H:%M:%S")
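
# Minimal usage sketch (the output depends on the host's local timezone):
#   ms_timestamp_to_datetime(1633516800000)  # -> "2021-10-06 18:40:00" on a UTC+8 machine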



def get_posts_by_kuaishou_id(kuaishou_id,cur_did):
    did = cur_did
    post_list = []
    p = 1
    pcursor = "0"
    bad_times = 0
    while len(post_list) < 500 and p < 50:
        p += 1
        print(f"time:{p}")
        retries = 0
        while retries < 3:
            proxy = get_random_proxy_ks()
            kuaishou_logger.info(f"Using proxy: {proxy}")
            try:
                proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
                proxies = {'http': proxy_url, 'https': proxy_url}
                url = "https://www.kuaishou.com/graphql"
                json = {
                    "operationName": "visionProfilePhotoList",
                    "query": """
                fragment photoContent on PhotoEntity {
  __typename
  id
  duration
  caption
  originCaption
  likeCount
  viewCount
  commentCount
  realLikeCount
  coverUrl
  photoUrl
  photoH265Url
  manifest
  manifestH265
  videoResource
  coverUrls {
    url
    __typename
  }
  timestamp
  expTag
  animatedCoverUrl
  distance
  videoRatio
  stereoType
  profileUserTopPhoto
  musicBlocked
  riskTagContent
  riskTagUrl
}

fragment feedContentWithLiveInfo on Feed {
  type
  author {
    id
    name
    headerUrl
    following
    livingInfo
    headerUrls {
      url
      __typename
    }
    __typename
  }
  photo {
    ...photoContent
    __typename
  }
  canAddComment
  llsid
  status
  currentPcursor
  tags {
    type
    name
    __typename
  }
  __typename
}

query visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {
  visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {
    result
    llsid
    webPageArea
    feeds {
      ...feedContentWithLiveInfo
      __typename
    }
    hostName
    pcursor
    __typename
  }
}
                """,
                    # "query": "fragment photoContent on PhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment feedContentWithLiveInfo on Feed {\n  type\n  author {\n    id\n    name\n    headerUrl\n    following\n    livingInfo\n    headerUrls {\n      url\n      __typename\n    }\n    __typename\n  }\n  photo {\n    ...photoContent\n    ...recoPhotoFragment\n    __typename\n  }\n  canAddComment\n  llsid\n  status\n  currentPcursor\n  tags {\n    type\n    name\n    __typename\n  }\n  __typename\n}\n\nquery visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {\n  visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {\n    result\n    llsid\n    webPageArea\n    feeds {\n      ...feedContentWithLiveInfo\n      __typename\n    }\n    hostName\n    pcursor\n    __typename\n  }\n}\n",
                    "variables": {
                        "userId": kuaishou_id,
                        "page": "profile",
                        "pcursor": pcursor,
                    }
                }
                headers = {
                    "User-Agent":  "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
                    "Origin": "https://www.kuaishou.com",
                    "Referer": f"https://www.kuaishou.com/profile/{kuaishou_id}",
                }
                # headers["Cookie"] = f"kpf=PC_WEB; clientid=3; didv=1746698858104; kwpsecproductname=PCLive; kwfv1=PnGU+9+Y8008S+nH0U+0mjPf8fP08f+98f+nLlwnrIP9P9G98YPf8jPBQSweS0+nr9G0mD8B+fP/L98/qlPe4f8ecM8ecA+/pYPn+0G9Hl+9P7P/8jw/zDwBcEwnGMwePA8/SS8/8jw/ql+fLEPeGhw/r9+ApDP0SfPnHhGnbYPnG=; _bl_uid=8tm4OaUgu08wn5gaFkXtq3hlqbdn; did={did}; userId=3707378086; kuaishou.server.webday7_st=ChprdWFpc2hvdS5zZXJ2ZXIud2ViZGF5Ny5zdBKwAYWmakoUtiSlBqIphWvcshKYL6zX9aEmSoAVGTI6aRasO-HAulX92Ttb8RLK2e2fbdYpmsbZr74QIo46f9jCN_7Roc-sXEZewDGpJwz5jJ3Ze5r4flI9IsxZ7_0Uscil_-NliniZ3sgShBJvbisMEMy1u2_w4RepGpqekOruEf2_844YPeDvATVStLPQtN-YsBJ1DP4KKkUrddVOhpCh5JwFBnoxBrvNHZg8HtZLXenNGhJMg8a21uCwhwxScQ4lMeUhFTYiIOH1Og-cdwQQUX-niTQWWmh8KYEd-hUqXpu_eOicNw8nKAUwAQ; kuaishou.server.webday7_ph=326f66847d37cec00a33a5eba0285899aee2; kpn=KUAISHOU_VISION"


                cookies = {
                    'kpf': 'PC_WEB',
                    'clientid': '3',
                    'kpn': 'KUAISHOU_VISION',
                    # 'didv': '1746698858104',
                    # 'kwpsecproductname': 'PCLive',
                    # 'kwfv1': 'PnGU+9+Y8008S+nH0U+0mjPf8fP08f+98f+nLlwnrIP9P9G98YPf8jPBQSweS0+nr9G0mD8B+fP/L98/qlPe4f8ecM8ecA+/pYPn+0G9Hl+9P7P/8jw/zDwBcEwnGMwePA8/SS8/8jw/ql+fLEPeGhw/r9+ApDP0SfPnHhGnbYPnG=',
                    # '_bl_uid': '8tm4OaUgu08wn5gaFkXtq3hlqbdn',
                    # 'userId': '3707378086',
                    # 'kuaishou.server.webday7_st': 'ChprdWFpc2hvdS5zZXJ2ZXIud2ViZGF5Ny5zdBKwAYWmakoUtiSlBqIphWvcshKYL6zX9aEmSoAVGTI6aRasO-HAulX92Ttb8RLK2e2fbdYpmsbZr74QIo46f9jCN_7Roc-sXEZewDGpJwz5jJ3Ze5r4flI9IsxZ7_0Uscil_-NliniZ3sgShBJvbisMEMy1u2_w4RepGpqekOruEf2_844YPeDvATVStLPQtN-YsBJ1DP4KKkUrddVOhpCh5JwFBnoxBrvNHZg8HtZLXenNGhJMg8a21uCwhwxScQ4lMeUhFTYiIOH1Og-cdwQQUX-niTQWWmh8KYEd-hUqXpu_eOicNw8',
                    # 'kuaishou.server.webday7_ph': '326f66847d37cec00a33a5eba0285899aee2',
                    # 'ktrace-context': '1|MS43NjQ1ODM2OTgyODY2OTgyLjgxMjIyMzQ0LjE3NDc5NjgyMjcxMzIuODE0MTMx|MS43NjQ1ODM2OTgyODY2OTgyLjY2Nzc0OTI2LjE3NDc5NjgyMjcxMzIuODE0MTMy|0|graphql-server|webservice|false|NA',
                }
                
                cookies['did'] = did
                print(f"did:{cookies['did']}，pcursor:{pcursor}")
                response = requests.post(url, json=json, proxies=proxies, cookies=cookies ,headers=headers, timeout=4)
                kuaishou_logger.info(f"Response status code: {response.status_code}")
                if len(response.json()["data"]["visionProfilePhotoList"]["feeds"]) == 0:
                    if response.json()["data"]["visionProfilePhotoList"]["pcursor"] == "no_more":
                        kuaishou_logger.info("没有更多作品了")
                        return post_list
                    bad_times += 1
                    if bad_times >= 3:
                        kuaishou_logger.info("连续3次请求失败，刷新did......")
                        did = DID_QUEUE.pop_did()
                        bad_times = 0
                    retries += 1
                    time.sleep(2)
                    continue
                for post in response.json()["data"]["visionProfilePhotoList"]["feeds"]:
                    tags = []
                    if post["tags"] is not None:
                        for tag in post["tags"]:
                            tags.append(tag["name"])

                    cur_post = {
                        "kuaishou_user_id": post["author"]["id"],
                        "kuaishou_user_name": post["author"]["name"],
                        "title": post["photo"]["caption"],
                        "tags": str(tags),
                        "video_id": post["photo"]["id"],
                        "like_count": post["photo"]["realLikeCount"],
                        "play_count": post["photo"]["viewCount"],
                        "duration": post["photo"]["duration"]//1000,
                        "create_time": ms_timestamp_to_datetime(post["photo"]["timestamp"]),
                        # "comment_count": get_comment_count_by_video_id(post["photo"]["id"]),
                        "share" : 0,
                        "collect" : 0,
                        "video_link": get_kuaishou_video_link(post["photo"]["id"])
                    }
                    post_list.append(cur_post)
                pcursor = response.json()["data"]["visionProfilePhotoList"]["pcursor"]
                print(f"当前作品数量:{len(post_list)}")
                if pcursor == "no_more":
                    DID_QUEUE.add_did(did)
                    kuaishou_logger.info("没有更多作品了")
                    return post_list
                time.sleep(1.8)
                bad_times = 0
                break
            except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
                kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
                retries += 1
            except Exception as e:
                kuaishou_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
                bad_times += 1 
                if bad_times >= 3:
                    did = DID_QUEUE.pop_did()
                    bad_times = 0
                retries += 1
                time.sleep(2)
        
    return post_list


def get_posts_by_kuaishou_id_v2():
    retries = 0
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://www.kuaishou.com/graphql"
            json = {
                "operationName": "visionProfilePhotoList",
                # "query": "fragment photoContent on PhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment feedContentWithLiveInfo on Feed {\n  type\n  author {\n    id\n    name\n    headerUrl\n    following\n    livingInfo\n    headerUrls {\n      url\n      __typename\n    }\n    __typename\n  }\n  photo {\n    ...photoContent\n    ...recoPhotoFragment\n    __typename\n  }\n  canAddComment\n  llsid\n  status\n  currentPcursor\n  tags {\n    type\n    name\n    __typename\n  }\n  __typename\n}\n\nquery visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {\n  visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {\n    result\n    llsid\n    webPageArea\n    feeds {\n      ...feedContentWithLiveInfo\n      __typename\n    }\n    hostName\n    pcursor\n    __typename\n  }\n}\n",
                "query": """
                fragment photoContent on PhotoEntity {
  __typename
  id
  duration
  caption
  originCaption
  likeCount
  viewCount
  commentCount
  realLikeCount
  coverUrl
  photoUrl
  photoH265Url
  manifest
  manifestH265
  videoResource
  coverUrls {
    url
    __typename
  }
  timestamp
  expTag
  animatedCoverUrl
  distance
  videoRatio
  liked
  stereoType
  profileUserTopPhoto
  musicBlocked
  riskTagContent
  riskTagUrl
}

fragment feedContentWithLiveInfo on Feed {
  type
  author {
    id
    name
    headerUrl
    following
    livingInfo
    headerUrls {
      url
      __typename
    }
    __typename
  }
  photo {
    ...photoContent
    __typename
  }
  canAddComment
  llsid
  status
  currentPcursor
  tags {
    type
    name
    __typename
  }
  __typename
}

query visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {
  visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {
    result
    llsid
    webPageArea
    feeds {
      ...feedContentWithLiveInfo
      __typename
    }
    hostName
    pcursor
    __typename
  }
}
                """,
                "variables": {
                    "userId": "3x5n4bkhgdyh7b2",
                    "page": "profile",
                    "pcursor": ""
                }
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
                "Origin": "https://www.kuaishou.com",
                "Referer": f"https://www.kuaishou.com/profile/3x5n4bkhgdyh7b2",
            }

            cookies = {
                'kpf': 'PC_WEB',
                'clientid': '3',
                'kpn': 'KUAISHOU_VISION',
            }
                

            cookies['did'] = "web_a30d6f7ca18bcb015b5c7496ba0b66f7"
            response = requests.post(url, json=json, proxies=proxies, cookies=cookies, headers=headers, timeout=4)
            print(response.json()["data"]["visionProfilePhotoList"]["feeds"][0]["photo"])
            kuaishou_logger.info(f"Response status code: {response.status_code}")
            break
        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
        except Exception as e:
            kuaishou_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            print(response.json())
            retries += 1
            time.sleep(1)    



def get_kuaishou_video_link(video_id):
    return f"https://www.kuaishou.com/short-video/{video_id}"

def get_comment_count_by_video_id(video_id):
    retries = 0
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://www.kuaishou.com/graphql"
            json = {
                "operationName": "commentListQuery",
                "query": "query commentListQuery($photoId: String, $pcursor: String) {\n  visionCommentList(photoId: $photoId, pcursor: $pcursor) {\n    commentCount\n    pcursor\n    rootComments {\n      commentId\n      authorId\n      authorName\n      content\n      headurl\n      timestamp\n      likedCount\n      realLikedCount\n      liked\n      status\n      authorLiked\n      subCommentCount\n      subCommentsPcursor\n      subComments {\n        commentId\n        authorId\n        authorName\n        content\n        headurl\n        timestamp\n        likedCount\n        realLikedCount\n        liked\n        status\n        authorLiked\n        replyToUserName\n        replyTo\n        __typename\n      }\n      __typename\n    }\n    __typename\n  }\n}\n",
                "variables": {
                    "pcursor": "",
                    "photoId": video_id,
                }
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
            }

            cookies = {
                'kpf': 'PC_WEB',
                'clientid': '3',
                'kpn': 'KUAISHOU_VISION',
            }
            cookies["did"] = get_random_did()

            response = requests.post(url, json=json, proxies=proxies, cookies=cookies, headers=headers, timeout=4)
            kuaishou_logger.info(f"Response status code: {response.status_code}")
            print(response.json())
            return response.json()["data"]["visionCommentList"]["commentCount"]
        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
            time.sleep(1)  # wait 1 second before retrying
        except Exception as e:
            kuaishou_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            retries += 1
            time.sleep(3)
    return 0


def get_user_info_by_kuaishou_id(kuaishou_id,cur_did):
    did = cur_did
    retries = 0
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://www.kuaishou.com/graphql"
            json = {
                "operationName": "visionProfile",
                "query": "query visionProfile($userId: String) {\n  visionProfile(userId: $userId) {\n    result\n    hostName\n    userProfile {\n      ownerCount {\n        fan\n        photo\n        follow\n        photo_public\n        __typename\n      }\n      profile {\n        gender\n        user_name\n        user_id\n        headurl\n        user_text\n        user_profile_bg_url\n        __typename\n      }\n      isFollowing\n      livingInfo\n      __typename\n    }\n    __typename\n  }\n}\n",
                "variables": {
                    "userId": kuaishou_id
                }
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
            }

            cookies = {
                'kpf': 'PC_WEB',
                'clientid': '3',
                'kpn': 'KUAISHOU_VISION',
            }

            cookies["did"] = did

            response = requests.post(url, json=json, proxies=proxies, cookies=cookies, headers=headers, timeout=4)
            kuaishou_logger.info(f"Response status code: {response.status_code}")
            user_id = response.json()["data"]["visionProfile"]["userProfile"]["profile"]["user_id"]
            user_info = {
                "user_id": user_id,
                "user_name": response.json()["data"]["visionProfile"]["userProfile"]["profile"]["user_name"],
                "fans":  response.json()["data"]["visionProfile"]["userProfile"]["ownerCount"]["fan"],
                "video_count": response.json()["data"]["visionProfile"]["userProfile"]["ownerCount"]["photo_public"],
                "gender": response.json()["data"]["visionProfile"]["userProfile"]["profile"]["gender"],
                "avatar_url": response.json()["data"]["visionProfile"]["userProfile"]["profile"]["headurl"],
                "home_url": f"https://www.kuaishou.com/profile/{user_id}",
                "signature": response.json()["data"]["visionProfile"]["userProfile"]["profile"]["user_text"],

            }
            # add_did_to_cd(cookies['did'])
            print(user_info)
            return user_info
        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
            time.sleep(1)  # wait 1 second before retrying
        except Exception as e:
            kuaishou_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            retries += 1
            # add_did_to_cd(cookies['did'])
            time.sleep(1)
    return {}


sheet_keyword_list_kuaishou = config.get("kuaishou_sheet_keyword_list")
node_token = config.get("node_token")
app_id = config.get("app_id")
app_serect = config.get("app_serect")
access_token = feishu_api.get_aenant_access_token(app_id,app_serect)
app_token = feishu_api.get_app_token(access_token, node_token)
FEISHU_UPDATE_INTERVAL = 5 * 60
FEISHU_LOCK = threading.Lock()

STOP_THREAD = False
def update_feishu_token():
    kuaishou_logger.info("Kuaishou crawler: Feishu token background refresh thread started")
    global access_token
    global app_token
    while not STOP_THREAD:
        try:
            with FEISHU_LOCK:
                access_token = feishu_api.get_aenant_access_token(app_id, app_serect)
                app_token = feishu_api.get_app_token(access_token, node_token)
            kuaishou_logger.info("Feishu Token updated")
        except Exception as e:
            kuaishou_logger.error(f"Error while refreshing the Feishu token: {e}")
        time.sleep(FEISHU_UPDATE_INTERVAL)


UPDATE_DID_TIME = 180
DID_QUEUE = FIFOProcessor()
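
# did lifecycle, as implemented below: the update_did_page* threads mint fresh
# dids via DrissionPage and push them onto the FIFO with a ~300 s "ready at"
# timestamp; wait_did moves expired entries into the ready queue; workers pop a
# ready did with DID_QUEUE.pop_did() and normally push it back when they finish.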

def wait_did():
    kuaishou_logger.info("Kuaishou did-wait thread started")
    while not STOP_THREAD:
        did_ex = DID_QUEUE.pop_did_ex()
        if did_ex["ex"] < int(time.time()):
            did = did_ex["did"]
            if did is None or did == "":
                continue
            kuaishou_logger.info(f"{did} is ready ......")
            DID_QUEUE.add_did(did)
        else:
            DID_QUEUE.add_did_ex(did_ex)
            time.sleep(1)
    kuaishou_logger.info("Kuaishou did-wait thread stopped")

def update_did():
    kuaishou_logger.info("Kuaishou crawler: did refresh thread started")
    while not STOP_THREAD:
        try:
            did = get_new_did()
        except Exception as e:
            kuaishou_logger.error(f"Error Type: {type(e).__name__}")
            kuaishou_logger.error(f"Error while fetching a did: {e}")
            time.sleep(4)
            continue
        did_ex = {
            "did": did,
            "ex": int(time.time()) + 300,
        }
        DID_QUEUE.add_did_ex(did_ex)
        time.sleep(4)
    kuaishou_logger.info("Kuaishou crawler: did refresh thread stopped")

page1 = None
page2 = None 
page3 = None
RECONNECT = False
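# The three update_did_page* threads below share the page1/page2/page3
# ChromiumPage instances. When one thread fails to fetch a did it sets
# RECONNECT, tears down all three pages and recreates them; the other threads
# sleep while RECONNECT is set so they do not use pages that are being rebuilt.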
def update_did_page1():
    global RECONNECT
    global page1
    global page2 
    global page3
    kuaishou_logger.info(f"Kuaishou爬虫did更新线程[page1]开启")
    while not STOP_THREAD:
        try:
            if RECONNECT:
                time.sleep(60)
            did = get_new_did_by_page(page1)
            print(f"page1 get a new did : {did}")
        except  Exception as e:
            kuaishou_logger.error(f"Error Type: {type(e).__name__}")
            kuaishou_logger.error(f"page1获取did时出错: {e}")   
            RECONNECT = True
            time.sleep(20)
            try:
                page1.close()
                page1.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")
            try:
                page2.close()
                page2.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")
            try:
                page3.close()
                page3.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")

            page1 = ChromiumPage(addr_or_opts=co)
            time.sleep(2)
            page2 = ChromiumPage(addr_or_opts=co)
            time.sleep(2)
            page3 = ChromiumPage(addr_or_opts=co)

            RECONNECT = False
            continue

        did_ex = {
            "did": did,
            "ex": int(time.time()) + 300,
        }
        DID_QUEUE.add_did_ex(did_ex)
        time.sleep(4)
    kuaishou_logger.info(f"Kuaishou爬虫did更新线程[page1]关闭")


def update_did_page2():
    global RECONNECT
    global page1
    global page2 
    global page3
    kuaishou_logger.info(f"Kuaishou爬虫did更新线程[page2]开启")
    while not STOP_THREAD:
        try:
            if RECONNECT:
                time.sleep(60)
            did = get_new_did_by_page(page2)
            print(f"page2 get a new did : {did}")
        except  Exception as e:
            kuaishou_logger.error(f"Error Type: {type(e).__name__}")
            kuaishou_logger.error(f"page2获取did时出错: {e}")   
            RECONNECT = True
            time.sleep(20)
            try:
                page1.close()
                page1.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")
            try:
                page2.close()
                page2.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")
            try:
                page3.close()
                page3.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")

            page1 = ChromiumPage(addr_or_opts=co)
            time.sleep(2)
            page2 = ChromiumPage(addr_or_opts=co)
            time.sleep(2)
            page3 = ChromiumPage(addr_or_opts=co)

            RECONNECT = False
            continue

        did_ex = {
            "did": did,
            "ex": int(time.time()) + 300,
        }
        DID_QUEUE.add_did_ex(did_ex)
        time.sleep(4)
    kuaishou_logger.info(f"Kuaishou爬虫did更新线程[page2]关闭")


def update_did_page3():
    global RECONNECT
    global page1
    global page2 
    global page3
    kuaishou_logger.info(f"Kuaishou爬虫did更新线程[page3]开启")
    while not STOP_THREAD:
        try:
            if RECONNECT:
                time.sleep(60)
            did = get_new_did_by_page(page3)
            print(f"page3 get a new did : {did}")
        except  Exception as e:
            kuaishou_logger.error(f"Error Type: {type(e).__name__}")
            kuaishou_logger.error(f"page3获取did时出错: {e}")   
            RECONNECT = True
            time.sleep(20)
            try:
                page1.close()
                page1.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")
            try:
                page2.close()
                page2.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")
            try:
                page3.close()
                page3.quit()
            except Exception as e:
                kuaishou_logger.error(f"Error Type: {type(e).__name__}")
                kuaishou_logger.error(f"Error: {e}")

            page1 = ChromiumPage(addr_or_opts=co)
            time.sleep(2)
            page2 = ChromiumPage(addr_or_opts=co)
            time.sleep(2)
            page3 = ChromiumPage(addr_or_opts=co)

            RECONNECT = False
            continue

        did_ex = {
            "did": did,
            "ex": int(time.time()) + 300,
        }
        DID_QUEUE.add_did_ex(did_ex)
        time.sleep(4)
    kuaishou_logger.info(f"Kuaishou爬虫did更新线程[page3]关闭")


def thread_update():
    global access_token
    global app_token
    global page1 
    global page2 
    global page3

    page1 = ChromiumPage(addr_or_opts=co)
    time.sleep(2)
    page2 = ChromiumPage(addr_or_opts=co)
    time.sleep(2)
    page3 = ChromiumPage(addr_or_opts=co)

    access_token = feishu_api.get_aenant_access_token(app_id,app_serect)
    app_token = feishu_api.get_app_token(access_token, node_token)

    update_did_thread = threading.Thread(target=update_did_page1,daemon=True)
    update_did_thread.start()

    update_did_thread2 = threading.Thread(target=update_did_page2,daemon=True)
    update_did_thread2.start()

    update_did_thread3 = threading.Thread(target=update_did_page3,daemon=True)
    update_did_thread3.start()

    wait_did_thread = threading.Thread(target=wait_did,daemon=True)
    wait_did_thread.start()

    thread_insert_to_trino_thread = threading.Thread(target=thread_insert_to_trino, daemon=True)
    thread_insert_to_trino_thread.start()

    feishu_token_update_thread = threading.Thread(target=update_feishu_token, daemon=True)
    feishu_token_update_thread.start()



def is_valid_did(did):
    retries = 0
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://www.kuaishou.com/graphql"
            json = {
                "operationName": "visionSearchPhoto",
                "query": "fragment photoContent on PhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment feedContent on Feed {\n  type\n  author {\n    id\n    name\n    headerUrl\n    following\n    headerUrls {\n      url\n      __typename\n    }\n    __typename\n  }\n  photo {\n    ...photoContent\n    ...recoPhotoFragment\n    __typename\n  }\n  canAddComment\n  llsid\n  status\n  currentPcursor\n  tags {\n    type\n    name\n    __typename\n  }\n  __typename\n}\n\nquery visionSearchPhoto($keyword: String, $pcursor: String, $searchSessionId: String, $page: String, $webPageArea: String) {\n  visionSearchPhoto(keyword: $keyword, pcursor: $pcursor, searchSessionId: $searchSessionId, page: $page, webPageArea: $webPageArea) {\n    result\n    llsid\n    webPageArea\n    feeds {\n      ...feedContent\n      __typename\n    }\n    searchSessionId\n    pcursor\n    aladdinBanner {\n      imgUrl\n      link\n      __typename\n    }\n    __typename\n  }\n}\n",
                "variables": {
                    "keyword": "英雄联盟",
                    "page": "search",
                    "pcursor": "0",

                }
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
            }

            cookies = {
                'did': did,
                'kpf': 'PC_WEB',
                'clientid': '3',
                'kpn': 'KUAISHOU_VISION',
            }

            response = requests.post(url, json=json, proxies=proxies, cookies=cookies, headers=headers, timeout=4)
            kuaishou_logger.info(f"Response status code: {response.status_code}")
            if response.json()["data"]["visionSearchPhoto"]:
                if len(response.json()["data"]["visionSearchPhoto"]["feeds"]) == 0:
                    kuaishou_logger.info(f"did: {did} 无效")
                    retries += 1
                    continue
                kuaishou_logger.info(f"did: {did} 有效")
                time.sleep(2)
                return True

        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
            time.sleep(1)  # wait 1 second before retrying
        except Exception as e:
            kuaishou_logger.error(f"Unexpected error: {type(e).__name__}", exc_info=True)
            print(response.json())
            kuaishou_logger.error(f"did测试失败，正在重试......")
            retries += 1
            time.sleep(1)
    kuaishou_logger.error(f"did: {did} 无效")
    return False



def get_captcha_url(did):
    retries = 0
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://www.kuaishou.com/graphql"
            json = {
                "operationName": "visionSearchPhoto",
                "query": "fragment photoContent on PhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment feedContent on Feed {\n  type\n  author {\n    id\n    name\n    headerUrl\n    following\n    headerUrls {\n      url\n      __typename\n    }\n    __typename\n  }\n  photo {\n    ...photoContent\n    ...recoPhotoFragment\n    __typename\n  }\n  canAddComment\n  llsid\n  status\n  currentPcursor\n  tags {\n    type\n    name\n    __typename\n  }\n  __typename\n}\n\nquery visionSearchPhoto($keyword: String, $pcursor: String, $searchSessionId: String, $page: String, $webPageArea: String) {\n  visionSearchPhoto(keyword: $keyword, pcursor: $pcursor, searchSessionId: $searchSessionId, page: $page, webPageArea: $webPageArea) {\n    result\n    llsid\n    webPageArea\n    feeds {\n      ...feedContent\n      __typename\n    }\n    searchSessionId\n    pcursor\n    aladdinBanner {\n      imgUrl\n      link\n      __typename\n    }\n    __typename\n  }\n}\n",
                "variables": {
                    "keyword": "英雄联盟",
                    "page": "search",
                    "pcursor": "0",
                }
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
            }

            cookies = {
                'did': did,
                'kpf': 'PC_WEB',
                'clientid': '3',
                'kpn': 'KUAISHOU_VISION',
            }

            response = requests.post(url, json=json, proxies=proxies, cookies=cookies, headers=headers, timeout=4)
            print(response.json())
            return response.json()["data"]["url"]

        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
            time.sleep(1)  # wait 1 second before retrying
    return ""


def extract_captcha_session(url):
    """
    Extract the captchaSession parameter from the given URL.

    Args:
        url (str): URL string containing captchaSession

    Returns:
        str: the extracted captchaSession value, or None if not found
    """
    try:
        # Parse the URL
        parsed_url = urlparse(url)
        # Grab the query-string parameters
        query_params = parse_qs(parsed_url.query)
        # Pull out the captchaSession parameter
        captcha_session = query_params.get('captchaSession', [None])[0]
        return captcha_session
    except Exception as e:
        print(f"Error while extracting captchaSession: {e}")
        return None
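
# Minimal usage sketch (the URL below is a made-up example, not a real captcha link):
#   extract_captcha_session("https://captcha.zt.kuaishou.com/iframe?captchaSession=abc123")  # -> "abc123"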


def get_captcha_config(did):
    captcha_url = get_captcha_url(did)
    print(captcha_url)
    captcha_session = extract_captcha_session(captcha_url)
    print(captcha_session)

    retries = 0
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://captcha.zt.kuaishou.com/rest/zt/captcha/sliding/config"
            data = {
                "captchaSession": captcha_session,
            }
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
            }
            cookies = {
                'did': did,
                'kpf': 'PC_WEB',
                'clientid': '3',
                'kpn': 'KUAISHOU_VISION',
            }
            response = requests.post(url, data=data, proxies=proxies, cookies=cookies, headers=headers, timeout=4)
            print(response.json())
            return response.json()
        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
            time.sleep(1)

def slide_did(verifyParam):
    retries = 0
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://captcha.zt.kuaishou.com/rest/zt/captcha/sliding/kSecretApiVerify"
            json = {"verifyParam":"3sCt3iAAAAAAAAAAAAAAAwEQBjoIdCECHA8N76gZAABEDYcfnH++Zl0UHFi8w4KsylFni3n/IM0Nj1pBmG34p1j8eK6Pr7g3ffp1d8Ptyny/0avKDo/xvxqVKtaGjJmUxN4f+nw3MrTe33hoa6R3JqayhJm4yt6hve6Gll0VrKaMxGCuxJ0tgcFMZ4fDzlOqSikIvcK9pttMS/XhwsWVm6zWiCqJaNKrdpHtqbVnq0/KVZSYbYYWg7QOqkpqcLfDSZOnnOoxs4lparykKgChtrZ8wTDIm0IfjZg/vVylj8irwoC7+ObBco33a4KilZapTR5FtKEAS4tCfFNHMSBLpwFdiaqtgqOOhdkUk4yHeK6jybqut6CmWWe9Ur7Lo7vItgfE77/nQrZQAf0HEbeRvJ+fwY9o1rvT/lGAh4Z4mRGwV7xQbKhTSP78y7tf5qbdvk3JxJ+V7Iez6OflboqG0K+fzE6DhIWdniS/pgLXmM2d1Y3zKaev0UijkIS3kJmOZoBFvA6QlpnKr6pxvAq2zaHhyrKz7ybYkBVajXGsyChnyH5eVhHjL7/BpXCOjLhFEhuk3ca8tfjFpBy5Y5Kk9YhWn6yhIf66u3uwPA3W8RGAXZqaJ/iIw/THaaOy2obIsHqGr1IUjBbobYS+w9KUn8ocUa6P6FbXhLZYZkioso69eRDrjcLzrvu6tabIRKfeHVyOeL6sa9JwqJbiQdyLFJ6Rt/BecUCQnA9NWEhKzdC8HAt5sK2YEHNLFvQT0sioDlMjVpe8qZeApodarvnpXvh3UltXrMLPIoXN4hnLqBGOh5vpl//A0o7fDZWOkfGQkyRdXvQpLRLbjCj31tj4C7nBOg7gF8XZ69MxgL3vi9rZvs7fDRLK3B7E/P5aFMe+7r7Dyd3KwYnMADrSpclqwmCcC9bam/5azgnIW5AUVRqM3c3LDwDLjAfS2sGMCtfAHLzLZN4R+72GyNgelNsHWh+l3P7OzMql/gpC2bRpxEou9AplEtGJuKgs7giwS9jOD4pIxw/ADbgDy4LFpagCEayl0w7twkQlwqkZhI7BV/rvdA3ET7r4DsZ6H9v0Lsb7xNyDyf/WkbWuSlquZHDR0hVqCxvC2Nh/+Nreqa3UKQQfyY/S3ibDyK3V7P6uvNbOGjzLZccDz8LYr83sQsyM6M8Zy/18AUXaGk5kLA/YoZ+uHsPzwT3jzvfZ6CheD96ECPxtzRBAsfvPzezYmIbv1vw4Ho7Fvt6P0/QEG96uHK7/zLX9xk3P3L5bpl77yrwIRp/AigPSw8PZ2Ajg0NnruP3D2+h1zPzL0gue+87N9qTl/Bf/m5nGDtHazbwTqwfo0LzOVdxIJs7H/MvFusLPxLPsHe/92spezvfCws2NmsWiVgS8w936wL4sz8vSKbHJ7YrDxGQv3TfTnMvdyRabesvlzunsqMfV0viMHqYPwtXl2m7EQ+p4An/L0b/N6/vABWmarYjTzj4u/cIKtfyEKNh8igbq2P6bjP4E2BGxy0sJHky37vv8PNBWVt/WzZXf6eLaqXXCySAWzLHjz/bLusTGzkyN0OYMhNeoDsKo8+2sjM3oSFSkyBUPfKT83tsCLr4O/spK35+M9hxa+bicCT7ueunBAFAtkNrKpbvJ/8JN+8h1ywTVA/lmLO4InPvZ3MvaDDqBDsWWzdbjDQwcULwMCtPEbAzJJZnsuvkOL8pbjt/v88WNzGbFSc2m48rZuqi0Lc7Txb5KTSwX/v/T0yoc2LhII8AeWAHcgdzpsAnL7MLFnc/62sHYbEqWSA29yULWNsC81Mg2bq3N+8tY37HSjq7We7oI6NmCfcxOcE4N/cz4hP/aqZ2nCxqoiC7NDpP97M/+V933yKT0ysrOXOze/HvJtEjpTzrYHi/mWdDo7C7Dy8110NgfwpAv+sfK7n/B4oXZ/XwKoY33xLH4pnvOGMYmSfmsr9EPyLvpsAoJvvjaAtdCQ/+MlPFQwbzNritJ6cOvysKT3FU67nvLFgScj/9/0LBK6dDWzZzR1sjSK4jO+NLQiALg1tncrCBACfxIb+JNw+6M0QPu8Ae+ANqh/Mm8T8gKS6xNCyASf+qjhgv6qaxuHELOdfsQ9OvVNMDBLT6sd6paGi6/7ujb2sudyt8b2s24fdXZzfzPgMLKFT3I04r7/DRt7dvN35vVFjINp8CA69wObMIg6Oxk7sXN6gcm7OoqU8uwTpU+GHZTFn8aC3wA8vwK+KnWKLvOcMHMwrjB1O5Vq8oJmsIU3BguEPgI0klnxEekz8WY/tU8WLX6osACF7/NqRXKGcG0gPjWe9pHlVvTrZyAFuZBhHTdzUDYOAqqC8r4uiAgDtpMil4MyhCRzCqD7CvY+crGvMysGGjp4Y3+jC2YzbwYFlKeDnDOzN4LsMPD79mhd4TX11fIjCbLwA2n0siDGhu1xKfJw+4478VSw5zSefrzwLoHDck2BL3+XNgOf9Hf0wv5elarFNPr/+gPPAuVzOfu6uWaw6joXARkVvzeBGQa7ywB2I3TFsTf6NrKfVPQj9kM1ln+nhTVX7vhtcar3B3drw4ACcVex/7Y6gKt2AfaQbBRB9ITzX3zpfrMLJzV1ZYptc+U3B/BliNbBhDYUdTZesiMSO3tssG8Hu/Dwoqmzu0A0H4b6cYKCI2L2f7Z0CQP/UDEuMVuTcIsjdzKCNoApaYLCMA6CPjPvb5u06SFVQAiuMW8wNXOkZFIknH7Vk2oHVhZDuvKv9wbDWi+yZKCy7mt3WKXlL5jyEvV2P919v381KeisJOpvE8Y1cncaHhqR0Fpqt0t9F3B/UUSCXhbyekQQrRXv/xJNqWXCI76lRr+1boPrs+tlI2XWYKQyv0P08UFP4KJWJpD+27cFQsPsoyxK9upaYQYg52VPK3Gob5Bs1m+mO9/1N2SDs8Xrr1fAN5errHeGA/9w97JTHUIRUIafYiWD6kK2X/EngyMlZyZ+nzEGsz8ma6fwEutjN1zh4eiorxRD1no3EzB1sqn2vktLlC4vLKaHhVPdZiFyOr7DujE3pJdiZur2O8G3sFBTCB3EqycbpKOkx4MhpLB0p6W7V+4RlZQcUr/lbqH38sN+8pf18sGg6KOwvciktp+00zS/5fGzdvh1pWkksPe/I8yAe71v6YcL86BudwE/yoNiN7C3M+zW4aL8kjnhN2v6YzjcaQQjusjGgDCHH0C3kS70JO4q5+yJq0cIQkNVdGXwtrN0Oru2TvdC1exrYB43kkwSe1XpdawrMDPqf36r/Jd+h6iDg0NqtO9TJSJnNvbv/zOyNc2jON2XN2SmJf+4JjqynLr/C0vmTzBy4pWW5g9Q9fO8dKW0+LIVt0verad/t6Kxdbn2Z+wqddKw9XryNT/yAsswkG3gfu+WAqBasTomuAvytXVE5ORD8LQ/cIdI/zT1spXKRntVxZ9B40a+C/7od+8z+qR/fyQrALBDMyCZM24rN/6m7SeduEGtlyMGv2rthpM2Vdvw87e6s6vjJfHBVgoq5nvu7nb/rjt
RUkxT8dNywrv//agwbJ3b//fXuWeQkapU55yEgD7GViqkaQDuiXH2lXvb5OdhZSAXW5LVKbIs12cDayy8fC2WNbJgb8Cxqbfys/HvdDWw4mbgN+OgJZFqNFJh33oEhn5YfcTl8S5HpzL493w7eDX2kEO799eSZb9jw3+GkGRkXrez9XaVLPaBcdGqrQZzLXXOhTo6c913A0F0N5SnuuOWlv5qw8/2erqCysMtOK8nI7HBIGNvBrcz3yHitqUB7i4UYdbyw7D/7I63uUcyBXrQ83cuclMiqP+o1UBlfTryy0WLNOSQKzTl7GkiY12FxgHBdpLXPoFJY6WYFKkjvYRjHKVg8EO2/6yG+n5L3iVgJfOCwU6y7XXxdk51p3d7LRRquzWzscWKIwYBMeQhoCUQ/jO/tzFegDE+1uevH5GvXSOon7pVkqFjIzA1Sz4LbT6/e+vU4qYOEoFazbO9dvtLJODkbNdV5EuVVe7ulsTwITaKPIWj9+CnZXcqBZL7sLXuejBwIHjUl6qpLOPlLm4o7uau73NQ83euPmyut0NePuQ4FLZ6Q02DTrZzey5mPaxZZoJQZddfIoPDsLs/RLv6Pz/XPqHg1WZ6+0BPCiKcv0euV+Gi5y75sP9B7rMc8AMwUwyzSzC7W/LzM2FsoGUT6Sd6s//Fn2tmsTds47MR1qtV/8r3oLPriuekM4FhgyfiIjZgoTU/y8d+jxq3NvvR9fO3u7E6wSCv4herJOVonc+FN7pyHZC/czQyM+17+/Ey8DU1RYFfAgN/M1aRqnfo4q8qRbKwZW+1PfZpMjACxgJqzzFjff7VIRbsVyUsqqPjxbWif/WzLjCgOzqXfbN+zItqNf73ZSH0oGYhaiHkpdE+rWJvNZbp0GyQb9+5Uy34DvW7dTCb864/gqovlfiyQHExISf1uvFShALz9tPmCgfrlOEQsfsrf7eX73ujymYTZlRHIAhiYaZX0HI/bv8WsolrvvKX5BFy+pFjAAFAuLQXajF3ensyFILqfgceg+czJeYwzoN1t5Mrfns/ISasIFEHGMCH0tzDHdYUlVj4RSRg7IDXgv52sB/vNfZS9bKj9788voUrJ637Q/N2tvv5oCCLK1RC9j8yZjbd4dI1u0FuE9kwgDIupizbVqUYp4GhpwTNgUk2cB6AsGN18X5+A+S1ZDdylaZ+xO8q1Cf9pec7t6tli7B2HVUn2fT0vzpWBSY+88VcB+ViVuHuqrGxriPgrGWBrdBpvCbNftsBKkWTjZJ3cEOIcWHBdGB6cJ5C64Uy8b/lLkoUsWFW1XvUpycCPGFDpzC+98aLJoI1JWEVFOErqoziE67Adbe3QTaSIvXwM8K1+T6wgTegu/4y3r+GCACpdf8fkUxm5//CFmT3eee4XnXgaTbrbwa/Qy4Xl2SkX5urFaMuJ6wqsxhr22MdcccAU1abF7o2cXCRtLvwBzv2VsrirAQq7rdAw2hY8/OZe2pKQvXzM6tX47brcja4dW+7UmOiry50Oi/VrzfjhzF6gxf29Wr+grYARAKAlENfteCAIyY/InN3SKfeerVwwWCpc9O3xqbwtTtHM7wvA6bjqoG3Lr7z+mdBnScn4XJRKOdKsPJJMKAcvjH/OjIUcOBiOqEm4tSj9hdcQLB5/rQn82dFdPlluVenQPLhcOo1RINfdCle4Ooya76o4NO+hzE+4pyQ+0BAlfE+IQFwcVWVwqr15FG+WrOHkuQR778Cv5K3uNPii2NHN3/V8Ce6lTkr4qI/7AlGLma/UG3nqOMlrdI2pEOzO3YwDLaLsFOVCsKnI8G/SrtncfE9NVeqgDM7ZfcR1eL70tZlY/IqLX42MDeLXXo+/t6nZS4g7aETzgCyczH1/p4wuipx9xLw9CNXM/YDA7Lccvu0uwricNWeFeK/f8OjCjaiPyQaf/+1/6f3g3Aj5Rv3LQIgRl2owpajW2FhIGDpt3X1OXWTKbDyXsir97NzFCIkxYSmumYxk2BlpD8hpad3vuWkp+LmbvLjfTZ7wFcK4wIF0WBdpdPe1amhz1XGcGH2q2y/ATeokLN1a6rEJrUeo4oQs4vgXq64R+cjnmXg0WFg4yszQrfwOsP0IF9y4XdSLT13pJ1jqNJn6ueq0ebu7Ds2VZGVkPJ/w/Irtn17t9OrdZ6aSjQBqwb+yqP7L+/77HImFWcuAi938qt4Xegld/8y1hOm5Ycnr1a/FGYt0Ib/x7CzK6/z+XKB5zdqMtfCpHOCNrcwCvmBsnuRctflqJOyry1egXGRALaAcUATOjpweybXgiVuJO64YFeXE/pfgxzyOyJFMXiDZ1Zu6lcyZ6LA3TWmilKvkD/DfkK/Y1Jq6B4Gq6JlhTByn2P+en/8aiiAvvn74rXpZZLCYu2CSGAylnpwplR+8SDLser/SrOvZGOTJ0PhEbUp/8oHdDJJPwsz5vgrOodqxGuUsa76azY3O5GWYdqDOcL7fxC+MX7uo6Oel8BlryeVdvPxxheUpnO6LaBg1RfQ33rwXTT2S7cD5zBpS+CCVeW/te/Vt3Oia6m6gj+0KT76Oda56Apw8HJcxpNm9IBf6p6BjjBAwYv39cPZJ7Myl7LypxnjwW8KdubVBeY6OyhLSz91wrIh/8cF23W/wPYtsu2fwPLisvO15+AhbIR6dsoA5hYEfr+xhjU1UW7hZomM73aDvVo3pLC1stvsYSO+h+uyk303chYAhWq169aDXy9hlj9jJBSB38Aw1vC/9PLzIrxednU4w2cCMmGxw/by05+wMv9fMIvXZCqqukaJOVaWDY8BQzO1szIVs/ZrtglDOoN/dHV/c2SS8gZhOkyDZJB1QFG8RP6aVuo/vL+td+t1ZAci/s83S0yyCfqywSJqlCd0AHJzPjpJLvB7rReKs7s+NVLxOrMwIFb3JH43YGkU4JnzAjVNkiT2AzOzTmoumjbwViBl9bqTMHqDHzSBYb+DRW+fN64k55P/9oFy8IFulXPwavp1LDCDS2eUuXv6ontFdvNqlzJTM8aBXAYAfyWM8XWTsvpazvMt5ObFJrXfOwN/5WagMaO2gnOmij2/PnO+Litnh4JRaSCT+5jEADEk8pm+csnzuuP1sD5cJmil9nO/eSKmHfqwPjJzsPPL//vw95UzfzZcAytK3ebjkWRP8scw8GYJhndmPKOzQ2ZsErqy/0V2NRe+Axe2sCtVLJehtTrW8DNVky4/NOv6gTFz3Iujs/60C2/1H3WjKiyLt4LuBgC6e88SR0ogq+EuhfY7MHXiJ0Zy4gWhVUXsIqo8FuLDzzsdfgLuUJs+ZadSkkUaIvCGPz5HA57w/1XK0ONdEJ7089cyu3/6X3e/9vvWNGeAfzhXlg7G92m1vsJJKmqAf8bnRbrwIzeB9/LzQfqypS908LA/VTmCNLtDBQIy8ieFCi6K3pWoKre2CaqFtvhixfLqevNrDPvBZysVM1f2hUBXkSy69OleKslg07C7QyZQcKc/fzoyZbCAasU+f+lygvp/KzByk4O/dGM3NSU28Y8/Rbb+msaYKgf68GYnspNwRpA39/tpNvqWzgrwfn
Wy3HG3e/AsPOumitfy8/roWiTj9f73quO/uyMzVH8Hs/XNnoi1s0a393OtvKHydxOz/62LAwcBI3sBsEuq77LGI1cyuxJSUz9njwBqQ7KBzfiKxoBnrTc3BV0FqgxwaXLxvJLJMrDmtoApHZN/bR2R9TQ7t0F9i+eAtKv0/z//PPJ1caHSf610CoL22gSxj79rj7OGslxm+WWi5hNqsg7wNbZ+qHbyw3KCZbUCpAs2H5excIefdIIvb5BBYNVQe+fiXnIQn393g5s889B49oovkPTV+6s4f/KBZYQGljU3hoq3v3P0Khzw9i57M6sngJOwQjY3vvX+lPb5Ds76cV0nIDVK5jbIdmt/hP5sfzqm1fVfOxc1JIb3hSPwsfdFuxm2Me+wn8H3Z7Urdc8DaXEJ8Miw9260N3aq1TRzbr/XY1rgoscvbpX3vztVuta7qzE3BzE1c/L5JgC59nE+El9496Ay3eWrMPorYmE2alTu3LAu66lVs2k+l0doUzNp8geFKTGyin/t8SPk+7WVNnQDuoOvO+B+48MVJvV2fzem8s8W8HOxdvLcI+f/7YIw6gRy8Gs86LrwY4fpgmupaq52oyHvLyPGgDNyYExw/zVCtvNdELwHN3+Js8Dlv2migob/r267ev+mZaLi4lS6bLA6lX5FuyN1kvA09HZJAQ8FXAIB5eJs82vyB7bi9zyx/qNjcNV/o0OC1vp28EM3+yk+g0khNmsv/hKVXDCyUatuqlt287oKT+0zIP+nY8NUu9Inppr08z43AVkG8kuzt/HMCfJ/tBWBf/uE+yaEtjqVKXTxhYNf+sLc58HpQHOst7NXMyMu8nrI9/7sIvOVX3KwejUkh6Oy9XZrL3iF+oJcEtfgiqGKVPrSMXUuC6KGhXO4lZIvsi63zH/xQvg7CWwycoD213CTlwOpE+/k7lXjqpe7B/dFs1el63M1B06zyYuusCMz8jPwtosvILV29ScYE/C7L+W15ZWmFS+BWzM+RfosNhMv+nLDPKCR8uWOkus3gB2PCw1a6b+nuzHv4BJzaXK6cfFws160OU7/V/+zFdDjJqVTJpvh5zz1fjWy1pEjBrJG9bCToje2n+2L/jF+9zOtE3d3BYAtsEPA8vJ5fYptturb7n9AIW2pgzm6AGE2EouEtvd0cOzOh+PnMvUuEBOBl4Dzpc6zawb3o1SzLs6KYnD5cFUvIIf1QLJ1Rru3oP5tNP/zutGnp1TnNvI29kt6CiOC/3LBxfLvdTCXLjVxtHazgsMtO/Q66hT/vvLHr7oldxtXJVBmleYT6wG281V3O+S3MsNqB/D4TTX0ku63srfzx5pisnV2aV+QA/I/RTehYvW/S5A1P8Qnp/fjNDHpNvseFnIXMpd7qlhXA6toKvbwfvLAI2+wG3Pj//HvAwa1yTuGkr8FsWdgvk7xXTUyl0WXUzAGmsWLRycLM2MfDAuquiozcPcs8PZv17JuNvIdtz9cNDXyxRcH+QqnAvJulx+1/zfqQl1lVeZBN7oBJ8bwclJcNRF28P6xqjH0oN3ga8XVQnKzev5y3HcK+WUV6IKzfhJzLzP5rAP63ndDVze0Ocu1YqYQnicxf2T/IZ1y+zd9syr/7peQgsIwYXZ2muMEavsp4IGj/vKz11YJ55/w+1/i8nUjouV/B7YLoHALno33Yy9mEsWJNelk4rsibvpXotBWAS8b8vLWZIeL8DWwOnIHZPXFvu3"}
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 13; SM-G981B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Mobile Safari/537.36",
            }
            # cookies = {
            #     'did': did,
            #     'kpf': 'PC_WEB',
            #     'clientid': '3',
            #     'kpn': 'KUAISHOU_VISION',
            # }
            response = requests.post(url, json=json, proxies=proxies,  headers=headers, timeout=4)
            print(response.json())
            return response.json()
        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
            time.sleep(1)



THIS_UPDATE_CREATORS_KS = []
THIS_UPDATE_KEYWORD_NUM_KS = 0
THIS_UPDATE_CREATOR_NUM_KS = 0
THIS_UPDATE_VIDEO_NUM_KS = 0
THIS_UPDATE_KEYWORDS = []
DETAIL_DATA = []
DATA_LOCK = threading.Lock()
INSERT_LOCK = threading.Lock()
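# DETAIL_DATA is a shared buffer: the task_author workers below append scraped
# results to it under DATA_LOCK, while thread_insert_to_trino drains it in the
# background. INSERT_LOCK is held for the whole write batch, so the keyword
# loop can poll INSERT_LOCK.locked() to wait for uploads to finish.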
def thread_insert_to_trino():
    kuaishou_logger.info("Background database upload thread started")
    while not STOP_THREAD:
        with DATA_LOCK:
            detail = DETAIL_DATA.copy()
            DETAIL_DATA.clear()
        if len(detail) == 0:
            time.sleep(1)
            continue
        with INSERT_LOCK:
            print(len(detail))
            for item in detail:
                try:
                    with INSERT_TRINO_LOCK:
                        kuaishou_logger.info(f"Author #{item['index']}: uploading to the database......")
                        user_detail = item["user_detail"]
                        video_details = item["video_details"]

                        if user_detail == {}:
                            continue
                        if video_details == []:
                            continue
                        # Take register_time from the last fetched post (the oldest one)
                        user_detail["register_time"] = video_details[-1]["create_time"]

                        kuaishou_logger.info(f"Author #{item['index']}: uploading user detail......")
                        insert_kuaishou_user_detail(user_detail)

                        kuaishou_logger.info(f"Author #{item['index']}: uploading video details......")
                        insert_kuaishou_video_details(video_details, user_detail)
                        kuaishou_logger.info(f"Author #{item['index']}: upload finished")
                except Exception as e:
                    kuaishou_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
                    continue
    kuaishou_logger.info("Database upload thread stopped")





def get_authors_kuaishou(authors):
    # Split authors into those not yet in the database (need a full crawl)
    # and those already stored (only need a tag update).
    need_author_list = []
    not_need_author_list = []
    creator_id_list = get_creators_by_dt_media("kuaishou")
    for author in authors:
        if str(author["kuaishou_id"]) in creator_id_list:
            not_need_author_list.append(author)
            continue
        need_author_list.append(author)
    return need_author_list, not_need_author_list

def task_keyword_list_kuaishou_full_update():
    global THIS_UPDATE_KEYWORD_NUM_KS
    global THIS_UPDATE_CREATOR_NUM_KS
    global THIS_UPDATE_VIDEO_NUM_KS
    global THIS_UPDATE_CREATORS_KS
    global THIS_UPDATE_KEYWORDS
    start = 2
    end = 100
    keyword_list = feishu_api.get_keyword_list_by_range_kuaishou(access_token, app_token, sheet_keyword_list_kuaishou, start, end)
    if len(keyword_list) == 0:
        kuaishou_logger.info("No keywords to query")
        return
    while len(keyword_list) > 0:
        print(keyword_list)
        for row in keyword_list:
            # If the did pool is running low, pause and let it refill before continuing.
            if DID_QUEUE.get_size() < 100:
                time.sleep(800)

            keyword = row["keyword"]
            THIS_UPDATE_KEYWORD_NUM_KS += 1
            THIS_UPDATE_KEYWORDS.append(keyword)
            kuaishou_logger.info(f"Querying keyword: {keyword}")
            author_list = get_top100_authors_by_keyword(keyword)
            kuaishou_logger.info(f"Found {len(author_list)} authors")
            need_author_list, not_need_author_list = get_authors_kuaishou(author_list)
            kuaishou_logger.info(f"{len(need_author_list)} authors need a full crawl")
            print(f"Need:{need_author_list}")
            print(f"Not Need:{not_need_author_list}")

            # Authors already in the database only need the keyword appended to their tags.
            index = 1
            for author in not_need_author_list:
                kuaishou_id = author["kuaishou_id"]
                kuaishou_logger.info(f"Updating author tags {index}/{len(not_need_author_list)}, kuaishou_id:{kuaishou_id}")
                index += 1
                tags = get_tags_by_id_and_media(kuaishou_id, "kuaishou")
                if keyword in tags:
                    continue
                tags.append(keyword)
                update_tags_by_id_and_media(tags, kuaishou_id, "kuaishou")

            index = 1
            for author in need_author_list:
                author['index'] = index
                index = index + 1

            # The with-block does not return until every submitted task_author call finishes.
            with ThreadPoolExecutor(max_workers=5) as executor:
                futures = [executor.submit(task_author, author, len(need_author_list), keyword) for author in need_author_list]

            start_time = time.time()
            timeout = 20 * 60

            # Wait for the background upload thread to drain DETAIL_DATA, up to 20 minutes.
            while DETAIL_DATA != [] or INSERT_LOCK.locked():
                current_time = time.time()
                elapsed_time = current_time - start_time

                if elapsed_time > timeout:
                    kuaishou_logger.warning("Waited more than 20 minutes for the database upload; giving up on the wait")
                    break
                kuaishou_logger.info(f"All {len(need_author_list)} authors for keyword '{keyword}' crawled; uploading to database......")
                time.sleep(5)

            kuaishou_logger.info(f"Keyword : {keyword} Done.")
            feishu_api.update_time_by_rowId(access_token, app_token, sheet_keyword_list_kuaishou, row["row_id"], THIS_UPDATE_TIME)
        start = end + 1
        end = end + 100
        # Fetch the next range of keywords from the same sheet, using the same helper as the initial fetch.
        keyword_list = feishu_api.get_keyword_list_by_range_kuaishou(access_token, app_token, sheet_keyword_list_kuaishou, start, end)


def task_author(author: dict, total, keyword):
    global THIS_UPDATE_KEYWORD_NUM_KS
    global THIS_UPDATE_CREATOR_NUM_KS
    global THIS_UPDATE_VIDEO_NUM_KS
    global THIS_UPDATE_CREATORS_KS
    global DETAIL_DATA

    did = DID_QUEUE.pop_did()
    time.sleep(1.8)
    kuaishou_id = author["kuaishou_id"]
    kuaishou_logger.info(f"Querying author #{author['index']}...... {author['index']}/{total}")

    THIS_UPDATE_CREATOR_NUM_KS += 1
    THIS_UPDATE_CREATORS_KS.append(kuaishou_id)

    kuaishou_logger.info(f"Starting to get user detail,kuaishou_id:{kuaishou_id}")
    user_detail = get_user_info_by_kuaishou_id(kuaishou_id, did)
    # Check for an empty result before attaching tags, otherwise the check can never trigger.
    if user_detail == {}:
        kuaishou_logger.info(f"Failed to fetch user info, kuaishou_id:{kuaishou_id}")
        return {}, []
    user_detail["tags"] = str([keyword])
    kuaishou_logger.info(f"User info fetched, kuaishou_id:{kuaishou_id}")

    video_details = get_posts_by_kuaishou_id(kuaishou_id, did)
    if video_details == []:
        kuaishou_logger.info(f"No videos, kuaishou_id:{kuaishou_id}")
        return {}, []
    kuaishou_logger.info(f"Video list fetched, kuaishou_id:{kuaishou_id}")
    THIS_UPDATE_VIDEO_NUM_KS += len(video_details)

    with DATA_LOCK:
        kuaishou_logger.info(f"{kuaishou_id} queued for upload......")
        data_detail = {
            "index": author['index'],
            "user_detail": user_detail,
            "video_details": video_details
        }
        DETAIL_DATA.append(data_detail)
        print(f"data:{len(DETAIL_DATA)}")


def get_first_post_by_kuaishou_id(kuaishou_id):
    did = get_new_did()
    is_valid_did(did)
    retries = 0 
    while retries < 3:
        proxy = get_random_proxy_ks()
        kuaishou_logger.info(f"Using proxy: {proxy}")
        try:
            proxy_url = f"http://{proxy['ip']}:{proxy['port']}"
            proxies = {'http': proxy_url, 'https': proxy_url}
            url = "https://www.kuaishou.com/graphql"
            json = {
                "operationName": "visionProfilePhotoList",
                  "query": "fragment photoContent on PhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n  __typename\n  id\n  duration\n  caption\n  originCaption\n  likeCount\n  viewCount\n  commentCount\n  realLikeCount\n  coverUrl\n  photoUrl\n  photoH265Url\n  manifest\n  manifestH265\n  videoResource\n  coverUrls {\n    url\n    __typename\n  }\n  timestamp\n  expTag\n  animatedCoverUrl\n  distance\n  videoRatio\n  liked\n  stereoType\n  profileUserTopPhoto\n  musicBlocked\n  riskTagContent\n  riskTagUrl\n}\n\nfragment feedContentWithLiveInfo on Feed {\n  type\n  author {\n    id\n    name\n    headerUrl\n    following\n    livingInfo\n    headerUrls {\n      url\n      __typename\n    }\n    __typename\n  }\n  photo {\n    ...photoContent\n    ...recoPhotoFragment\n    __typename\n  }\n  canAddComment\n  llsid\n  status\n  currentPcursor\n  tags {\n    type\n    name\n    __typename\n  }\n  __typename\n}\n\nquery visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {\n  visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {\n    result\n    llsid\n    webPageArea\n    feeds {\n      ...feedContentWithLiveInfo\n      __typename\n    }\n    hostName\n    pcursor\n    __typename\n  }\n}\n",
                "variables": {
                    "userId": kuaishou_id,
                    "page": "profile",
                    "pcursor": "",
                }
            }
            
            headers = {
                "User-Agent": "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Mobile Safari/537.36 Edg/134.0.0.0",
                "Origin": "https://www.kuaishou.com",
                "Referer": f"https://www.kuaishou.com/profile/{kuaishou_id}",
            }

            cookies = {
                'kpf': 'PC_WEB',
                'clientid': '3',
                'kpn': 'KUAISHOU_VISION',
                'kwpsecproductname': 'PCLive',
                'kwfv1': 'PnGU+9+Y8008S+nH0U+0mjPf8fP08f+98f+nLlwnrIP9P9G98YPf8jPBQSweS0+nr9G0mD8B+fP/L98/qlPe4f8ecM8ecA+/pYPn+0G9Hl+9P7P/8jw/zDwBcEwnGMwePA8/SS8/8jw/ql+fLEPeGhw/r9+ApDP0SfPnHhGnbYPnG=',
                '_bl_uid': '8tm4OaUgu08wn5gaFkXtq3hlqbdn',
                'ktrace-context': '1|MS43NjQ1ODM2OTgyODY2OTgyLjIyMTcyNDgzLjE3NDc3MjM0ODY3ODkuMTQ3MjI4OQ==|MS43NjQ1ODM2OTgyODY2OTgyLjUzNzczMTM2LjE3NDc3MjM0ODY3ODkuMTQ3MjI5MA==|0|graphql-server|webservice|false|NA'
            }

            cookies['did'] = did
            print(f"did:{cookies['did']}")
            response = requests.post(url, json=json, proxies=proxies, cookies=cookies, headers=headers, timeout=4)
            kuaishou_logger.info(f"Response status code: {response.status_code}")
            add_did_to_cd(cookies['did'])
            if len(response.json()["data"]["visionProfilePhotoList"]["feeds"]) != 0:
                return response.json()["data"]["visionProfilePhotoList"]["feeds"][0]
        except  (ProxyError, ConnectionError, Timeout, SSLError) as e:
            kuaishou_logger.warning(f"Proxy connection failed ({type(e).__name__})")
            retries += 1
        except Exception as e:
            kuaishou_logger.error(f"Unexpected error: {str(e)}", exc_info=True)
            print(response.json())
            add_did_to_cd(cookies['did'])
            retries += 1
            time.sleep(1)
    return {}


def is_more_than_180_days(time_str, time_format="%Y-%m-%d %H:%M:%S"):
    """
    Return True if the given time string is more than 180 days before the current time.
    """
    try:
        # Parse the string into a datetime object
        given_time = datetime.strptime(time_str, time_format)

        # Current time
        current_time = datetime.now()

        # Time elapsed since the given time
        time_difference = current_time - given_time

        # More than 180 days?
        return time_difference > timedelta(days=180)
    except ValueError as e:
        print(f"Malformed time string: {e}")
        return False
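
# ms_timestamp_to_datetime (used by is_valid_user below) is defined elsewhere in this file.
# A minimal sketch of the assumed behaviour: convert a millisecond epoch timestamp into the
# "%Y-%m-%d %H:%M:%S" string format that is_more_than_180_days expects. The name below is
# hypothetical.
def _ms_timestamp_to_datetime_sketch(ms_timestamp):
    return datetime.fromtimestamp(ms_timestamp / 1000).strftime("%Y-%m-%d %H:%M:%S")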

def is_valid_user(kuaishou_id):
    # A user is considered valid only if the first post returned for their profile
    # is less than 180 days old.
    first_post = get_first_post_by_kuaishou_id(kuaishou_id)
    if first_post == {}:
        return False
    first_post_create_time = ms_timestamp_to_datetime(first_post["photo"]["timestamp"])
    if not is_more_than_180_days(first_post_create_time):
        return True
    kuaishou_logger.info(f"User {kuaishou_id}'s first returned post is more than 180 days old, skipping...")
    return False
    

GET_DID_LOCK = threading.Lock()
def get_new_did_v2():
    # Serialize did fetches across threads and pace them so the did source is not hammered.
    if GET_DID_LOCK.locked():
        time.sleep(5)
    with GET_DID_LOCK:
        did = get_new_did()
    time.sleep(5)
    return did
        
def task_kuaishou(now):
    global STOP_THREAD
    try:
        STOP_THREAD = False
        thread_update()
        task_keyword_list_kuaishou_full_update()
        insert_success_log_by_media("kuaishou")
        message = f"✅ Kuaishou script finished\nStart time: {THIS_UPDATE_TIME}\nCurrent time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\nThis run updated {THIS_UPDATE_KEYWORD_NUM_KS} keywords: {THIS_UPDATE_KEYWORDS}\nThis run updated {THIS_UPDATE_CREATOR_NUM_KS} authors and {THIS_UPDATE_VIDEO_NUM_KS} videos\nElapsed: {time.perf_counter() - now} seconds"
        print(message)
        feishu_api.sendToFeishu(message)
    finally:
        # Signal the background upload thread to stop and give it time to exit cleanly.
        STOP_THREAD = True
        time.sleep(20)


if __name__ == "__main__":
    task_kuaishou(time.perf_counter())
