import datetime
import requests
import time
import random
from loguru import logger
from work.search_site_list import get_site_id

def get_10_keys_word(company_info, user):
    """Fetch the number of keywords ranking in the top 10 for a company's site.

    Looks up the SERanking project matching ``company_info`` via
    ``get_site_id``, then queries the SERanking positions API for the
    ``in_top10`` keyword list over the yesterday→today window and returns
    the reported count.

    Args:
        company_info: dict with at least ``company_name``; passed through to
            ``get_site_id`` to resolve the project.
        user: dict with at least ``account`` and ``cookies`` (a cookie jar /
            dict accepted by ``requests``).

    Returns:
        int: the top-10 keyword count on success; 0 on a non-200 response
        or when all retry attempts fail.
    """
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "baggage": "sentry-environment=production,sentry-public_key=9b4a6569c04148aa9ab275932e8b1961,sentry-trace_id=6d3c87b36e644447aa41b966f7c05994",
        "cache-control": "no-cache",
        "content-type": "application/x-www-form-urlencoded",
        "origin": "https://online.seranking.com",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://online.seranking.com/admin.site.rankings.site_id-7535981.html?ref=left_menu_clicked",
        "sec-ch-ua": "\"Google Chrome\";v=\"125\", \"Chromium\";v=\"125\", \"Not.A/Brand\";v=\"24\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "sentry-trace": "6d3c87b36e644447aa41b966f7c05994-883ed211bb97959d",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest"
    }
    cookies = user['cookies']
    # Resolve the SERanking project (site_id / site_se_id) for this company.
    matching_project = get_site_id(company_info=company_info, user=user)

    # Report window: yesterday through today (naive local time — matches the
    # site's expected date format; TODO confirm whether UTC is required).
    current_date = datetime.datetime.now()
    current_date_str = current_date.strftime("%Y-%m-%d")
    yesterday_date = current_date - datetime.timedelta(days=1)
    yesterday_str = yesterday_date.strftime("%Y-%m-%d")
    url = "https://online.seranking.com/api.projects.site.positions.html"
    params = {
        "do": "keywordsList",
        "site_id": matching_project['site_id'],
        "columns[]": "created_at",
        "keywords_count_display": "100",
        "page": "1",
        "group_by": "list",
        "sort_by": "name",
        "sort_mode": "1",
        "is_paid": "0",
        "columns_group_mode": "day",
        "site_se_id": matching_project['site_se_id'],
        # "in_top10" restricts the list to keywords ranking in positions 1-10.
        "list_type": "in_top10",
        "report_period_from": yesterday_str,
        "report_period_to": current_date_str
    }
    # Retry transient network errors up to 3 times with a short random
    # backoff; a non-200 response is treated as a hard failure (no retry).
    retry_count = 0
    while retry_count < 3:
        try:
            res = requests.get(url, params=params, headers=headers, cookies=cookies, timeout=5)
        except Exception as e:
            logger.error(f"{user['account']}: {company_info['company_name']} ----> 请求超时: {e}")
            time.sleep(random.randint(1, 2))
            retry_count += 1
            continue
        if res.status_code == 200:
            data = res.json()
            top_10_keys = data["data"]["count"]
            logger.info(f"{matching_project['company_name']} ------> 当前top10关键词数量: {top_10_keys}")
            return top_10_keys
        else:
            logger.error(f"{matching_project['company_name']} -----> 获取top10关键词失败")
            return 0
    # BUG FIX: previously the function fell off the end here and implicitly
    # returned None after exhausting all retries; callers expect an int.
    logger.error(f"{user['account']}: {company_info['company_name']} ----> 重试3次后仍然失败")
    return 0
