import requests
import time
import datetime
from loguru import logger
import os
import re
import random
import sys

# Module-level HTTP session.
# NOTE(review): this session appears unused — every request below calls
# requests.get(...) directly, so no cookie/connection reuse happens through it.
# Kept as-is in case other parts of the project import it; confirm before removing.
session = requests.Session()

def site_id_list(cookies):
    """Fetch the account's project (site) list from the SERanking API.

    Args:
        cookies: Cookie dict/jar for an authenticated SERanking session.

    Returns:
        list: the ``data`` payload (list of project dicts) on HTTP 200.
        int: ``-1`` on HTTP 403 (session invalid / forbidden),
             ``0`` on any other HTTP status or when all retries fail.
    """
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "baggage": "sentry-environment=production,sentry-public_key=9b4a6569c04148aa9ab275932e8b1961,sentry-trace_id=9528a2ef952b444fba859a0d93c7a13b,sentry-sample_rate=0.01,sentry-sampled=false",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://online.seranking.com/admin.site.rankings.site_id-8081108.html",
        "sec-ch-ua": "\"Not/A)Brand\";v=\"8\", \"Chromium\";v=\"126\", \"Google Chrome\";v=\"126\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "sentry-trace": "9528a2ef952b444fba859a0d93c7a13b-a4a9a03d96f806a8-0",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
    }
    url = "https://online.seranking.com/api.projects.site.list.html"
    params = {
        "do": "search",
        "limit": "30",
        "offset": "0",
        "query": "",
        "include_sub_accounts_sites": "1",
        "group_sub_account": "1",
        "show_groups": "1"
    }
    for _attempt in range(3):
        try:
            res = requests.get(url, headers=headers, cookies=cookies, params=params, timeout=5)
        except requests.RequestException as e:
            # Narrowed from bare Exception: only retry on network/timeout errors.
            logger.warning(f"请求失败, 重试中 -----> {e}")
            time.sleep(random.randint(1, 2))
            continue
        if res.status_code == 200:
            return res.json()['data']
        if res.status_code == 403:
            # Session expired / forbidden — caller treats -1 as "re-login needed".
            return -1
        logger.error(f"请求失败 -----> {res.status_code}")
        return 0
    # Bug fix: the original fell off the retry loop and implicitly returned
    # None after 3 failed attempts, which made callers crash when iterating.
    # Return 0 to match the other failure paths.
    logger.error("请求失败 -----> 重试次数已用尽")
    return 0

def project_site(site_id, cookies):
    """Resolve the search-engine / keyword-group identifiers for one site.

    Args:
        site_id: SERanking site (project) id used in the entrypoint URL.
        cookies: Cookie dict/jar for an authenticated SERanking session.

    Returns:
        dict: ``{"site_id", "group_id", "site_se_id", "search_engine_id"}``
              (all values as strings) on success.
        int: ``0`` on any failure (non-200 status, empty payload, or all
             retries exhausted).
    """
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://online.seranking.com/admin.site.rankings.site_id-8081108.html?ref=project_select_clicked",
        "sec-ch-ua": "\"Not/A)Brand\";v=\"8\", \"Chromium\";v=\"126\", \"Google Chrome\";v=\"126\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
    }
    url = f"https://online.seranking.com/api.projects.site.positions.entrypoint.site_id-{site_id}.html"
    for _attempt in range(3):
        try:
            res = requests.get(url, headers=headers, cookies=cookies, timeout=5)
        except requests.RequestException as e:
            # Narrowed from bare Exception: only retry on network/timeout errors.
            logger.warning(f"获取site失败, 重试中 ----> {e}")
            time.sleep(random.randint(1, 2))
            continue
        if res.status_code != 200:
            logger.error(f"获取site失败 ----> {res.status_code}")
            return 0
        data = res.json()["data"]
        engines = data.get("search_engines") or []
        groups = data.get("keywords_groups") or []
        # Robustness fix: the original indexed [0] unconditionally and raised
        # IndexError when either list was empty; fail with the established 0 code.
        if not engines or not groups:
            logger.error("获取site失败 ----> search_engines/keywords_groups 为空")
            return 0
        return {
            "site_id": str(site_id),
            "group_id": str(groups[0]["id"]),
            "site_se_id": str(engines[0]["site_engine_id"]),
            "search_engine_id": str(engines[0]["search_engine_id"])
        }
    # Bug fix: the original fell off the retry loop and implicitly returned
    # None after 3 failed attempts; return 0 to match the other failure paths.
    logger.error("获取site失败 ----> 重试次数已用尽")
    return 0

def get_site_id(company_info, user):
    """Find the project matching ``company_info['company_name']`` for a user.

    Args:
        company_info: dict with at least a ``company_name`` key.
        user: dict with ``cookies`` (session cookies) and ``account`` (for logs).

    Returns:
        dict: the matching project ids from ``project_site`` plus a
              ``company_name`` key on success.
        int: ``0`` when the project list cannot be fetched, the company has
             no project, or the site detail lookup fails.
    """
    cookies = user['cookies']
    project_list = site_id_list(cookies=cookies)
    # Bug fix: site_id_list can return -1 (403), 0 (error) or None; iterating
    # those raised TypeError. Treat any non-list result as failure.
    if not isinstance(project_list, list):
        logger.error(f"账号: {user['account']} -----> 获取项目列表失败")
        return 0
    project_info = next(
        (m for m in project_list if m.get("site_title") == company_info['company_name']),
        None,
    )
    if project_info is None:
        logger.info(f"账号: {user['account']} -----> 没有 {company_info['company_name']} 该项目")
        return 0
    matching_project = project_site(project_info['site_id'], cookies)
    # Bug fix: project_site returns 0 on failure; subscript-assigning into it
    # raised TypeError. Propagate the failure code instead.
    if not isinstance(matching_project, dict):
        return 0
    matching_project['company_name'] = company_info['company_name']
    return matching_project