import requests
import time
import random
from loguru import logger
from work.search_site_list import get_site_id

def get_time():
    """Throttle: pause for a random 5-15 second interval between requests."""
    delay = random.randint(5, 15)
    time.sleep(delay)

def index_bar(company_info, user):
    """Fetch the "indexed" progress value for a company's SERanking project.

    Args:
        company_info: dict with at least 'company_name'; forwarded to
            get_site_id to resolve the matching project.
        user: dict with 'cookies' (sent with the request) and 'account'
            (used in log messages).

    Returns:
        The indexed progress value on success, 0 when no matching project
        exists, and -1 on a non-200 response, an unparseable body, or after
        exhausting all retries.
    """
    cookies = user['cookies']
    matching_project = get_site_id(company_info=company_info, user=user)
    if matching_project == 0:
        # No matching project found upstream; propagate the sentinel.
        return 0
    url = "https://online.seranking.com/api.projects.site.positions.common.html"
    params = {
        "do": "getIndexed",
        "site_id": matching_project['site_id'],
        "site_se_id": matching_project["site_se_id"]
    }
    for _attempt in range(3):
        try:
            res = requests.get(url, params=params, cookies=cookies, timeout=5)
        except Exception as e:
            logger.error(f"{user['account']}: {company_info['company_name']} ----> 请求超时: {e}")
            time.sleep(random.randint(1, 2))
            continue
        if res.status_code != 200:
            return -1
        try:
            data = res.json()
        except ValueError as e:
            # Bug fix: a 200 response with a non-JSON body used to raise out
            # of the function; treat it as a failure instead.
            logger.error(f"{user['account']}: {company_info['company_name']} ----> 请求超时: {e}")
            return -1
        # NOTE(review): assumes get_site_id includes 'company_name' in the
        # matching project dict — confirm against search_site_list.
        logger.info(f"{matching_project['company_name']} -----> 进度条: {data['data']['indexed']}")
        return data['data']['indexed']
    # Bug fix: the original while-loop fell through after 3 failed attempts
    # and implicitly returned None; return -1 for a consistent failure value.
    return -1


