import requests
import time
import datetime
from loguru import logger
import os
import sys
from work.search_site_list import get_site_id

def excel_export(user, company_info):
    """Export a keyword-position report (xlsx) from seranking.com for one project.

    Flow: resolve the project via ``get_site_id`` -> request an export task ->
    poll the task status until it reports "success" -> download the file to
    ``./excel/<company_name>.xlsx``.

    Args:
        user: dict with at least ``cookies`` (requests-compatible cookie jar/dict)
            and ``account`` (display name used in error messages).
        company_info: dict with at least ``company_name``.

    Returns:
        1 on success, 0 on download/poll failure, or a message string when the
        account has no matching project.
    """
    # Per-request timeout (seconds) so a dead connection cannot hang forever.
    request_timeout = 30
    # Upper bound on status polls (~2 minutes at 2s apart) — the original
    # looped unboundedly and busy-spun on non-200 responses.
    max_poll_attempts = 60

    cookies = user['cookies']
    matching_project = get_site_id(company_info=company_info, user=user)
    if matching_project == 0:
        return f"账号: {user['account']} -----> 没有 {company_info['company_name']} 该项目"

    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "baggage": "sentry-environment=production,sentry-public_key=9b4a6569c04148aa9ab275932e8b1961,sentry-trace_id=c89ec661cce847d3b9ee3f13049b58c8",
        "cache-control": "no-cache",
        "content-type": "application/x-www-form-urlencoded",
        "origin": "https://online.seranking.com",
        "pragma": "no-cache",
        "priority": "u=1, i",
        # NOTE(review): referer carries a hard-coded site_id (8286431) copied
        # from a browser session; the server appears not to validate it.
        "referer": "https://online.seranking.com/admin.site.rankings.site_id-8286431.html?ref=project_select_clicked",
        "sec-ch-ua": "\"Google Chrome\";v=\"125\", \"Chromium\";v=\"125\", \"Not.A/Brand\";v=\"24\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "sentry-trace": "c89ec661cce847d3b9ee3f13049b58c8-8665a0202242c9cd",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest"
    }

    url = "https://online.seranking.com/api.projects.export.keywords.position.html"
    current_date = datetime.datetime.now()
    current_date_str = current_date.strftime("%Y-%m-%d")
    yesterday_date = current_date - datetime.timedelta(days=1)
    yesterday_str = yesterday_date.strftime("%Y-%m-%d")

    # Export-task request payload: yesterday..today, detailed positions, xlsx.
    payload = {
        "do": "export",
        "group_by": "list",
        "results_type": "organic",
        "page": "1",
        "keywords_count_display": "100",
        "list_type": "in_top30",
        "groups_mode": "0",
        "group_by_last_landing_url": "0",
        "report_period_from": yesterday_str,
        "report_period_to": current_date_str,
        "site_id": matching_project['site_id'],
        "sort_date": "",
        "columns_group_mode": "day",
        "export_type": "positions_detailed",
        "se": str(matching_project['search_engine_id']),
        "seId": matching_project['site_se_id'],
        "table": str(matching_project['search_engine_id']),
        "include_target_urls": "0",
        "with_diff": "false",
        "file_type": "xlsx"
    }
    res = requests.post(url, headers=headers, cookies=cookies, data=payload,
                        timeout=request_timeout)
    token = res.json()["data"]["token"]

    # Poll the export task until the server reports "success", with a hard cap.
    params = {
        "do": "taskStatus",
        "site_id": str(matching_project['site_id']),
        "token": token
    }
    for _ in range(max_poll_attempts):
        res = requests.get(url, headers=headers, cookies=cookies, params=params,
                           timeout=request_timeout)
        if res.status_code == 200 and res.json()['data'] == "success":
            logger.info('获取成功')
            break
        # Sleep on every retry (the original busy-spun on non-200 responses).
        time.sleep(2)
    else:
        logger.error(f"导出任务超时 -----> site_id={matching_project['site_id']}")
        return 0

    excel_url = f"https://online.seranking.com/api.projects.export.keywords.position.html?do=download&site_id={matching_project['site_id']}&token={token}"
    res = requests.get(excel_url, headers=headers, cookies=cookies,
                       timeout=request_timeout)
    if res.status_code == 200:
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs("excel", exist_ok=True)
        with open(f'./excel/{matching_project["company_name"]}.xlsx', 'wb') as file:
            file.write(res.content)
        logger.success("文件保存成功")
        return 1
    else:
        logger.error(f"文件保存失败 -----> {res.status_code}")
        return 0
