import requests
from requests.cookies import RequestsCookieJar
from loguru import logger
import os
import json
import random
import time
import datetime
import sys
from work.search_site_list import site_id_list
from work.creat_project import get_site_id
from work.export_excel import excel_export
from work.SE_login import se_login

# Shared HTTP session reused by every request in this module, so cookies
# installed in se_run() persist across all API calls.
session = requests.Session()
# Browser-like default headers (captured from a real Chrome session) applied
# to every request made through the shared session.
session.headers = {
    "accept": "application/json, text/plain, */*",
    "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
    "baggage": "sentry-environment=production,sentry-public_key=9b4a6569c04148aa9ab275932e8b1961,sentry-trace_id=6d3c87b36e644447aa41b966f7c05994",
    "cache-control": "no-cache",
    "content-type": "application/x-www-form-urlencoded",
    "origin": "https://online.seranking.com",
    "pragma": "no-cache",
    "priority": "u=1, i",
    "referer": "https://online.seranking.com/admin.site.rankings.site_id-7535981.html?ref=left_menu_clicked",
    "sec-ch-ua": "\"Google Chrome\";v=\"125\", \"Chromium\";v=\"125\", \"Not.A/Brand\";v=\"24\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\"",
    "sec-fetch-dest": "empty",
    "sec-fetch-mode": "cors",
    "sec-fetch-site": "same-origin",
    "sentry-trace": "6d3c87b36e644447aa41b966f7c05994-883ed211bb97959d",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
    "x-requested-with": "XMLHttpRequest"
}

def read_account(file_path, user):
    """Return the cached account record for *user*, logging in on a cache miss.

    The cache is a JSON list of account dicts stored at *file_path*; the
    directory and file are created on demand. When the account is missing
    from the pool, ``se_login`` is called and the fresh record is appended.

    :param file_path: path to the JSON account-pool file
    :param user: dict holding at least an 'account' key plus login credentials
    :return: the cached or freshly-created account dict
    """
    directory = os.path.dirname(file_path)
    # dirname() is "" for a bare filename; os.makedirs("") raises, so skip it.
    if directory and not os.path.exists(directory):
        print("目录不存在，正在创建...")
        os.makedirs(directory)
        print(f"已创建目录: {directory}")

    if not os.path.exists(file_path):
        print("文件不存在，正在创建...")
        open(file_path, 'w').close()
        print(f"已创建json文件: {file_path}")

    # Empty file means no accounts cached yet: log in and seed the pool.
    if os.path.getsize(file_path) == 0:
        print("文件为空")
        new_user = se_login(user)
        with open(file_path, 'w') as file:
            json.dump([new_user], file, indent=4)
        time.sleep(1)
        return new_user

    with open(file_path, 'r') as file:
        json_data = json.load(file)

    target_account = user['account']
    matching_account = next(
        (info for info in json_data if info["account"] == target_account), None
    )
    if matching_account is not None:
        logger.info(f"账号: {target_account} ----> 账号信息已经存在")
        return matching_account

    logger.info(f"账号: {target_account} ----> 未找到指定账号信息")
    new_user = se_login(user)
    json_data.append(new_user)
    with open(file_path, 'w') as file:
        json.dump(json_data, file, indent=4)
        logger.success(f"账号: {user['account']} ----> 账号信息保存成功")
    return new_user

def retry_login(file_path, user):
    """Re-login *user* and persist the refreshed cookies into the JSON pool.

    Fixes a crash in the original flow: when the account was absent from the
    pool, ``matching_account`` stayed ``None`` and ``.update()`` raised
    AttributeError. The fresh record is now appended in that case.

    :param file_path: path to the JSON account-pool file
    :param user: dict holding at least an 'account' key plus login credentials
    """
    with open(file_path, 'r') as file:
        json_data = json.load(file)
    target_account = user['account']
    # Locate the cached record's index so it can be updated in place.
    match_index = None
    for i, account_info in enumerate(json_data):
        if account_info["account"] == target_account:
            match_index = i
            break
    logger.info(f"账号: {target_account} ----> cookies 已失效，正在重新登录获取新的 cookies")
    new_user = se_login(user)
    if match_index is None:
        # Account was never cached: append rather than crash on None.update().
        json_data.append(new_user)
    else:
        json_data[match_index].update(new_user)
    with open(file_path, 'w') as file:
        json.dump(json_data, file, indent=4)
        logger.success(f"账号: {user['account']} ----> 账号信息更新成功")

def project_site(site_id):
    """Fetch a project's search-engine / keyword-group identifiers.

    :param site_id: numeric project site id
    :return: dict of stringified ids (site_id, group_id, site_se_id,
             search_engine_id), or 0 on a non-200 response.
    """
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://online.seranking.com/admin.site.rankings.site_id-8081108.html?ref=project_select_clicked",
        "sec-ch-ua": "\"Not/A)Brand\";v=\"8\", \"Chromium\";v=\"126\", \"Google Chrome\";v=\"126\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
    }
    res = session.get(
        f"https://online.seranking.com/api.projects.site.positions.entrypoint.site_id-{site_id}.html",
        headers=headers,
    )
    if res.status_code != 200:
        logger.error(f"获取site失败 ----> {res.status_code}")
        return 0
    payload = res.json()["data"]
    engine = payload["search_engines"][0]
    return {
        "site_id": str(site_id),
        "group_id": str(payload["keywords_groups"][0]["id"]),
        "site_se_id": str(engine["site_engine_id"]),
        "search_engine_id": str(engine["search_engine_id"]),
    }

def get_time():
    """Pause 5–15 seconds (uniform random) to mimic human pacing between requests."""
    delay = random.randint(5, 15)
    time.sleep(delay)

def get_100_keys_id(matching_project):
    """Fetch ids of keywords currently outside the SERP ("not_in_serp") list.

    Queries yesterday→today positions for the project's engine and returns the
    raw comma-separated id string, or None on a non-200 response (the original
    failed silently with no log on that path).

    :param matching_project: dict with 'site_id', 'site_se_id', 'company_name'
    :return: comma-separated keyword id string, or None on HTTP failure
    """
    current_date = datetime.datetime.now()
    current_date_str = current_date.strftime("%Y-%m-%d")
    yesterday_str = (current_date - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
    url = "https://online.seranking.com/api.projects.site.positions.html"
    params = {
        "do": "getKeywordsIds",
        "site_id": matching_project['site_id'],
        "columns[]": "created_at",
        "keywords_count_display": "100",
        "page": "1",
        "group_by": "list",
        "sort_by": "name",
        "sort_mode": "1",
        "is_paid": "0",
        "columns_group_mode": "day",
        "site_se_id": matching_project['site_se_id'],
        "list_type": "not_in_serp",
        "report_period_from": yesterday_str,
        "report_period_to": current_date_str
    }
    res = session.get(url, params=params)
    if res.status_code != 200:
        # Make the failure visible instead of silently returning None.
        logger.error(f"{matching_project['company_name']} -----> 获取关键词id失败: {res.status_code}")
        return None
    keys_word_id = res.json()['data']
    keys_list = keys_word_id.split(',')
    # Typo fix: original log said "关键崔id".
    logger.info(f"{matching_project['company_name']} -----> 获取关键词id成功: 共有{len(keys_list)}个关键词")
    return keys_word_id

def get_10_keys_word(matching_project):
    """Return the number of keywords currently ranking in the top 10.

    :param matching_project: dict with 'site_id', 'site_se_id', 'company_name'
    :return: the top-10 count on success, None on a non-200 response.
    """
    now = datetime.datetime.now()
    report_to = now.strftime("%Y-%m-%d")
    report_from = (now - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
    query = {
        "do": "keywordsList",
        "site_id": matching_project['site_id'],
        "columns[]": "created_at",
        "keywords_count_display": "100",
        "page": "1",
        "group_by": "list",
        "sort_by": "name",
        "sort_mode": "1",
        "is_paid": "0",
        "columns_group_mode": "day",
        "site_se_id": matching_project['site_se_id'],
        "list_type": "in_top10",
        "report_period_from": report_from,
        "report_period_to": report_to
    }
    res = session.get(
        "https://online.seranking.com/api.projects.site.positions.html",
        params=query,
    )
    if res.status_code != 200:
        logger.error(f"{matching_project['company_name']} -----> 获取top10关键词失败")
        return None
    top_10_keys = res.json()["data"]["count"]
    logger.info(f"{matching_project['company_name']} ------> 当前top10关键词数量: {top_10_keys}")
    return top_10_keys

def index_bar(matching_project):
    """Poll the indexing-progress endpoint until it reports >= 99.

    Sleeps a random 5–15 s between polls (via get_time). Returns 1 when the
    progress threshold is reached; returns None if a request fails.

    :param matching_project: dict with 'site_id', 'site_se_id', 'company_name'
    """
    url = "https://online.seranking.com/api.projects.site.positions.common.html"
    params = {
        "do": "getIndexed",
        "site_id": matching_project['site_id'],
        "site_se_id": matching_project["site_se_id"]
    }
    while True:
        res = session.get(url, params=params)
        if res.status_code != 200:
            break
        progress = res.json()['data']['indexed']
        logger.info(f"{matching_project['company_name']} -----> 进度条: {progress}")
        if progress >= 99:
            logger.info(f"{matching_project['company_name']} -----> 进度条已到达100")
            return 1
        get_time()

def recheck_keys(keys_word_id, matching_project):
    """Trigger a server-side position recheck for the given keyword ids.

    Two-step flow against the recheck endpoint: first 'getAmount' reports how
    many keywords will be rechecked (logged only), then 'recheck' actually
    queues the job. Exits the process via sys.exit() on any non-200 response.

    :param keys_word_id: comma-separated keyword id string
    :param matching_project: dict with 'site_id', 'site_se_id', 'company_name'
    :return: 1 when the API confirms the recheck; None when no confirmation
             message is present in the response.
    """
    url = "https://online.seranking.com/api.projects.site.positions.recheck.html"
    # Step 1: ask how many keywords the recheck would cover (informational).
    data = {
        "do": "getAmount",
        "site_id": str(matching_project['site_id']),
        "type": "selected",
        # Key embeds the site-engine id, per the API's form-field convention.
        f"selected[{str(matching_project['site_se_id'])}][keywords]": keys_word_id
    }
    res = session.post(url, data=data)
    if res.status_code == 200:
        data = res.json()
        total_check_count = data["data"][str(matching_project['site_id'])]["total_check_count"]
        logger.info(f"{matching_project['company_name']} -----> 共有{total_check_count}个关键词需要复查")

    else:
        logger.error(f"复查异常 ----> {res.status_code}")
        sys.exit()
    # Small random pause between the two API calls.
    time.sleep(random.randint(3, 6))

    # Browser-like headers captured from a real Chrome session for step 2.
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "baggage": "sentry-environment=production,sentry-public_key=9b4a6569c04148aa9ab275932e8b1961,sentry-trace_id=0358230aed8a44d0998348189ea070f5",
        "cache-control": "no-cache",
        "content-type": "application/x-www-form-urlencoded",
        "origin": "https://online.seranking.com",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://online.seranking.com/admin.site.rankings.site_id-8286431.html?ref=project_select_clicked",
        "sec-ch-ua": "\"Google Chrome\";v=\"125\", \"Chromium\";v=\"125\", \"Not.A/Brand\";v=\"24\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "sentry-trace": "0358230aed8a44d0998348189ea070f5-b87c304e3c06b210",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest"
    }

    # Step 2: actually queue the recheck for the selected keyword ids.
    url = "https://online.seranking.com/api.projects.site.positions.recheck.html"
    params = {
        "do": "recheck"
    }
    data = {
        "do": "recheck",
        "site_id": str(matching_project['site_id']),
        "type": "selected",
        f"selected[{str(matching_project['site_se_id'])}][keywords]": keys_word_id
    }
    res = session.post(url, params=params, data=data, headers=headers)
    if res.status_code == 200:
        data = res.json()
        # A 'message' field in the payload signals the recheck was accepted.
        if data['data'].get("message"):
            logger.success(f"{matching_project['company_name']} ------> 复查成功，请稍后等待刷新")
            return 1
        else:
            logger.error(f"{matching_project['company_name']} ------> 复查失败，{data}")
    else:
        logger.error(f"{matching_project['company_name']} ----> 复查进行中失败")
        sys.exit()

def dele_keys_word(company_info, keyword_ids, cookies):
    """Delete the given keyword ids from a project via the keywords API.

    The request is sent as multipart form-data with explicit cookies (it uses
    a bare requests.post, not the shared session). The response body is now
    parsed only on failure, and defensively: the original called res.json()
    unconditionally, which raised on non-JSON error pages before the status
    code was ever checked.

    :param company_info: dict with 'site_id' and 'company_name'
    :param keyword_ids: comma-separated keyword id string
    :param cookies: cookie dict/jar for authentication
    :return: 1 on HTTP 200, 0 otherwise
    """
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
        "cache-control": "no-cache",
        "origin": "https://online.seranking.com",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://online.seranking.com/admin.site.rankings.site_id-8286431.html?ref=project_select_clicked",
        "sec-ch-ua": "\"Google Chrome\";v=\"125\", \"Chromium\";v=\"125\", \"Not.A/Brand\";v=\"24\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest"
    }
    url = "https://online.seranking.com/api.keywords.html"
    # (None, value) tuples make requests encode plain multipart fields.
    files = {
        'do': (None, 'deleteKeywords'),
        'site_id': (None, str(company_info['site_id'])),
        'keyword_ids': (None, str(keyword_ids)),
        'group_ids': (None, '')
    }
    res = requests.post(url, headers=headers, cookies=cookies, files=files)
    keys_list = keyword_ids.split(',')
    if res.status_code == 200:
        logger.success(f"{company_info['company_name']}: 共有{len(keys_list)}个 -----> 关键词删除成功")
        return 1
    # Only parse the body on failure, and tolerate non-JSON error responses.
    try:
        error_detail = res.json().get('error')
    except ValueError:
        error_detail = res.text
    logger.error(f"{company_info['company_name']} -----> 删除失败: {error_detail}")
    return 0

def add_key(keys_list, company_info, matching_account):
    """Add a batch of keywords to a project's keyword group.

    :param keys_list: keywords to add (stringified as a JSON-ish array)
    :param company_info: dict with 'site_id', 'group_id', 'company_name'
    :param matching_account: dict with 'account' (used for expiry logging)
    :return: 1 on success, 0 on API error or expired cookies (HTTP 403);
             exits the process on any other HTTP status.
    """
    payload = {
        "site_id": str(company_info['site_id']),
        "is_new_site": "false",
        "group_id": str(company_info['group_id']),
        # API expects double-quoted JSON-style keyword strings.
        "keys": str(keys_list).replace("'", "\""),
        "allow_duplicates": "0"
    }
    res = session.post(
        "https://online.seranking.com/api.keywords.html",
        params={"do": "add"},
        data=payload,
    )
    if res.status_code == 403:
        logger.error(f"{matching_account['account']} -----> 账号cookies信息已失效")
        return 0
    if res.status_code != 200:
        logger.error(f"{company_info['company_name']}: 添加关键词失败 ----> {res.json()} {res.status_code}")
        sys.exit()
    body = res.json()
    if body.get("data"):
        logger.success(f"{company_info['company_name']} ------>  添加成功关键词: {body['data']['added']}")
        return 1
    logger.error(f"{company_info['company_name']} ----> 添加关键词失败: {body['error']}")
    return 0

def se_run(user, company_info, keys_list, Reach_keys):
    """Run the full keyword workflow for one account and company.

    Loads (or creates) cached cookies, finds the project whose title matches
    company_info['company_name'], adds keys_list, waits for indexing, then —
    depending on how many keywords rank in the top 10 relative to Reach_keys —
    exports an excel report, optionally rechecks, and deletes the keywords.

    :param user: dict with 'account' / 'password'
    :param company_info: dict with at least 'company_name'
    :param keys_list: keywords to add for this run
    :param Reach_keys: target number of top-10 keywords
    :return: 1 on a completed pass, 0 on failure (or None on some
             intermediate paths, matching the original behavior)
    """
    account_files = "./user/user_pool.json"
    matching_account = read_account(account_files, user)
    cookies = matching_account['cookies']
    # Rebuild the shared session's cookie jar from the cached cookie dict.
    session.cookies = RequestsCookieJar()
    for key, value in cookies.items():
        session.cookies.set(key, value)

    project_list = site_id_list(cookies=cookies)
    if project_list == 0:
        # Cached cookies rejected: refresh them and retry via recursion.
        retry_login(account_files, user)
        return se_run(user, company_info, keys_list, Reach_keys)

    project_info = None
    for match in project_list:
        if company_info['company_name'] == match.get("site_title"):
            project_info = match
            break
    if project_info is None:
        logger.info(f"账号: {user['account']} -----> 没有 {company_info['company_name']} 该项目")
        return 0

    matching_project = project_site(project_info['site_id'])
    if matching_project == 0:
        # project_site signals HTTP failure with 0; the original crashed here
        # subscripting an int on the next line.
        return 0
    matching_project['company_name'] = company_info['company_name']

    res = add_key(keys_list, matching_project, matching_account)
    if res != 1:
        return 0

    res = index_bar(matching_project)  # block until indexing progress >= 99
    top10_keys = get_10_keys_word(matching_project)
    if top10_keys is None:
        # HTTP failure fetching the count; the original raised TypeError on
        # the None >= int comparison below.
        return 0

    if top10_keys >= Reach_keys:
        logger.info(f"{company_info['company_name']} -----> 已到达关键词标准")
        excel_export(cookies=cookies, company_info=company_info)
        logger.info(f"{company_info['company_name']} -----> 导出excel表格完毕")
        keys_word_id = get_100_keys_id(matching_project)
        res = dele_keys_word(matching_project, keys_word_id, cookies)
        if res == 1:
            excel_export(cookies=cookies, company_info=matching_project)
            logger.info(f"{company_info['company_name']} -----> 导出excel表格完毕")

    elif Reach_keys - 20 < top10_keys < Reach_keys:
        # Within 20 of the target: trigger a recheck before giving up.
        logger.info(f"相差-20差距, 准备复查中")
        if res == 1:
            keys_word_id = get_100_keys_id(matching_project)
            res = recheck_keys(keys_word_id, matching_project)
            time.sleep(random.randint(3, 5))
            if res == 1:
                res = index_bar(matching_project)  # wait for recheck to finish
                logger.info(f"{matching_project['company_name']} -----> 复查完毕")
                if res == 1:
                    keys_word_id = get_100_keys_id(matching_project)
                    res = dele_keys_word(matching_project, keys_word_id, cookies)
                    if res == 1:
                        top10_keys = get_10_keys_word(matching_project)
                        if top10_keys is not None and top10_keys >= Reach_keys:
                            logger.info(f"{company_info['company_name']} -----> 已到达关键词标准")
                            excel_export(cookies=cookies, company_info=matching_project)
                            logger.info(f"{company_info['company_name']} -----> 导出excel表格完毕")
                            return 1

    else:
        logger.info(f"账号{user['account']}: {company_info['company_name']} -----> 未达到标准")
        if res == 1:
            keys_word_id = get_100_keys_id(matching_project)
            res = dele_keys_word(matching_project, keys_word_id, cookies)
            if res == 1:
                excel_export(cookies=cookies, company_info=matching_project)
                logger.info(f"{company_info['company_name']} -----> 导出excel表格完毕")
        return 1

# if __name__ == '__main__':
#     user = {"account": "zhenghongzhi@grweb.cn", "password": "hongzhi789="}
#     project_name = "沧州朗为国际贸易有限公司"
#
#     se_run(user, project_name)