import asyncio
import json
import random
import re
import threading
import time

from com.arcfox.middleware import async_mysql_middleware as db
import requests
from loguru import logger
from aqc_parser import parse_data
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '/root/work/arcfox-crawler/'))
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.util.muilty_coroutine_util import concurrency


# Shared Redis-backed queue (sorted set) holding company names to crawl.
task_manager = RedisTaskManager("company_list_sort_set")
# Desktop-Chrome User-Agent sent on every request to avoid trivial bot blocking.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36'
}


def parse_id(response, company_name):
    """Extract the company's pid from a search-result response body.

    :param response: raw response text containing a ``{"pid":...,"entName"`` fragment
    :param company_name: company name, used only for logging context
    :return: the pid as a string with surrounding quotes stripped, or None
    """
    match = re.search(r'{"pid":(.*?),"entName"', response)
    if match is None:
        return None
    pid = match.group(1).replace('"', '')
    logger.info(f"[{company_name}]获取company_pid: {pid}")
    return pid


def get_proxy():
    """Fetch one fresh proxy endpoint from the tiqu provider API.

    :return: a requests-style proxies dict routing both http and https
             through the fetched proxy, or None when the provider call fails
    """
    api = "http://http.tiqu.letecs.com/getip3?num=1&type=1&pro=&city=0&yys=0&port=1&pack=29238&ts=0&ys=0&cs=0&lb=1&sb=0&pb=4&mr=1&regions=&tl=2"
    try:
        resp = requests.get(api, timeout=3)
        cleaned = resp.text.replace("\r\n", "")
        logger.info(f"proxy: {cleaned}")
        endpoint = f"http://{cleaned}"
        return {"http": endpoint, "https": endpoint}
    except Exception as exc:
        logger.error(exc)
        return None


def init_session(proxy):
    """Create a requests session warmed up against the aiqicha homepage.

    The warm-up GET collects the site cookies into the session.

    :param proxy: proxies dict passed straight through to requests (may be empty)
    :return: the ready session, or None when the warm-up request fails/times out
    """
    session = requests.session()
    try:
        homepage = session.get("https://aiqicha.baidu.com/", headers=headers,
                               data={}, timeout=5, proxies=proxy)
        logger.info(homepage.cookies)
        return session
    except Exception:
        logger.warning("init_session超时换代理")
        return None


def search(company_name, session, proxy):
    """Resolve a company name to its aiqicha pid via the search endpoint.

    :param company_name: name to search for
    :param session: warmed-up requests session (see init_session)
    :param proxy: proxies dict for the request
    :return: the pid string when the search succeeds with HTTP 200, else None
    """
    endpoint = 'https://aiqicha.baidu.com/s'
    query = {'q': company_name, 't': '0'}
    try:
        resp = session.get(endpoint, params=query, headers=headers,
                           timeout=5, proxies=proxy)
        if resp.status_code == 200:
            return parse_id(resp.text, company_name)
    except Exception:
        logger.warning("获取id超时换代理")
    return None



async def get_company_detail(company_name, session, proxy):
    """Fetch, parse and persist the full detail record for one company.

    Pipeline: resolve name -> pid (search), GET the basicAllDataAjax detail,
    parse it with aqc_parser, then persist via save_data.

    :return: True when the detail was fetched, parsed and saved; False otherwise
    """
    pid = search(company_name, session, proxy)
    if not pid:
        return False
    try:
        detail = session.get('https://aiqicha.baidu.com/detail/basicAllDataAjax',
                             params={"pid": pid}, headers=headers,
                             timeout=5, proxies=proxy)
        if detail.status_code == 200:
            parsed = parse_data(detail)
            if parsed:
                logger.info(parsed)
                await save_data(parsed)
                return True
    except Exception:
        logger.warning("获取详情超时换代理")
    return False


async def save_data(result):
    """Append the parsed company record to result.txt and upsert it into MySQL.

    :param result: parsed company dict; must carry 'company_name' and 'credit_no'
    """
    serialized = json.dumps(result, ensure_ascii=False)
    with open("result.txt", "a") as f:
        f.write(serialized + "\n")
    row = {
        "company_name": result['company_name'],
        "company_info": serialized,
        "task_status": 1,
        "company_unicode": result['credit_no']
    }
    # NOTE(review): __save_db reads params['task'] (singular) — presumably the
    # @concurrency wrapper unpacks this 'tasks' list into per-item calls; confirm
    # against muilty_coroutine_util before changing this keyword.
    await __save_db(tasks=[row])

@concurrency(5)
async def __save_db(**params):
    """Upsert one company row into tbl_company_task.

    Expects params['task'] — a dict with company_name, company_info,
    task_status and company_unicode keys. Presumably the @concurrency(5)
    wrapper delivers one 'task' per invocation from the 'tasks' list callers
    pass in (TODO confirm against muilty_coroutine_util).
    """
    task = params['task']
    mapping = db.MysqlMapping("tbl_company_task")
    if await check_exist(mapping, task):
        logger.info("数据已存在, 更新!")
        await mapping.update({"company_info": task["company_info"], "task_status": 1},
                             {"company_name": task['company_name'], "company_unicode": task['company_unicode']})
    else:
        # Fixed garbled log text: was "数据不已存在, 新增!" (mangled negation).
        logger.info("数据不存在, 新增!")
        await mapping.insert(task)


async def check_exist(mapping, data):
    """Look up an existing row keyed by company_unicode.

    :param mapping: MysqlMapping for tbl_company_task
    :param data: dict carrying the 'company_unicode' to match on
    :return: the matched row's id, or None when no row exists
    """
    rows = await mapping.query({"company_unicode": data['company_unicode']}, ["id"])
    return rows[0][0] if rows else None

def get_cookie():
    """Pick a random cookie line from cookies.txt and return a cookies dict.

    NOTE(review): the randomly selected line is read into `cookie` but never
    used — the returned dict carries a hard-coded BAIDUID_BFESS value. This
    looks like debug leftover (the commented-out consume-and-rewrite logic was
    removed); confirm whether the file cookie should be returned instead.

    :return: a cookies dict, or None when cookies.txt is empty
    """
    with open("cookies.txt", 'r') as f:
        lines = f.readlines()
    if not lines:
        logger.info("暂无可用cookie")
        return None
    cookie = random.choice(lines).replace("\n", "")
    cookies = {
        'BAIDUID_BFESS': "02CF3D4FD363AB9D68DDBC94D084E822:FG=1",
    }
    logger.info(cookies)
    return cookies


async def get_redis_cookie():
    """Pop one cookie from the 'aqc_cookies' Redis queue.

    :return: the pulled cookie, or None when the queue is empty
    """
    manager = RedisTaskManager("aqc_cookies")
    pulled = await manager.pull_tasks(1)
    logger.info(pulled)
    return pulled[0] if pulled else None


async def get_tasks():
    """Pull one crawl task from the shared company queue and log it.

    :return: whatever pull_tasks yields — a task list, possibly empty/None
    """
    batch = await task_manager.pull_tasks(1)
    logger.info(batch)
    return batch


async def run():
    """Main crawl loop: pull tasks from Redis, fetch details, requeue failures.

    Runs forever; on a failed fetch the task is pushed to the fail queue and
    the HTTP session is re-initialized (fresh cookies).
    """
    proxy = {}
    # NOTE: init_session may return None on a failed warm-up; downstream calls
    # would then fail and the task lands in the fail queue before a retry.
    session = init_session(proxy)
    while True:
        tasks = await get_tasks()
        if not tasks:
            # time.sleep here would block the whole event loop; asyncio.sleep
            # yields control while idling.
            await asyncio.sleep(10)
            logger.info("暂无任务, 休眠10秒重试")
            continue
        for task in tasks:
            # TODO(review): hard-coded test company left in place of
            # task['companyName'] — restore the real task field for production.
            result = await get_company_detail('上海凭安征信服务有限公司', session, proxy)
            if not result:
                await task_manager.add_fail_tasks([task])
                session = init_session(proxy)
            # Throttle between tasks without stalling the event loop.
            await asyncio.sleep(1)


if __name__ == "__main__":
    # Entry point: start the long-running crawl loop on a fresh event loop.
    asyncio.run(run())

