# -*- coding: utf-8 -*-
import time
from loguru import logger
import datetime
import traceback
import threading
import sys
import os
import json
from queue import Queue
from fake_useragent import UserAgent
import concurrent.futures
from src.service.requests_middler import requests_sys
from src.service.requests_middler import proxy_requests_sys
from src.service.requests_middler import insert_shipping_info
from src.service.requests_middler import read_excel
from src.service.requests_middler import read_transport_json
from src.service.requests_middler import get_proxies
from src.service.requests_middler import get_abrod_proxies
from src.crawler.zim.zim_get_abck import get_abck_personal

ip_queue = Queue()  # shared pool of working proxies; refilled by proxies_queue(), workers put proxies back after a successful request

ua = UserAgent()
random_user_agent = ua.random  # NOTE(review): sampled once at import time, not per request — confirm this is intended
stop_threading_event = threading.Event()  # presumably a shutdown flag — not set/checked in this file; verify callers

# Global crawl counters, mutated by zim_transport() and reported by read_excel().
total_requests = 0
loss_requests = 0

def proxies_queue(proxies_switch):
    """Return a proxy, preferring one already sitting in the shared ip_queue.

    When the queue is empty, fetch a fresh proxy from the provider selected by
    *proxies_switch* (1 = domestic pool, 2 = overseas pool, any other value
    falls back to the domestic pool) and retry up to 3 times on provider
    failure.

    Args:
        proxies_switch: provider selector (1 domestic, 2 overseas).

    Returns:
        A proxy value on success, or None after 3 failed provider calls.
    """
    retry_count = 0
    while retry_count < 3:
        if not ip_queue.empty():
            # NOTE: empty()+get() is race-prone across threads, but a blocking
            # get() here only waits until another worker returns a proxy.
            return ip_queue.get()
        if proxies_switch == 2:
            proxy = get_abrod_proxies()
        else:
            # switch == 1 and any unexpected value both use the domestic pool
            proxy = get_proxies()
        if proxy:
            logger.info(f"获取ip代理成功: {proxy}")
            ip_queue.put(proxy)
        else:
            retry_count += 1
            logger.error("获取ip代理失败")
    # BUG FIX: the original fell off the loop and returned None implicitly,
    # giving callers no indication that all retries were exhausted.
    logger.error("获取ip代理失败: 重试次数已用完")
    return None

def get_point(cookies, point, des_code):
    """Resolve a city name to its ZIM port query code via the ports/inlands API.

    Args:
        cookies: dict holding the anti-bot cookies (_abck etc.).
        point: city/port name to query (upper-cased before sending).
        des_code: UN location code fragment that must appear in the result's
            'portCode' for it to be accepted.

    Returns:
        The matching 'portCode' string, None when no match / retries exhausted,
        or 0 when the server rejected the _abck cookie (caller must refresh it).
    """
    headers = {
        "accept": "*/*",
        "accept-language": "zh-CN,zh;q=0.9",
        "access-control-allow-origin": "*",
        "cache-control": "no-cache",
        "culture": "en-US",
        "expires": "0",
        "pageid": "1436",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://www.zimchina.com/schedules/point-to-point?portcode=CNSNH%3B10&portdestinationcode=USLAX%3B10&direction=true&fromdate=01-Oct-2024&weeksahead=4",
        "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36"
    }
    url = "https://www.zimchina.com/api/v2/scheduleByRoute/GetPortsInLands"
    params = {
        "query": point.upper()
    }
    retry_count = 0
    while retry_count < 3:
        res = requests_sys(url, headers=headers, cookies=cookies, params=params, GET=True)
        if res:
            js_data = res.json()
            if js_data:
                for info in js_data:
                    if des_code in info['portCode']:
                        return info['portCode']
                return None
            else:
                logger.error(f"{point}: 查询接口获取数据失败")
                # BUG FIX: the original did not increment retry_count here, so
                # a server that keeps answering with an empty payload trapped
                # this loop forever.
                retry_count += 1
        elif res == 0:
            # requests_sys signals a timeout with 0 (per the log message below)
            logger.error(f"{point}: 获取查询接口数据请求超时")
            retry_count += 1
        else:
            # any other falsy response means the _abck cookie was rejected
            logger.error(f"{point}: _abck参数失效")
            return 0
    return None

# Human-verification (CAPTCHA) challenge endpoint: https://www.zimchina.com/_sec/cp_challenge/verify

def load_cookies_from_json(filename):
    """Load cookies from a JSON file; return None when the file is absent."""
    if not os.path.exists(filename):
        return None
    with open(filename, 'r') as fp:
        return json.load(fp)

def save_abck_json(abck_value, filename):
    """Persist a bare _abck anti-bot token to *filename* as pretty JSON."""
    cookies = {
        "_abck": abck_value,
    }
    with open(filename, 'w') as json_file:
        json.dump(cookies, json_file, indent=4)
    # BUG FIX: the original f-string contained no placeholders and printed the
    # literal "(unknown)" instead of the target filename.
    print(f"_abck 已成功保存到 {filename}")

def save_cookies_to_json(cookies_abck, filename):
    """Persist the _abck and bm_sz cookies to *filename* as pretty JSON.

    Args:
        cookies_abck: mapping that must contain '_abck' and 'bm_sz' keys;
            any other keys are deliberately dropped.
        filename: destination JSON file path (overwritten).
    """
    cookies = {
        "_abck": cookies_abck['_abck'],
        "bm_sz": cookies_abck['bm_sz']
    }
    with open(filename, 'w') as json_file:
        json.dump(cookies, json_file, indent=4)
    # BUG FIX: the original f-string contained no placeholders and printed the
    # literal "(unknown)" instead of the target filename.
    print(f"_abck 已成功保存到 {filename}")

def run_save_cookies(cookie_file):
    """Fetch fresh anti-bot cookies and persist them to *cookie_file*.

    get_abck_personal() returns 0 on failure, in which case nothing is saved.
    """
    fresh_cookies = get_abck_personal()
    if fresh_cookies == 0:
        return
    save_cookies_to_json(fresh_cookies, cookie_file)

def zim_transport(origin_city, destination_city, cookie_file):
    """Query ZIM's point-to-point schedule API for one origin/destination pair.

    Flow: resolve both city names through maeu_ports.json, look up the cached
    ZIM query codes in zim_query.json, then call the schedule endpoint through
    a rotating proxy using this origin's _abck cookie. Each parsed route is
    stored via insert_shipping_info(); failures are recorded via
    read_transport_json(). Mutates the module-level counters
    total_requests / loss_requests.

    Args:
        origin_city: origin port name, matched case-insensitively.
        destination_city: destination port name, matched case-insensitively.
        cookie_file: path of the JSON file holding this origin's anti-bot
            cookies; refreshed in place when the server rejects _abck.
    """
    global total_requests
    global loss_requests

    # Build a case-insensitive index of port metadata from maeu_ports.json
    # (located one directory above this module).
    port_mapping = {}
    # file_path = "../maeu_ports.json"
    file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'maeu_ports.json')

    if os.path.exists(file_path):
        with open(file_path, "r") as file:
            Menu_json = json.load(file)
    else:
        logger.error(f"不存在文件: {file_path}")
        sys.exit()
    for info in Menu_json['ports']:
        port_mapping[info['portName'].upper()] = {
            'portCode': info.get('portCode'),
            'unLocationCode': info['unLocationCode'],
            "regionCode": info.get('regionCode'),
            "portName": info.get("portName")
        }
    # zim_query.json caches ZIM-specific query codes per city name; created
    # empty on first run, keys normalized to upper case below.
    zim_query_file = "zim_query.json"
    zim_query_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), zim_query_file)
    # print(f"zim_query.json路径{zim_query_file}")
    if os.path.exists(zim_query_file):
        with open(zim_query_file, "r") as file:
            zim_data = json.load(file)
    else:
        zim_data = {}
        with open(zim_query_file, "w") as file:
            json.dump(zim_data, file)
    zim_data = {key.upper(): value for key, value in zim_data.items()}
    origin_info = port_mapping.get(origin_city.upper())
    destination_info = port_mapping.get(destination_city.upper())
    if origin_info and destination_info:
        origin_code = origin_info['portCode']
        origin_unLocationCode = origin_info['unLocationCode']
        destination_code = destination_info['portCode']
        des_unLocationCode = destination_info['unLocationCode']
        regionCode = destination_info.get('regionCode', None)

        # retry_count counts request timeouts; err_count counts _abck
        # rejections — three of either aborts this pair.
        retry_count = 0
        err_count = 0
        while retry_count < 3 and err_count < 3:
            cookies = load_cookies_from_json(cookie_file)
            origin_query = zim_data.get(origin_city.upper())
            des_query = zim_data.get(destination_city.upper())
            if not des_query:
                logger.info(f"{destination_city}: 未获取到编码 跳过")
                break
            if cookies and "_abck" in cookies:
                logger.info(f"{cookie_file}: 已获取现有 _abck参数")
            else:
                # No usable cookie on disk — fetch and persist a fresh one.
                # _abck = get_abck()
                # cookies = get_abck_platform()
                # save_abck_json(_abck, cookie_file)
                # save_cookies_to_json(cookies, cookie_file)
                run_save_cookies(cookie_file)
                cookies = load_cookies_from_json(cookie_file)

            # if not zim_data.get(destination_city.upper()):
            #     des_query = get_point(cookies, destination_city, des_unLocationCode)
            #     if des_query is None:
            #         logger.error(f"{destination_city} ----> {des_query}查询参数获取失败")
            #         break
            #     elif des_query == 0:
            #         _abck = get_abck()
            #         save_cookies_to_json(_abck, cookie_file)
            #         retry_count += 1
            #         continue
            #     zim_data[destination_city] = des_query
            #     with open(zim_query_file, "w") as file:
            #         json.dump(zim_data, file, indent=4)
            #         logger.success(f"{destination_city} ---> {des_query}参数保存成功")
            logger.info(f"当前查询 起点:{origin_city} ----> 终点: {destination_city}")
            logger.info(f"当前请求总数: {total_requests}, 失败请求总数: {loss_requests}")
            headers = {
                "accept": "*/*",
                "accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
                "access-control-allow-origin": "*",
                "cache-control": "no-cache",
                "culture": "en-US",
                "expires": "0",
                "pageid": "16439",
                "pragma": "no-cache",
                "priority": "u=1, i",
                "referer": "https://www.zim.com/schedules/point-to-point?portcode=CNSNH%3B10&portdestinationcode=USLAX%3B10&direction=true&fromdate=23-Dec-2024&weeksahead=6",
                "sec-ch-ua": "\"Microsoft Edge\";v=\"131\", \"Chromium\";v=\"131\", \"Not_A Brand\";v=\"24\"",
                "sec-ch-ua-mobile": "?0",
                "sec-ch-ua-platform": "\"Windows\"",
                "sec-fetch-dest": "empty",
                "sec-fetch-mode": "cors",
                "sec-fetch-site": "same-origin",
                "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0",
                "x-dtpc": "5$319785368_138h334vTHNTKIKHOIAVFNELFFNAPFVTAKHCHIHM-0e0",
                "x-dtreferer": "https://www.zim.com/schedules/point-to-point?portcode=CNSNH;10&portdestinationcode=USLAX;10&direction=true&fromdate=23-Dec-2024&weeksahead=6"
            }

            # Query a 4-week window starting today (API wants dd-Mon-YYYY).
            current_date = datetime.datetime.now()
            formatted_date = current_date.strftime("%d-%b-%Y")
            params = {
                "portcode": origin_query,
                "portdestinationcode": des_query,
                "direction": "true",
                "fromdate": formatted_date,
                "weeksahead": "4"
            }
            # url = "https://www.zimchina.com/api/v2/scheduleByRoute/getResult"
            url = "https://www.zim.com/api/v2/scheduleByRoute/getResultNewDesign"
            proxies_switch = 1  # 1 = domestic (CN) proxy pool, 2 = overseas
            proxies = proxies_queue(proxies_switch)
            # print(cookies['_abck'])
            # res = requests_sys(url, headers=headers, cookies=cookies, params=params, GET=True)
            res = proxy_requests_sys(url, headers=headers, cookies=cookies, params=params, GET=True, proxies=proxies)
            # res = proxy_requests_sys(url, headers=headers, params=params, GET=True, proxies=proxies)
            if res:
                # Request worked — return the proxy to the pool for reuse.
                ip_queue.put(proxies)
                total_requests += 1
                # cookies = res.cookies
                # save_cookies_to_json(cookies, cookie_file)
                js_data = res.json()
                if js_data.get("routes"):
                    # Each route appears to be a list of legs, indexed
                    # first-leg [0] to last-leg [-1] — confirm against the API.
                    for info in js_data['routes']:
                        item = {}
                        item['origin_code'] = f"{origin_city}({origin_unLocationCode})"
                        if regionCode:
                            item['destination_code'] = f"{destination_city}, {regionCode}({des_unLocationCode})"
                        else:
                            item['destination_code'] = f"{destination_city}({des_unLocationCode})"
                        item['depart_date'] = datetime.datetime.fromisoformat(info[0]['departureDate']).date().isoformat()
                        item['arrive_date'] = datetime.datetime.fromisoformat(info[-1]['arrivaleDate']).date().isoformat()
                        item['transport_contractor'] = "ZIM"
                        # item['haulage_time'] = sum(int(x['daysAtSea']) for x in info)
                        item['haulage_time'] = info[0]['daysAtSea']
                        item['ship_name'] = info[0]['vesselName']
                        item['voyage_number'] = info[0]['vesselCode']
                        # Walk backwards over up to the last 4 legs to find the
                        # final vessel-carrying leg; if its arrival port differs
                        # from the destination, record it as the transfer port.
                        index = 1
                        while index < 5:
                            if info[-index].get("vesselCode"):
                                sys_city = info[-index]['portArrivaleName'].split('(')[0].strip()
                                # print(sys_city)
                                item['ship_name'] = info[-index]['vesselName']
                                item['voyage_number'] = info[-index]['vesselCode']
                                if destination_city.upper() != sys_city.upper():
                                    des2_city = port_mapping.get(sys_city.upper())['portName']
                                    des2_code = port_mapping.get(sys_city.upper())['unLocationCode']
                                    regionCode2 = port_mapping.get(sys_city.upper()).get('regionCode', None)
                                    if regionCode2:
                                        item['transfer_code'] = f"{des2_city}, {regionCode2}({des2_code})"
                                    else:
                                        item['transfer_code'] = f"{des2_city}({des2_code})"
                                break
                            else:
                                index += 1
                        insert_shipping_info(item)
                        # print(item)
                else:
                    read_transport_json(transport='ZIM', origin_city=origin_city, des_city=destination_city,
                                        fail_result="未查询到数据")
                    logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 未查询到数据")
                    loss_requests += 1
                break
            elif res == 0:
                # 0 from proxy_requests_sys signals a timeout (per the log)
                logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 未查询到数据, 请求超时")
                retry_count += 1
            else:
                # Any other falsy response: _abck rejected — refresh cookies.
                logger.error(f"{origin_city} ----> {destination_city}: _abck参数失效")
                # _abck = get_abck()
                # cookies = get_abck_platform()
                # save_abck_json(_abck, cookie_file)
                # save_cookies_to_json(cookies, cookie_file)
                run_save_cookies(cookie_file)
                # if cookies != 0:
                #     save_cookies_to_json(cookies, cookie_file)
                # elif cookies == 0:
                #     sys.exit()
                err_count += 1
        if err_count == 3:
            read_transport_json(transport='ZIM', origin_city=origin_city, des_city=destination_city,
                                fail_result="查询数据异常")
            logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 查询数据异常")
        if retry_count == 3:
            read_transport_json(transport='ZIM', origin_city=origin_city, des_city=destination_city,
                                fail_result="请求超时")
            loss_requests += 1

# ZIM per-origin crawl driver
def process_route(origin_city, destination_list, cookie_file):
    """Query every destination for one origin port, reusing one cookie file."""
    for dest in destination_list:
        zim_transport(origin_city, dest, cookie_file)

def zim_spider_run():
    """Entry point: fan out one crawler thread per origin port.

    The origin/destination lists are read from origin_and_destination.json
    one directory above this module. Each origin is paired (positionally)
    with its own cookie file so threads never share anti-bot state; thread
    start-up is staggered by 5 seconds. Finishes by exporting the aggregate
    counters via read_excel().
    """
    global total_requests
    global loss_requests

    this_dir = os.path.dirname(os.path.abspath(__file__))
    route_path = os.path.join(os.path.dirname(this_dir), "origin_and_destination.json")
    with open(route_path, 'r', encoding='utf-8') as fh:
        route_conf = json.load(fh)

    origin_city_list = route_conf['origin_city_list']
    destination_list = route_conf['destination_list']

    # One cookie file per origin port, stored next to this module.
    cookie_names = [
        'zim_abck_shanghai.json',
        'zim_abck_xiamen.json',
        'zim_abck_nansha.json',
        'zim_abck_qingdao.json',
        'zim_abck_yantian.json',
        'zim_abck_ningbo.json',
        'zim_abck_tianjin.json',
    ]
    cookie_paths = [os.path.join(this_dir, name) for name in cookie_names]

    with concurrent.futures.ThreadPoolExecutor() as pool:
        pending = []
        # zip() pairs origins with cookie files positionally; extras on
        # either side are silently skipped.
        for origin, cookie_path in zip(origin_city_list, cookie_paths):
            pending.append(pool.submit(process_route, origin, destination_list, cookie_path))
            time.sleep(5)  # stagger thread start-up

        for done in concurrent.futures.as_completed(pending):
            try:
                done.result()
            except Exception as e:
                print(f"An error occurred: {e}")
                print(traceback.format_exc())

    read_excel(transport="ZIM", total_requests=total_requests, loss_requests=loss_requests)

# Script entry point: run the full multi-threaded ZIM crawl.
if __name__ == '__main__':
    zim_spider_run()
