"""
需要VPN 国外ip代理池
"""
# -*- coding: utf-8 -*-
from lxml import etree
import time
from loguru import logger
import datetime
import traceback
import threading
import sys
import os
import json
from queue import Queue
from fake_useragent import UserAgent
import concurrent.futures
from src.service.requests_middler import proxy_requests_sys
from src.service.requests_middler import insert_shipping_info
from src.service.requests_middler import read_excel
from src.service.requests_middler import get_abrod_proxies
from src.service.requests_middler import read_transport_json
from src.crawler.cma.cma_get_datademo import get_verify
from curl_cffi import requests as curl_requests
ua = UserAgent()
random_user_agent = ua.random  # one UA string sampled once at import; reused by every get_point() request
stop_threading_event = threading.Event()  # NOTE(review): never set/checked in this file — presumably a cross-module stop flag; confirm

total_requests = 0  # running count of route queries attempted (mutated from worker threads without a lock)
loss_requests = 0   # running count of failed/abnormal queries
ip_queue = Queue()  # recycled pool of proxies shared by the worker threads

current_working_directory = os.getcwd()  # captured once at import; cookie-file helpers resolve paths against this
def proxies_queue():
    """Return a proxy for immediate use.

    If the shared ``ip_queue`` already holds a recycled proxy, pop and
    return it; otherwise fetch a fresh one via ``get_abrod_proxies()``.
    Callers put the proxy back on ``ip_queue`` after a successful request.

    Returns:
        A proxy as produced by ``get_abrod_proxies()``, or ``None`` when
        no proxy could be obtained.
    """
    if not ip_queue.empty():
        # Reuse a proxy recycled by a previous successful request.
        return ip_queue.get()
    proxy = get_abrod_proxies()
    if proxy:
        logger.info(f"获取ip代理成功: {proxy}")
        # Bug fix: the original enqueued the fresh proxy and implicitly
        # returned None, so each worker's first request ran without a proxy.
        return proxy
    logger.error("获取ip代理失败")
    return None

def get_point(origin_city, local_code):
    """Resolve CMA-CGM's display name ("actualName") for a port.

    Queries the public GetAllPlaces endpoint with the city name and scans
    the JSON result for the entry whose placeCode matches ``local_code``.

    Args:
        origin_city: city name used as the search term (upper-cased).
        local_code: place code to match, compared case-insensitively.

    Returns:
        The matching entry's ``actualName``, or ``None`` when nothing
        matches or the request did not succeed.
    """
    global loss_requests
    headers = {
        "accept": "application/json, text/javascript, */*; q=0.01",
        "accept-language": "zh-CN,zh;q=0.9",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "priority": "u=0, i",
        "referer": "https://www.cma-cgm.com/",
        "sec-ch-device-memory": "8",
        "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-arch": "\"x86\"",
        "sec-ch-ua-full-version-list": "\"Google Chrome\";v=\"129.0.6668.59\", \"Not=A?Brand\";v=\"8.0.0.0\", \"Chromium\";v=\"129.0.6668.59\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-model": "\"\"",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": random_user_agent,
        "x-dtpc": "8$105720128_162h17vPJWWHKHCHAEWSKSLCIKMDGADHUHKPCMF-0e0",
        "x-requested-with": "XMLHttpRequest"
    }
    cookies = {
        "datadome": "xRCK9bNd5JppYnqzsxyb5gbTZuDfoCik0_yi4CNV19r86FCThKTKk1WXmqSSTLYGVri6RJoAS2yHUho5_prDyLOnCazxytXREeHIUZ4sL1gIfGibAQ6g5o_ODFhontKM"
    }
    url = "https://www.cma-cgm.com/api/PortsWithInlands/GetAllPlaces"
    params = {
        "id": origin_city.upper()
    }
    res = curl_requests.get(url, headers=headers, cookies=cookies, params=params)
    if res:
        for info in res.json():
            # Guard: an entry may lack a placeCode; the original crashed with
            # AttributeError calling .upper() on None in that case.
            place_code = info.get('placeCode')
            if place_code and place_code.upper() == local_code.upper():
                return info['actualName']
    else:
        # Bug fix: the original tested `res == 0`, a sentinel returned by the
        # project middleware but never by curl_requests.get(), so failed
        # requests were silently dropped without being counted.
        loss_requests += 1
    return None

def get_datadome():
    """Fetch a fresh DataDome token via get_verify(); None when it fails."""
    # get_verify() may yield a falsy value on failure — normalize that to None.
    token = get_verify()
    return token if token else None


def load_cookies_from_json(filename):
    """Load cookies from a JSON file.

    Args:
        filename: absolute path, or a path relative to the directory the
            process started in (``current_working_directory``).

    Returns:
        The parsed cookie dict, or ``None`` when the file does not exist.
    """
    # Consistency fix: the original tested os.path.exists() on the raw name
    # but opened the cwd-joined path — those diverge for relative names if
    # the process chdir()s after import. Resolve once, then test and open
    # the same path.
    path = os.path.join(current_working_directory, filename)
    if not os.path.exists(path):
        return None
    with open(path, 'r', encoding='utf-8') as json_file:
        return json.load(json_file)


def save_cookies_to_json(datadome_value, filename):
    """Persist a DataDome token as a cookies JSON file.

    Args:
        datadome_value: the ``datadome`` cookie value to store.
        filename: target file; relative names are resolved against
            ``current_working_directory``.
    """
    cookies = {
        "datadome": datadome_value
    }
    path = os.path.join(current_working_directory, filename)
    with open(path, 'w', encoding='utf-8') as json_file:
        json.dump(cookies, json_file, indent=4)
    # Bug fix: the confirmation previously printed the literal "(unknown)"
    # (a broken format string) instead of the destination path.
    print(f"Cookies 已成功保存到 {path}")

def cma_transport(origin_city: str, destination_city: str, cookie_file: str) -> None:
    """Scrape CMA-CGM routing-finder schedules for one origin/destination pair.

    Looks both cities up in maeu_ports.json for their UN location codes,
    resolves their display names from cma_query.json, POSTs the search form
    to the routing-finder page (with a cached DataDome cookie and a proxy),
    parses each result card out of the returned HTML, and stores each route
    via insert_shipping_info(). Failures are recorded with
    read_transport_json(). Up to 3 request retries, refreshing the DataDome
    cookie on non-timeout failures.

    Args:
        origin_city: origin port name, matched case-insensitively.
        destination_city: destination port name, matched case-insensitively.
        cookie_file: path of the JSON file caching this worker's DataDome cookie.
    """
    global total_requests
    global loss_requests
    total_requests += 1
    url = "https://www.cma-cgm.com/ebusiness/schedules/routing-finder"
    port_mapping = {}
    # file_path = "maeu_ports.json"
    # file_path = os.path.join(current_working_directory + '/src/crawler', file_path)
    # Port metadata lives one directory above this file.
    file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'maeu_ports.json')
    if os.path.exists(file_path):
        with open(file_path, "r") as file:
            Menu_json = json.load(file)
    else:
        logger.error(f"不存在文件: {file_path}")
        sys.exit()
    # Index ports by upper-cased name -> {portCode, unLocationCode, regionCode}.
    for info in Menu_json['ports']:
        port_mapping[info['portName'].upper()] = {
            'portCode': info.get('portCode'),
            'unLocationCode': info['unLocationCode'],
            'regionCode': info.get('regionCode')
        }
    origin_info = port_mapping.get(origin_city.upper())
    destination_info = port_mapping.get(destination_city.upper())
    if origin_info and destination_info:
        mapping = {}
        origin_unLocationCode = origin_info['unLocationCode']
        des_unLocationCode = destination_info['unLocationCode']
        regionCode = destination_info.get('regionCode', None)
        # POLDescription = get_point(origin_city, origin_unLocationCode)
        # PODDescription = get_point(destination_city, des_unLocationCode)
        # file_path = 'cma_query.json'
        # file_path = os.path.join(current_working_directory + '/src/crawler/cma', file_path)
        # file_path = os.path.join(current_working_directory, file_path)
        # City-name -> site display-name mapping, cached beside this file.
        file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),'cma_query.json')
        if os.path.exists(file_path):
            with open(file_path, 'r') as f:
                mapping = json.load(f)
                mapping_upper = {key.upper(): value for key, value in mapping.items()}
        # NOTE(review): mapping_upper is only bound inside the branch above —
        # if cma_query.json is missing, the gate below raises NameError.
        # if origin_city not in mapping:
        #     origin_city_query = get_point(origin_city, origin_unLocationCode)
        #     mapping[origin_city] = origin_city_query
        # if destination_city not in mapping:
        #     destination_city_query = get_point(destination_city, des_unLocationCode)
        #     mapping[destination_city] = destination_city_query
        # Rewrites the mapping unchanged — a no-op while the update logic
        # above stays commented out.
        with open(file_path, 'w') as f:
            json.dump(mapping, f, ensure_ascii=False, indent=4)
        # NOTE(review): the gate checks upper-cased keys, but the form data
        # below indexes `mapping` with the arguments' original casing —
        # KeyError if cma_query.json keys differ in case from the inputs.
        if mapping_upper.get(origin_city.upper()) and mapping_upper.get(destination_city.upper()):
            date = datetime.datetime.now().strftime('%b-%Y')
            # Routing-finder form payload: search from the 1st of the current
            # month, 3-month date range, by departure date.
            data = {
                "ActualPOLDescription": mapping[origin_city],
                "ActualPODDescription": mapping[destination_city],
                "ActualPOLType": "Port",
                "ActualPODType": "Port",
                "polDescription": mapping[origin_city],
                "podDescription": mapping[destination_city],
                "IsDeparture": "True",
                "SearchDate": f"01-{date}",
                "DateRange": "3"
            }
            retry_count = 0
            headers = {
                "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
                "accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
                "cache-control": "no-cache",
                "content-type": "application/x-www-form-urlencoded",
                "origin": "https://www.cma-cgm.com",
                "pragma": "no-cache",
                "priority": "u=0, i",
                "referer": "https://www.cma-cgm.com/ebusiness/schedules/routing-finder",
                "sec-ch-device-memory": "8",
                "sec-ch-ua": "\"Microsoft Edge\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
                "sec-ch-ua-arch": "\"x86\"",
                "sec-ch-ua-full-version-list": "\"Microsoft Edge\";v=\"129.0.2792.52\", \"Not=A?Brand\";v=\"8.0.0.0\", \"Chromium\";v=\"129.0.6668.59\"",
                "sec-ch-ua-mobile": "?0",
                "sec-ch-ua-model": "\"\"",
                "sec-ch-ua-platform": "\"Windows\"",
                "sec-fetch-dest": "document",
                "sec-fetch-mode": "navigate",
                "sec-fetch-site": "same-origin",
                "sec-fetch-user": "?1",
                "upgrade-insecure-requests": "1",
                "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0"
            }
            # Up to 3 attempts: reuse (or mint) a DataDome cookie, grab a proxy,
            # POST the form; on blocked responses refresh the cookie and retry.
            while retry_count < 3:
                cookies = load_cookies_from_json(cookie_file)
                if cookies and "datadome" in cookies:
                    datadome = cookies["datadome"]
                    print(f"已获取 {origin_city} 的现有 datadome:", datadome)
                else:
                    datadome = get_datadome()
                    save_cookies_to_json(datadome, cookie_file)
                    cookies = load_cookies_from_json(cookie_file)
                    datadome = cookies["datadome"]
                # datadome = get_datadome()
                # save_cookies_to_json(datadome, cookie_file)
                proxies = proxies_queue()
                # res = requests_sys(url, headers=headers, cookies=cookies, data=data, POST=True)
                res = proxy_requests_sys(url, headers=headers, cookies=cookies, data=data, proxies=proxies, POST=True)
                if res:
                    # Successful request: recycle the proxy for other workers.
                    ip_queue.put(proxies)
                    logger.info(f"当前查询 起点:{origin_city} ----> 终点: {destination_city}")
                    logger.info(f"当前请求总数: {total_requests}, 失败请求总数: {loss_requests}")
                    html = etree.HTML(res.text)
                    num = 1
                    html_retry_count = 0

                    # Walk result cards //ul/li[num]/article by index; tolerate
                    # up to 3 missing indices before giving up (loop exits via
                    # the `break` branches below, not the condition).
                    while True:
                        if html.xpath(f"//ul/li[{num}]/article"):
                            item = {}
                            if not html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[3]/span/text()"):
                                logger.error(f"{origin_city}: {destination_city} 数据结构异常,疑似修改")
                                loss_requests += 1
                                read_transport_json(transport='CMA', origin_city=origin_city, des_city=destination_city,
                                                    fail_result="数据结构异常,疑似修改")
                                break
                            depart_date = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[1]/span/text()")[0]
                            arrive_date = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[last()]/span/text()")[0]
                                                #//ul/li[4]/article/div[3]/div[1]/ul/li[5]/ul/li[1]/dl/div[1]/dd
                            # Vessel / voyage / lane cells move around in the markup,
                            # hence the fallback xpaths joined with `or`.
                            vessle = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[2]/ul/li[1]/dl/div[1]/dd/text()") or html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[5]/ul/li[1]/dl/div[1]/dd/text()")

                            voyage_number = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[2]/ul/li[2]/dl/div[2]/dd/a/text()") or html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[2]/ul/li[2]/dl/div[1]/dd/a/text()")
                            shipping_lanes = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[2]/ul/li[2]/dl/div[1]/dd/a/text()") or html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[2]/ul/li[2]/dl/div[2]/dd/a/text()")
                            item['origin_code'] = f"{origin_city}({origin_unLocationCode})"
                                                    #//ul/li[1]/article/div[3]/div[1]/ul/li[5]/ul/li[2]/dl/div[1]/dd/a
                            #des_city = html.xpath(f"//ul/li[{num}]/article/div[3]/div[1]/ul/li[last()]/div/a[1]/text()")[1].split(',')[0].strip()
                            if regionCode:
                                item['destination_code'] = f"{destination_city}, {regionCode}({des_unLocationCode})"
                            else:
                                item['destination_code'] = f"{destination_city}({des_unLocationCode})"
                            # Site dates look like "Monday, 01-Jan-2024"; normalize to ISO.
                            item['depart_date'] = datetime.datetime.strptime(depart_date, "%A, %d-%b-%Y").strftime("%Y-%m-%d")
                            item['arrive_date'] = datetime.datetime.strptime(arrive_date, "%A, %d-%b-%Y").strftime("%Y-%m-%d")
                            item['transport_contractor'] = "CMA"
                            # NOTE(review): the guard tests div[3] but the value is read
                            # from div[2] — looks like a copy-paste mismatch; confirm.
                            days_difference = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/div/div/text()")[0].split('D')[0].strip() if html.xpath(f"//ul/li[{num}]/article/div[3]/div[1]/div/div/text()") else None
                            item['shipping_lanes'] = shipping_lanes[0] if shipping_lanes else None
                            item['haulage_time'] = days_difference
                            item['ship_name'] = vessle[0] if vessle else None
                            item['voyage_number'] = voyage_number[0] if voyage_number else None
                            item['transfer_code'] = None  # default transfer-port code (None = direct route)
                            if html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[last() - 2]/div/a[1]/text()"):
                                des_city = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[last() - 2]/div/a[1]/text()")[1].split(',')[0].strip()
                                if "tianjin xingang" in des_city:
                                    des_city = "tianjin"

                                # If the intermediate stop's country is US, treat that
                                # port as the transshipment port.
                                des_country = html.xpath(f"//ul/li[{num}]/article/div[2]/div[1]/ul/li[last() - 2]/div/a[1]/text()")[1].split(',')[-1].strip()
                                trans_code = port_mapping.get(des_city.upper()).get('unLocationCode')
                                if des_country.upper() == str("US").upper():
                                    item['transfer_code'] = f"{des_city.capitalize()}({trans_code.upper()})"
                                    if item.get('transfer_code'):
                                        # Drop transfer codes for ports absent from maeu_ports.json.
                                        if not port_mapping.get(item['transfer_code'].split('(')[0].upper()):
                                            item['transfer_code'] = None

                            # print(item)
                            insert_shipping_info(item)
                        elif html_retry_count == 3:
                            break
                        else:
                            html_retry_count += 1
                        num += 1
                    break

                elif res == 0:
                    # Middleware returns 0 on timeout: retry with the same cookie.
                    logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 请求超时")
                    retry_count += 1
                    continue
                else:
                    # Blocked / failed response: refresh the DataDome cookie and retry.
                    datadome = get_datadome()
                    logger.info(f"更换 datadome cookies参数 --> {datadome}")
                    save_cookies_to_json(datadome, cookie_file)
                    retry_count += 1
                    continue
            if retry_count == 3:
                # All attempts exhausted: record the pair as a timeout failure.
                read_transport_json(transport='CMA', origin_city=origin_city, des_city=destination_city,
                                    fail_result="请求超时")

def process_route(origin_city, destination_list, cookie_file):
    """Query CMA routes from one origin to every destination, sequentially."""
    for dest in destination_list:
        cma_transport(origin_city, dest, cookie_file)

def cma_spider_run():
    """Entry point: fan out one thread per origin city and scrape all routes.

    Loads origin/destination lists from origin_and_destination.json, pairs
    each origin with a per-city DataDome cookie file, runs process_route()
    per origin in a thread pool, then reports totals via read_excel().
    """
    global total_requests
    global loss_requests
    # origin_city_list = [
    #     "Shanghai", "XIAMEN", "NANSHA", "QINGDAO", "YANTIAN", "NINGBO"
    # ]

    # destination_list = [
    #     "Los Angeles", "Long Beach", "Atlanta", "Baltimore", "Boston", "Buffalo", "Calgary", "Charleston",
    #     "Chicago", "Chippewa Falls", "Cincinnati", "Cleveland", "Dallas", "Denver",
    #     "Detroit", "Edmonton", "EL Paso", "Halifax", "Houston", "Jacksonville",
    #     "Kansas City", "Louisville", "Memphis", "Miami", "Minneapolis", "Montreal",
    #     "Nashville", "New Orleans", "New York", "Norfolk", "Oakland", "Omaha",
    #     "Philadelphia", "Pittsburgh", "Portland", "Prince Rupert", "Saint Louis",
    #     "Salt Lake City", "San Antonio", "Saskatoon", "Savannah", "Seattle",
    #     "Tacoma", "Tampa", "Toronto", "Vancouver", "Winnipeg"
    # ]
    json_path_one = r"origin_and_destination.json"
    # current_working_directory_one = os.getcwd() + "/src/crawler"
    # current_working_directory_one = os.getcwd()  # local run
    # json_path = os.path.join(current_working_directory_one, json_path_one)
    # Config file lives one directory above this file.
    json_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), json_path_one)

    with open(f'{json_path}', 'r', encoding='utf-8') as f:
        tmp = json.loads(f.read())

    origin_city_list = tmp['origin_city_list']
    destination_list = tmp['destination_list']
    del tmp, json_path
    # NOTE(review): the hard-coded lists below override the values just
    # loaded from origin_and_destination.json, making that load dead weight —
    # confirm this single-origin (TIANJIN) run is intentional.
    origin_city_list = [
        "TIANJIN"
    ]
    destination_list = [
        "Los Angeles", "Long Beach", "Atlanta", "Baltimore", "Boston", "Buffalo", "Calgary", "Charleston",
        "Chicago", "Chippewa Falls", "Cincinnati", "Cleveland", "Dallas", "Denver",
        "Detroit", "Edmonton", "EL Paso", "Halifax", "Houston", "Jacksonville",
        "Kansas City", "Louisville", "Memphis", "Miami", "Minneapolis", "Montreal",
        "Nashville", "New Orleans", "New York", "Norfolk", "Oakland", "Omaha",
        "Philadelphia", "Pittsburgh", "Portland", "Prince Rupert", "Saint Louis",
        "Salt Lake City", "San Antonio", "Saskatoon", "Savannah", "Seattle",
        "Tacoma", "Tampa", "Toronto", "Vancouver", "Winnipeg", "Honolulu", "KAHULUI", "Hilo",
    "Kawaihae", "Nawiliwili", "Charlotte", "Columbus"
    ]
    # One DataDome cookie cache per origin, paired positionally by zip() below.
    # NOTE(review): zip() truncates to the shorter list, so with one origin
    # only cma_datadome_shanghai.json is used — and no filename matches TIANJIN.
    cookie_files = [
        'cma_datadome_shanghai.json',
        'cma_datadome_xiamen.json',
        'cma_datadome_nansha.json',
        'cma_datadome_qingdao.json',
        'cma_datadome_yantian.json',
        'cma_datadome_ningbo.json'
    ]
    # cookie_file_paths = [os.path.join(current_working_directory, file) for file in cookie_files] # Windows
    cookie_file_paths = [os.path.join(os.path.dirname(os.path.abspath(__file__)), file) for file in cookie_files] # Linux
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = []
        for origin_city, cookie_file in zip(origin_city_list, cookie_file_paths):
            future = executor.submit(process_route, origin_city, destination_list, cookie_file)
            futures.append(future)
            time.sleep(1)  # optional: stagger thread start-up

        for future in concurrent.futures.as_completed(futures):
            try:
                future.result()
            except Exception as e:
                print(f"An error occurred: {e}")
                print(traceback.format_exc())
    read_excel(transport="CMA", total_requests=total_requests, loss_requests=loss_requests)

# if __name__ == '__main__':
#     cma_spider_run()