"""
需要国外ip代理池
"""
# -*- coding: utf-8 -*-
from lxml import etree
import re
import time
from loguru import logger
import datetime
import traceback
import threading
import sys
import os
import json
from queue import Queue
from fake_useragent import UserAgent
import concurrent.futures
from src.crawler.yml.YML_verify import get_verify
from src.service.requests_middler import requests_sys
from src.service.requests_middler import proxy_requests_sys
from src.service.requests_middler import insert_shipping_info
from src.service.requests_middler import read_excel
from src.service.requests_middler import read_transport_json
from src.service.requests_middler import get_proxies

# Module-level state shared by all worker threads.
ip_queue = Queue()  # recycled pool of proxy dicts; refilled by proxies_queue()
current_working_directory = os.getcwd()
ua = UserAgent()
# NOTE(review): chosen once at import time, so every request in this process
# reuses the same User-Agent string — confirm that is intended.
random_user_agent = ua.random
stop_threading_event = threading.Event()  # never set within this file (visible scope)

# Rough success/failure counters; mutated from multiple threads without a lock,
# so the totals are best-effort only.
total_requests = 0
loss_requests = 0


def proxies_queue():
    """Return a proxy dict for the next request, or None when none is available.

    When the shared pool (``ip_queue``) is empty a fresh proxy is fetched
    from the provider; otherwise one is taken from the pool.  Callers are
    expected to ``ip_queue.put(proxy)`` back after a successful request.
    """
    if ip_queue.empty():
        proxy = get_proxies()
        # proxy = get_abrod_proxies()
        logger.info(f"获取ip代理成功: {proxy}")
        if proxy:
            # Bug fix: the original queued the proxy but returned None, so the
            # first caller in each cycle made its request with proxies=None.
            return proxy
        logger.error("获取ip代理失败")
        return None
    return ip_queue.get()

def get_point(query, code):
    """Resolve a city name to Yang Ming's display name for a UN/LOC code.

    Queries the point-to-point location autocomplete endpoint with *query*
    and returns the ``LOC_NAME`` of the entry whose ``LOC_CD`` matches
    *code* (case-insensitive), or None when the request fails, the result
    set is empty, or no entry matches.
    """
    headers = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Referer": "https://www.yangming.com/e-service/schedule/P-to-PSearch.aspx",
        "Sec-Fetch-Dest": "empty",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Site": "same-origin",
        "User-Agent": random_user_agent,
        "X-Requested-With": "XMLHttpRequest",
        "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\""
    }
    url = "https://www.yangming.com/e-service/schedule/PointToPoint_CountryLocationList.ashx"
    params = {
        "q": query.upper(),
        "limit": "99999",
        "timestamp": str(int(time.time() * 1000)),  # millisecond cache-buster
        "p_Type": "F",
        "p_floc": ""
    }
    res = requests_sys(url, headers=headers, params=params, GET=True)
    if not res:
        # requests_sys signals timeout with 0 and other failures falsy;
        # either way there is nothing to parse (original also returned None).
        return None
    js_data = res.json()
    if not js_data:
        logger.error(f"{query}: 未有相关数据")
        return None
    for info in js_data:
        # Guard missing/None LOC_CD — the original crashed with AttributeError
        # on entries without that key.
        if (info.get("LOC_CD") or "").upper() == code.upper():
            return info['LOC_NAME']
    logger.error(f"{query}: 未查询到编码")
    return None

def get_ContentPlaceHolder1_hidST(hiddate_Start):
    """Fetch the anti-forgery ``ST`` token from the point-to-point result page.

    *hiddate_Start* is a month string ``"YYYY/MM"`` used to fill the date
    range of a fixed Shanghai→Los Angeles form post whose only purpose is
    to obtain the hidden ``ContentPlaceHolder1_hidST`` value embedded in
    the response HTML.  Retries up to 5 times through the proxy pool.

    Returns the token string, or None when every attempt fails.
    """
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Content-Type": "application/x-www-form-urlencoded",
        "Origin": "https://www.yangming.com",
        "Pragma": "no-cache",
        "Referer": "https://www.yangming.com/e-service/schedule/P-to-PSearch.aspx",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-User": "?1",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": random_user_agent,
        "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\""
    }
    url = "https://www.yangming.com/e-service/schedule/PointToPointResult.aspx"
    # Captured ASP.NET form state; the viewstate blobs must be sent verbatim.
    data = {
        "__EVENTTARGET": "btnSearch0",
        "__EVENTARGUMENT": "",
        "__VIEWSTATE": "zaGqsZ8bCvh7kJqHNZNVuQSxNuEpD+YeRdpIWvp7toYwTGD6Sz6MKB17pn/t13CljAGbOQmy5au6I2FkFVeu8c9c6b5syYS8Meg2Ec+k+b9ftFmONTaLCrNUtuaDQ1g6dvvZNPmPUcYKCf4RFKnNZYd3ujpiHG9jc9j/DPJYircu9FyVpOBk4F4oSJkP1WDdyedQ4qzyZ//nxXd0XuINh3n32TPmUUHUC5IE2YG4jMJfsu0Tp4voJ1oDOgcOKzdJjzap99JIqGs3xqMwkBaB0fcmqOeQP6rrKXI7LLjXEiN7b+19QNMO2UEPndv23sXi",
        "__VIEWSTATEGENERATOR": "894CB138",
        "__VIEWSTATEENCRYPTED": "",
        "__PREVIOUSPAGE": "-VTgQFuWNhddfTTYCzVqH47RFTlzqOBKlROMWtADwuCZj-B7mhqv5pFe-Kj1OdSWf39WmU4FsCP4-NHABD2PX4fhFDsMlpkRQIUNqp0VE9X5qb0hj7JI_6KSwJQbJEzS0",
        "__EVENTVALIDATION": "hRGW99ayFpc1yDRs0yeX7DAsTsL08zTKfOtvXwzSHEcuWRYHAngYJQdNr3DkzsJKYdgrtGennz1AhfmGNIjsGX/8zSehEpC0K8D1h3qprof4nPmoEgOeR8Augz50Kjvhec6M/B+VBVqd3Wsxy/brTAjlQH8XK/9SQV9ViWS955zLloV251aX44oZCkbAKrcWDWB40a/zUj0QeRn9KugUI9XcxhSme4K8mCVBqHtW+GcZqHkWDZVjNhxosUaBB6isMBPGT+SsIAYftZWXSOYzkWD66rYR7xVKX7xDBjDO2YALExLRZiQKaiqPDJ7Ow2W3eR4GhfvRfvpfFu3GkcjoLCOPyr+qc/48MnGdFAfZ7+g7qg4/GUntKRKfHzQteb+rl4NlorpRmiyWCDC3blv9hUoWDAaCT5XmFthaTRyZEFWb85tHBvy5xb8cNSc2ytHDIINJMmDgIYKHbj8wQVww0z5w4PP+zivlAn2dMDsZDmCiHZvd1aSR9XKYTQs7gZPyaT63AtHyYfKsyjnnkGIrEi50qxxhCmq6A306hlBkuRUV5t/jEg8WJGrDmtY1demE",
        "txtFrom": "SHANGHAI, SH (CNSHA)",
        "hidFrom": "CNSHA",
        "hidFrom_O": "SHANGHAI, SH (CNSHA)",
        "hidFrom_txt": "shanghai",
        "radServiceTerm1": "Y",
        "txtTo": "LOS ANGELES, CA (USLAX)",
        "hidTo": "USLAX",
        "hidTo_txt": "Los Angeles",
        "radServiceTerm2": "Y",
        "hiddate_Start": f"{hiddate_Start}/01",
        "hiddate_End": f"{hiddate_Start}/30",
        "hidFromHome": "Y",
        "hidPriorityType": "ALL",
        "hidPeriodType": "DEP",
        "hidFromType": "Input",
        "hidToType": "Input"
    }
    # Hoisted out of the retry loop; the original scanned the page twice
    # per attempt with re.findall.
    st_pattern = re.compile(r'id="ContentPlaceHolder1_hidST" value="(.*?)"')
    for _ in range(5):
        proxies = proxies_queue()
        res = proxy_requests_sys(url, headers=headers, data=data, POST=True, proxies=proxies)
        if not res:
            if res == 0:
                logger.error("获取 ContentPlaceHolder1_hidST 请求超时")
            else:
                logger.error("获取 ContentPlaceHolder1_hidST 失败 -----> ")
            continue
        if "500Error" in res.text:
            logger.error("获取st失败")
            continue
        # The proxy produced a usable response — recycle it into the pool.
        ip_queue.put(proxies)
        match = st_pattern.search(res.text)
        if match:
            return match.group(1)
        if "input below code to make sure you're not a robot" in res.text:
            logger.error("触发验证码")
            # Solve the captcha for this proxy; return value intentionally unused.
            get_verify(proxies=proxies)
            continue
        logger.error("获取 ContentPlaceHolder1_hidST 失败")
    return None

def get_cookies():
    """Load the Yang Ming homepage and return its session cookies.

    Tries up to three times; returns None when every attempt fails.
    """
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "none",
        "Sec-Fetch-User": "?1",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0",
        "sec-ch-ua": "\"Microsoft Edge\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\""
    }
    url = "https://www.yangming.com/"
    for _attempt in range(3):
        response = requests_sys(url, headers=headers, GET=True)
        if response:
            return response.cookies
    return None

def yangming_transport(origin_city, destination_city):
    global total_requests
    global loss_requests
    port_mapping = {}
    # file_path = "../maeu_ports.json"
    # file_path = "maeu_ports.json"
    # file_path = os.path.join(current_working_directory + r'/src/crawler', file_path)
    file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'maeu_ports.json')
    if os.path.exists(file_path):
        with open(file_path, "r") as file:
            Menu_json = json.load(file)
    else:
        logger.error(f"不存在文件: {file_path}")
        sys.exit()
    for info in Menu_json['ports']:
        port_mapping[info['portName'].upper()] = {
            'portCode': info.get('portCode'),
            'unLocationCode': info['unLocationCode'],
            'regionCode': info.get('regionCode')
        }
    origin_info = port_mapping.get(origin_city.upper())
    destination_info = port_mapping.get(destination_city.upper())
    if origin_info and destination_info:
        total_requests += 1
        mapping = {}
        origin_unLocationCode = origin_info['unLocationCode']
        if origin_city.upper() == "NINGBO":
            origin_unLocationCode = "CNNGB"
        elif origin_city.upper() == "TIANJIN":
            origin_unLocationCode = "CNXGG"
            origin_city = "xingan"
        des_unLocationCode = destination_info['unLocationCode']
        regionCode = destination_info.get('regionCode', None)
        # file_path = 'yangming_query.json'
        # file_path = os.path.join(current_working_directory + '/src/crawler/yml', file_path)
        file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'yangming_query.json')
        if os.path.exists(file_path):
            with open(file_path, 'r') as f:
                mapping = json.load(f)

        # if origin_city not in mapping:
        #     origin_city_query = get_point(origin_city, origin_unLocationCode)
        #     if origin_city_query is None:
        #         loss_requests += 1
        #         return
        #     mapping[origin_city] = origin_city_query
        # if destination_city not in mapping:
        #     destination_city_query = get_point(destination_city, des_unLocationCode)
        #     if destination_city_query is None:
        #         loss_requests += 1
        #         read_transport_json(transport='YML', origin_city=origin_city, des_city=destination_city,
        #                             fail_result="未查询到编码")
        #         return
        #     mapping[destination_city] = destination_city_query

        with open(file_path, 'w') as f:
            json.dump(mapping, f, ensure_ascii=False, indent=4)
        # cookies = {}
        # cookies.update(get_cookies())
        retry_count = 0
        while retry_count < 3:
            hiddate_Start = datetime.datetime.now().strftime("%Y/%m")
            ContentPlaceHolder1_hidST = get_ContentPlaceHolder1_hidST(hiddate_Start)
            if not ContentPlaceHolder1_hidST:
                loss_requests += 1
                retry_count += 1
                continue
            headers = {
                "Host": "www.yangming.com",
                "Pragma": "no-cache",
                "Cache-Control": "no-cache",
                "sec-ch-ua-platform": "\"Windows\"",
                "X-Requested-With": "XMLHttpRequest",
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0",
                "Accept": "application/json, text/javascript, */*; q=0.01",
                "sec-ch-ua": "\"Microsoft Edge\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
                "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
                "sec-ch-ua-mobile": "?0",
                "Origin": "https://www.yangming.com",
                "Sec-Fetch-Site": "same-origin",
                "Sec-Fetch-Mode": "cors",
                "Sec-Fetch-Dest": "empty",
                "Referer": "https://www.yangming.com/e-service/schedule/PointToPointResult.aspx",
                "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6"
            }
            url = "https://www.yangming.com/e-service/schedule/VesselRouting.ashx"
            current_date = datetime.datetime.now()
            StartDate = current_date.strftime('%Y%m%d')
            # EndDate = current_date.strftime('%Y%m')
            date = datetime.datetime.now()
            toDate = (date + datetime.timedelta(days=27)).strftime('%Y%m%d')
            data = {
                "RoutingType": "BY_DATE",
                "From": origin_unLocationCode,
                "To": des_unLocationCode,
                "PriorityType": "ALL",
                "PeriodType": "DEP",
                "StartDate": StartDate,
                # "EndDate": f"{EndDate}30",
                "EndDate": toDate,
                "VoyageCode": "",
                "CaseID": "",
                "FromTerm": "Y",
                "ToTerm": "Y",
                "fromHome": "Y",
                "FromTxt": origin_city,
                "ToTxt": destination_city,
                "Version": "www.yangming.com",
                "ST": ContentPlaceHolder1_hidST
            }
            proxies = proxies_queue()
            # res = requests_sys(url, headers=headers, data=data, POST=True)
            res = proxy_requests_sys(url, headers=headers, data=data, POST=True, proxies=proxies)
            if res:
                # cookies.update(res.cookies)
                logger.info(f"当前查询 起点:{origin_city}：{origin_unLocationCode} ----> 终点: {destination_city}： {des_unLocationCode}")
                logger.info(f"当前请求总数: {total_requests}, 失败请求总数: {loss_requests}")
                try:
                    js_data = json.loads(res.text)
                    inner_json_str = json.loads(js_data)
                except Exception as e:
                    logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 解析html页面异常")
                    read_transport_json(transport='YML', origin_city=origin_city, des_city=destination_city,
                                   fail_result="解析html页面异常")
                    loss_requests += 1
                    return
                if inner_json_str.get("result"):
                    html = etree.HTML(inner_json_str['result'])
                    dep_date_list = html.xpath("//table/tbody/tr/td[2]/span[2]/text()")
                    if not dep_date_list:
                        retry_count += 1
                        logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 第{retry_count}次 重新查询中")
                        continue
                    # ip_queue.put(proxies)
                    index = 1
                    for num in range(len(dep_date_list)):
                        item = {}
                        dep_date = html.xpath(f"//table/tbody/tr[{index}]/td[2]/span[2]/text()")[0] if html.xpath(f"//table/tbody/tr[{index}]/td[2]/span[2]/text()") else None
                        arr_date = html.xpath(f"//table/tbody/tr[{index}]/td[4]/span[2]/text()")[0] if html.xpath(f"//table/tbody/tr[{index}]/td[4]/span[2]/text()") else None
                        arr_place = html.xpath(f"//table/tbody/tr[{index}]/td[4]/span[1]/text()")[0] if html.xpath(f"//table/tbody/tr[{index}]/td[4]/span[1]/text()") else None
                        ship_name = html.xpath(f"//table/tbody/tr[{index}]/td[7]/a/text()")[0] if html.xpath(f"//table/tbody/tr[{index}]/td[7]/a/text()") else None
                        ship_code = html.xpath(f"//table/tbody/tr[{index}]/td[6]/text()")[0] if html.xpath(f"//table/tbody/tr[{index}]/td[6]/text()") else None
                        transit = html.xpath(f"//table/tbody/tr[{index}]/td[5]/text()")[0] if html.xpath(f"//table/tbody/tr[{index}]/td[5]/text()") else None
                        if "XINGAN" == origin_city.upper():
                            item['origin_code'] = f"TIANJIN(CNTJN)"
                        else:
                            item['origin_code'] = f"{origin_city}({origin_info['unLocationCode']})"
                        if regionCode:
                            item['destination_code'] = f"{destination_city}, {regionCode}({des_unLocationCode})"
                        else:
                            item['destination_code'] = f"{destination_city}({des_unLocationCode})"
                        # item['destination_code'] = f"{arr_place.strip()}({port_mapping.get(arr_place.upper())['unLocationCode']})"
                        item['depart_date'] = datetime.datetime.strptime(dep_date, '%Y/%m/%d').strftime('%Y-%m-%d')
                        item['arrive_date'] = datetime.datetime.strptime(arr_date, '%Y/%m/%d').strftime('%Y-%m-%d')
                        item['transport_contractor'] = "YML"
                        item['haulage_time'] = transit
                        item['ship_name'] = ship_name
                        item['voyage_number'] = ship_code
                        item['shipping_lanes'] = ship_code[:3] # 航线编号
                        sys_index = 0
                        if destination_city.upper() != arr_place.strip().upper():
                            item['transfer_code'] = f"{arr_place.strip()}({port_mapping.get(arr_place.upper())['unLocationCode']})"
                        else:
                            while True:
                                sys_des_mode = html.xpath(f"//table/tbody/tr[{index + 1}]/td/div/table[1]//tr[last() - {sys_index}]/td[8]/div/text()")[0]
                                if sys_des_mode == "VESSEL":
                                    break
                                sys_index += 1
                            sys_des_city = html.xpath(f"//table/tbody/tr[{index + 1}]/td/div/table[1]//tr[last() - {sys_index}]/td[3]/div/text()")[0]
                            if destination_city.upper() != sys_des_city.strip().upper():
                                item['transfer_code'] = f"{sys_des_city.strip()}({port_mapping.get(sys_des_city.upper())['unLocationCode']})"
                        index += 2
                        if item.get('transfer_code'):
                            if not port_mapping.get(item['transfer_code'].split('(')[0].upper()):
                                del item['transfer_code']
                        # print(item)
                        insert_shipping_info(item)
                else:
                    loss_requests += 1
                    logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 未查询到数据")
                    read_transport_json(transport='YML', origin_city=origin_city, des_city=destination_city,
                                   fail_result="未查询到数据")
                    retry_count += 1
                break
            elif res == 0:
                logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 未查询到数据, 请求超时")
                retry_count += 1
        if retry_count == 3:
            logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 或存在ip请求异常或服务器异常")
            read_transport_json(transport='YML', origin_city=origin_city, des_city=destination_city,
                           fail_result="或存在ip请求异常或服务器异常")
#YML
def process_route(origin_city, destination_list):
    """Sequentially crawl every destination for a single origin city."""
    for dest in destination_list:
        yangming_transport(origin_city, dest)

def yangming_spider_run():
    """Entry point: crawl every origin→destination route listed in the config JSON.

    Reads the origin/destination city lists from a JSON file one directory
    above this module, fans out one worker thread per origin city, waits
    for all of them, and finally writes a run summary via read_excel.
    """
    global total_requests
    global loss_requests

    # NOTE(review): " - 副本" means "copy"; the original code first assigned
    # "origin_and_destination.json" and immediately overwrote it with this
    # copy-file name (dead assignment removed here). Confirm which file is
    # actually intended for production runs.
    json_path_one = r"origin_and_destination - 副本.json"
    json_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), json_path_one)
    with open(json_path, 'r', encoding='utf-8') as f:
        tmp = json.load(f)

    origin_city_list = tmp['origin_city_list']
    destination_list = tmp['destination_list']

    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = []
        for origin_city in origin_city_list:
            futures.append(executor.submit(process_route, origin_city, destination_list))
            # Stagger thread start-up so all workers don't hit the site at once.
            time.sleep(1)

        for future in concurrent.futures.as_completed(futures):
            try:
                future.result()
            except Exception as e:
                # Surface worker exceptions with a full traceback instead of
                # letting them vanish inside the executor.
                print(f"An error occurred: {e}")
                print(traceback.format_exc())

    read_excel(transport="YML", total_requests=total_requests, loss_requests=loss_requests)

# Script entry point: run the full Yang Ming schedule crawl.
if __name__ == '__main__':
    yangming_spider_run()