# -*- coding: utf-8 -*-
import requests
from lxml import etree
import re
import time
from loguru import logger
import random
import datetime
import execjs
from bs4 import BeautifulSoup
import traceback
import threading
import sys
import os
import json
from queue import Queue, Empty
from fake_useragent import UserAgent
import concurrent.futures
from src.service.requests_middler import requests_sys
from src.service.requests_middler import proxy_requests_sys
from src.service.requests_middler import insert_shipping_info
from src.service.requests_middler import read_excel
from src.service.requests_middler import get_proxies
from src.service.requests_middler import read_transport_json
ip_queue = Queue()  # shared pool of proxy dicts, reused across worker threads

# Global request counters, updated by the crawl functions below and
# reported at the end of matson_spider_run via read_excel.
total_requests = 0
loss_requests = 0

ua = UserAgent()
# One random User-Agent chosen at import time and reused for every request.
random_user_agent = ua.random
stop_threading_event = threading.Event()  # declared but never set/checked in this file
current_working_directory = os.getcwd()  # legacy; paths are now derived from __file__
def proxies_queue():
    """Return a proxy for the next request, refilling the pool if needed.

    When ``ip_queue`` is empty a fresh proxy is fetched via ``get_proxies``
    and returned directly; otherwise an already-pooled proxy is popped and
    returned. Callers are expected to ``ip_queue.put(...)`` the proxy back
    after a successful request so it can be reused.

    Returns:
        The proxy object from ``get_proxies`` (or the queue), or ``None``
        when a fresh proxy could not be obtained.
    """
    if ip_queue.empty():
        proxy = get_proxies()
        logger.info(f"获取ip代理成功: {proxy}")
        if proxy:
            # Bug fix: the original queued the proxy and fell through,
            # returning None — the caller then made requests with no proxy
            # and later pushed None back into the pool. Hand the fresh
            # proxy straight to the caller instead.
            return proxy
        logger.error(f"获取ip代理失败")
        return None
    return ip_queue.get()


def get_matson_options():
    """Scrape Matson's interactive vessel schedule page and cache its
    port ``<option>`` list as ``matson_options.json`` beside this module.

    The cached JSON maps option text (port name) to option value (port
    code). A timeout (sentinel ``0`` from ``requests_sys``) bumps the
    global ``loss_requests`` counter.
    """
    global total_requests
    global loss_requests
    headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "accept-language": "zh-CN,zh;q=0.9",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "priority": "u=0, i",
        "referer": "https://www.matson.com/matnav/index.html",
        "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "document",
        "sec-fetch-mode": "navigate",
        "sec-fetch-site": "same-origin",
        "sec-fetch-user": "?1",
        "upgrade-insecure-requests": "1",
        "user-agent": random_user_agent
    }
    page_url = "https://www.matson.com/matnav/schedules/interactive_vessel_schedule.html"
    response = requests_sys(page_url, headers=headers, GET=True)
    if response:
        page = BeautifulSoup(response.content, 'html.parser')
        option_tags = page.select('div select option')
        # The first and last <option> entries are placeholders — skip them.
        name_to_code = {}
        for tag in option_tags[1:-1]:
            name_to_code[tag.text] = tag['value']
        cache_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'matson_options.json')
        with open(cache_path, 'w', encoding='utf-8') as fh:
            json.dump(name_to_code, fh, ensure_ascii=False, indent=4)
    elif response == 0:
        loss_requests += 1

def check_transfer(selectedOrigin, selectedDestination, value):
    """Query Matson's details endpoint for one sailing and detect rail legs.

    Posts the origin/destination pair plus the vessel-voyage ``value`` to
    ``details.php``. When any entry of the response's ``transportaionList``
    mentions "Rail", the whole JSON payload is returned so the caller can
    extract transfer-port and arrival data; otherwise returns ``None``.
    """
    request_headers = {
        "accept": "*/*",
        "accept-language": "zh-CN,zh;q=0.9",
        "cache-control": "no-cache",
        "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
        "origin": "https://www.matson.com",
        "pragma": "no-cache",
        "priority": "u=1, i",
        "referer": "https://www.matson.com/matnav/schedules/interactive_vessel_schedule.html",
        "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest"
    }
    endpoint = "https://www.matson.com/wp-content/plugins/matson-plugin/Api_calls/details.php"
    form_body = {
        "selectedOrigin": selectedOrigin,
        "selectedDestination": selectedDestination,
        "value": value
    }
    response = requests_sys(endpoint, headers=request_headers, data=form_body, POST=True)
    if not response:
        return None
    details = response.json()
    # A "Rail" entry anywhere in the transport list marks a rail transfer.
    if any("Rail" in leg for leg in details['transportaionList']):
        return details
    return None

def matson_transport(origin_city, destination_city):
    """Crawl Matson sailings for one origin/destination pair and persist them.

    Resolves the origin's option label from ``matson_options.json`` and both
    ports' UN/LOCODEs from ``maeu_ports.json``, then queries the
    ``destinations.php`` and ``search.php`` endpoints through rotating
    proxies (up to 3 attempts). Each sailing is enriched with rail-transfer
    data via ``check_transfer`` and stored with ``insert_shipping_info``;
    failures are recorded through ``read_transport_json``. All exceptions
    are caught and logged so one bad pair cannot kill the worker thread.
    """
    try:
        global total_requests
        global loss_requests
        # origin option label -> destination location code
        mapping = {}
        # file_path = "matson_options.json"
        # file_path = os.path.join(current_working_directory + r'/src/crawler/maston', file_path)
        file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'matson_options.json')
        if os.path.exists(file_path):
            with open(file_path, "r") as file:
                options_json = json.load(file)
        else:
            # Fail fast: without the cached options file nothing can be resolved.
            logger.error(f"不存在文件: {file_path}")
            sys.exit()

        origin_label = options_json.get(origin_city.upper())
        total_requests += 1
        if origin_label:
            # port name (upper-cased) -> codes extracted from maeu_ports.json
            port_mapping = {}
            # file_path = r'/src/crawler' + "/maeu_ports.json"
            # file_path = current_working_directory + file_path
            file_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'maeu_ports.json')
            if os.path.exists(file_path):
                with open(file_path, "r") as file:
                    Menu_json = json.load(file)
            # NOTE(review): if maeu_ports.json is missing, Menu_json is unbound
            # here and the resulting NameError is swallowed by the outer
            # except — consider exiting like the options file above.
            for info in Menu_json['ports']:
                port_mapping[info['portName'].upper()] = {
                    'portCode': info.get('portCode'),
                    'unLocationCode': info['unLocationCode'],
                    'regionCode' : info.get('regionCode') # US state / region code
                }
            origin_info = port_mapping.get(origin_city.upper())
            destination_info = port_mapping.get(destination_city.upper())
            if origin_info and destination_info:
                origin_unLocationCode = origin_info['unLocationCode']
                des_unLocationCode = destination_info['unLocationCode']
                # region code of the destination (included in the output row)
                regionCode = destination_info.get('regionCode')
                headers = {
                    "accept": "*/*",
                    "accept-language": "zh-CN,zh;q=0.9",
                    "cache-control": "no-cache",
                    "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
                    "origin": "https://www.matson.com",
                    "pragma": "no-cache",
                    "priority": "u=1, i",
                    "referer": "https://www.matson.com/matnav/schedules/interactive_vessel_schedule.html",
                    "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
                    "sec-ch-ua-mobile": "?0",
                    "sec-ch-ua-platform": "\"Windows\"",
                    "sec-fetch-dest": "empty",
                    "sec-fetch-mode": "cors",
                    "sec-fetch-site": "same-origin",
                    "user-agent": random_user_agent,
                    "x-requested-with": "XMLHttpRequest"
                }
                url = "https://www.matson.com/wp-content/plugins/matson-plugin/Api_calls/destinations.php"
                data = {
                    "origin": origin_label
                }
                # Up to 3 attempts for the destinations + search round trip.
                retry_count = 0
                while retry_count < 3:
                    proxies = proxies_queue()
                    # res = requests_sys(url, headers=headers, data=data, POST=True)
                    res = proxy_requests_sys(url, headers=headers, data=data, POST=True, proxies=proxies)
                    if res:
                        js_data = res.json()
                        mapping[origin_label] = None
                        # Find the destination's location code among the
                        # destinations offered for this origin.
                        for info in js_data:
                            if info['locationName'].upper() == destination_city.upper():
                                destination_label = info['locationCode']
                                mapping[origin_label] = destination_label
                                break
                        # Proxy worked — return it to the pool for reuse.
                        ip_queue.put(proxies)
                        if mapping.get(origin_label):
                            headers = {
                                "accept": "*/*",
                                "accept-language": "zh-CN,zh;q=0.9",
                                "cache-control": "no-cache",
                                "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
                                "origin": "https://www.matson.com",
                                "pragma": "no-cache",
                                "priority": "u=1, i",
                                "referer": "https://www.matson.com/matnav/schedules/interactive_vessel_schedule.html",
                                "sec-ch-ua": "\"Google Chrome\";v=\"129\", \"Not=A?Brand\";v=\"8\", \"Chromium\";v=\"129\"",
                                "sec-ch-ua-mobile": "?0",
                                "sec-ch-ua-platform": "\"Windows\"",
                                "sec-fetch-dest": "empty",
                                "sec-fetch-mode": "cors",
                                "sec-fetch-site": "same-origin",
                                "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
                                "x-requested-with": "XMLHttpRequest"
                            }
                            url = "https://www.matson.com/wp-content/plugins/matson-plugin/Api_calls/search.php"
                            current_date = datetime.datetime.now()
                            month_date = current_date.strftime("%m")
                            year_date = current_date.strftime("%Y")
                            # Search window: today .. today + 27 days, in MMDDYYYY.
                            date = datetime.datetime.now()
                            toDate = (date + datetime.timedelta(days=27)).strftime('%m%d%Y')
                            date = date.strftime('%m%d%Y')
                            data = {
                                "selectedOrigin": list(mapping.keys())[0],
                                "selectedDestination": mapping[origin_label],
                                # "selectedStartDate": f"{month_date}01{year_date}",
                                "selectedStartDate": date,
                                # "selectedEndDate": f"{month_date}30{year_date}",
                                "selectedEndDate": toDate
                            }
                            proxies = proxies_queue()
                            # res = requests_sys(url, headers=headers, data=data, POST=True)
                            res = proxy_requests_sys(url, headers=headers, data=data, POST=True, proxies=proxies)
                            if res:
                                ip_queue.put(proxies)
                                js_data = res.json()
                                logger.info(f"当前查询 起点:{origin_city} ----> 终点: {destination_city}")
                                logger.info(f"当前请求总数: {total_requests}, 失败请求总数: {loss_requests}")
                                for info in js_data:
                                    vvd = info['vvd']
                                    # Returns the transfer JSON when the routing involves rail.
                                    sys_res = check_transfer(list(mapping.keys())[0], mapping[origin_label], vvd)
                                    item = {}
                                    if sys_res:
                                        logger.info(f'{vvd}: {origin_city} ---> {destination_city} 中转站数据')
                                        # The second-to-last toLocation entry is the transfer port.
                                        if len(sys_res.get('toLocationList')) > 1:
                                            item['transfer_code'] = sys_res.get('toLocationList')[-2]
                                            transfer_code_info = port_mapping.get(item['transfer_code'].upper())['unLocationCode']
                                            item['transfer_code'] = f"{item['transfer_code']}({transfer_code_info})"
                                            # Final arrival date at the destination.
                                            item['arrive_date'] = datetime.datetime.strptime(sys_res.get('arrivalList')[1], '%m-%d-%Y').strftime("%Y-%m-%d")
                                        # NOTE(review): when toLocationList has a single entry neither
                                        # transfer_code nor arrive_date is set, and the KeyError raised
                                        # below is swallowed by the outer except — confirm intended.
                                    else:
                                        # Direct sailing: arrival date comes from the search row.
                                        item['arrive_date'] = datetime.datetime.strptime(info["arrive"],
                                                                                         "%A %m/%d/%Y %H:%M").date().strftime(
                                            "%Y-%m-%d")
                                        item['transfer_code'] = None
                                    item['origin_code'] = f"{origin_city}({origin_unLocationCode})"
                                    # Destination with region code and UN/LOCODE.
                                    item['destination_code'] = f"{destination_city},{regionCode}({des_unLocationCode})"  # check
                                    item['depart_date'] = datetime.datetime.strptime(info["depart"], "%A %m/%d/%Y %H:%M").date().strftime("%Y-%m-%d")
                                    # item['arrive_date'] = datetime.datetime.strptime(info["arrive"],"%A %m/%d/%Y %H:%M").date().strftime("%Y-%m-%d")
                                    item['transport_contractor'] = "Matson"
                                    item['haulage_time'] = info['ttranstime'].split(" ")[0]
                                    item['ship_name'] = info['vessvoy'].split("*")[0]
                                    item['voyage_number'] = info['vessvoy'].split("*")[1] + info['vessvoy'].split("*")[2]
                                    # NOTE(review): no-op guard — transfer_code is already None here.
                                    if item['transfer_code'] is None:
                                        item['transfer_code'] = None
                                    print(item)
                                    insert_shipping_info(item)
                                break
                            elif res == 0:
                                logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 请求超时")
                                retry_count += 1
                        else:
                            # Destination not served from this origin — record and stop retrying.
                            logger.error(f"{origin_city}: {destination_city} 未获取到相应标签")
                            read_transport_json(transport='Matson', origin_city=origin_city, des_city=destination_city,
                                           fail_result="未获取到相应标签")
                            loss_requests += 1
                            break
                    elif res is None:
                        logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 获取数据失败")
                        read_transport_json(transport='Matson', origin_city=origin_city, des_city=destination_city, fail_result="获取数据失败")
                        loss_requests += 1
                        retry_count += 1
                    elif res == 0:
                        logger.info(f"起点:{origin_city} ----> 终点: {destination_city} 请求超时")
                        retry_count += 1
                # All retries exhausted — count the pair as lost.
                if retry_count == 3:
                    loss_requests += 1
                    read_transport_json(transport='Matson', origin_city=origin_city, des_city=destination_city,
                                        fail_result="请求超时")
        else:
            logger.error(f"{origin_city}: 未获取到相应标签")
            read_transport_json(transport='Matson', origin_city=origin_city, des_city=destination_city, fail_result=f"{origin_city}:未获取到相应标签")
            loss_requests += 1
    except Exception as e:
        # Last-resort guard: log and continue so the thread pool keeps running.
        logger.error(f"{origin_city} ----> {destination_city}: {e}")
        print(traceback.format_exc())


def process_route(origin_city, destination_list):
    """Crawl every destination for a single origin city, sequentially."""
    for dest in destination_list:
        matson_transport(origin_city, dest)

def matson_spider_run():
    """Entry point: fan out Matson crawls across all configured origins.

    Loads the origin/destination lists from ``origin_and_destination.json``
    (one directory above this module), runs one worker thread per origin
    city — staggered by one second so the workers don't all hit the proxy
    pool and the site at once — then reports the global request counters
    through ``read_excel``.
    """
    config_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        "origin_and_destination.json",
    )
    # json.load reads straight from the file handle (no intermediate string).
    with open(config_path, 'r', encoding='utf-8') as f:
        route_config = json.load(f)

    origin_city_list = route_config['origin_city_list']
    destination_list = route_config['destination_list']

    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = []
        for origin_city in origin_city_list:
            futures.append(executor.submit(process_route, origin_city, destination_list))
            # Stagger worker start-up to spread out proxy/site load.
            time.sleep(1)

        for future in concurrent.futures.as_completed(futures):
            try:
                future.result()
            except Exception as e:
                # Surface worker failures with a traceback instead of
                # letting them disappear inside the Future object.
                print(f"An error occurred: {e}")
                print(traceback.format_exc())

    read_excel(transport="Matson", total_requests=total_requests, loss_requests=loss_requests)

if __name__ == '__main__':
    # Run the full crawl when this module is executed directly.
    matson_spider_run()
