import requests
from loguru import logger
import time
import json
from sqlalchemy import create_engine, exists, select, func
import datetime
from src.components.mysql import get_mysql
import random
import os
from src.service.models.website_shipping import WebsiteShipping
import pandas as pd
from curl_cffi import requests as curl_requests
import openpyxl
from openpyxl import load_workbook
from src.common.logger import get_logger
import platform

# Application file logger (distinct from the loguru console logger used throughout).
logger_app = get_logger(__name__)

# Resolve <package_root>/crawler as the directory for counter/log artifacts.
current_working_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'crawler')
file_path = 'shipping_counter.xlsx'
# Absolute path of the per-carrier request/loss counter workbook.
file_path = os.path.join(current_working_directory, file_path)


def create_excel_file():
    """Create a fresh shipping_counter.xlsx with one row per carrier.

    Columns: transport_contractor, Total requests, Loss Times, Loss rate,
    with the three counter columns initialised to empty strings.

    Bug fix: the original hard-coded ``[''] * 11`` against 12 carriers,
    which makes the DataFrame constructor raise
    "All arrays must be of the same length".
    """
    contractors = [
        'EMC', 'COSCO', 'OOCL', 'CMA', 'Matson',
        'Maersk', 'ONE', 'Hapag-Lloyd', 'YML', 'HMM', 'MSC', 'ZIM'
    ]
    # Size the blank columns from the carrier list so they can never drift apart.
    data = {
        'transport_contractor': contractors,
        'Total requests': [''] * len(contractors),
        'Loss Times': [''] * len(contractors),
        'Loss rate': [''] * len(contractors),
    }

    df = pd.DataFrame(data)
    # Persist the workbook at the module-level path.
    df.to_excel(file_path, index=False)


def read_excel(transport=None, total_requests=None, loss_requests=None):
    """Update the per-carrier counters in shipping_counter.xlsx.

    Despite its name this also WRITES the workbook: it finds the row whose
    'transport_contractor' equals `transport`, rewrites its 'Total requests',
    'Loss Times' and 'Loss rate' cells, and saves the file in place.

    :param transport: carrier code matching a 'transport_contractor' cell.
    :param total_requests: cumulative request count for the carrier.
    :param loss_requests: cumulative failed-request count for the carrier.
    """
    # Create the workbook on first use.
    if not os.path.exists(file_path):
        logger.error(f"文件 {file_path} 不存在，正在创建...")
        create_excel_file()

    try:
        # Debug `print(file_path)` removed; the workbook path is logged above on creation.
        df = pd.read_excel(file_path)
    except Exception as e:
        logger.error(f"读取Excel文件时出错: {e}")
        return

    # Locate the carrier's row.
    row_idx = df.index[df['transport_contractor'] == transport].tolist()
    if row_idx:
        df.at[row_idx[0], 'Total requests'] = total_requests
        df.at[row_idx[0], 'Loss Times'] = loss_requests
        # Guard both counters: the original raised TypeError when called with
        # the default None values (`None > 0`).
        if total_requests and loss_requests is not None:
            per_num = (loss_requests / total_requests) * 100
        else:
            per_num = 0
        df.at[row_idx[0], 'Loss rate'] = f"{per_num:.2f}%"

        # Write the updated DataFrame back to the same file.
        try:
            df.to_excel(file_path, index=False)
        except Exception as e:
            logger.error(f"保存工作簿时出错: {e}")
    else:
        logger.error(f"未找到 {transport} 行")


def read_transport_json(transport=None, origin_city=None, des_city=None, fail_result=None, spider_time=None):
    """Append a failed-route record to log_json/<transport>_shipping_counter.json.

    Skips the write when an entry with the same origin/destination pair
    already exists. Fixes two defects of the original:
    - `spider_time` was accepted but unconditionally overwritten with now();
      a caller-supplied datetime is now honoured.
    - the log_json directory is created if missing, so open() cannot fail
      with FileNotFoundError on a fresh checkout.

    :param transport: carrier code used in the JSON file name.
    :param origin_city: departure port/city of the failed crawl.
    :param des_city: destination port/city of the failed crawl.
    :param fail_result: free-form failure description to record.
    :param spider_time: optional datetime of the crawl; defaults to now().
    """
    json_path = os.path.join(current_working_directory, f'log_json/{transport}_shipping_counter.json')
    # Ensure the log_json directory exists before any file access.
    os.makedirs(os.path.dirname(json_path), exist_ok=True)

    # Seed an empty JSON array on first use.
    if not os.path.exists(json_path):
        logger.info(f"JSON file '{json_path}' does not exist. Creating a new one.")
        with open(json_path, 'w', encoding='utf-8') as f:
            json.dump([], f)

    with open(json_path, 'r', encoding='utf-8') as f:
        data = json.load(f)

    # De-duplicate on the (origin, destination) pair.
    for entry in data:
        if entry.get('origin_city') == origin_city and entry.get('des_city') == des_city:
            logger.info(f"Duplicate entry found: {origin_city} -----> {des_city}. Entry not added.")
            return

    # Honour a caller-supplied datetime; anything else falls back to now()
    # (keeps backward compatibility with callers that passed strings/None).
    if not isinstance(spider_time, datetime.datetime):
        spider_time = datetime.datetime.now()
    formatted_time = spider_time.strftime("%Y-%m-%d %H:%M:%S")

    data.append({
        'origin_city': origin_city,
        'des_city': des_city,
        'fail_result': fail_result,
        'spider_time': formatted_time,
    })

    # Persist the updated list.
    with open(json_path, 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=4)

    logger.info(f"{origin_city} -----> {des_city} 添加到 {transport} JSON 文件成功")


def sleep_time():
    """Pause for a random 1-3 second interval to throttle request rate."""
    delay = random.randint(1, 3)
    time.sleep(delay)


def requests_sys(target_url, headers=None, cookies=None, params=None, data=None, GET=False, POST=False):
    """Send a GET or POST request with up to 5 retries on exceptions.

    Exactly one of GET/POST must be True, otherwise None is returned.
    Returns the Response for any HTTP status (200 or not), and 0 once
    the retry budget is exhausted.
    """
    attempts = 0
    while attempts < 5:
        # Validate method flags: exactly one of GET/POST must be set.
        if GET and POST:
            logger.error(f"{target_url}: 不能同时指定 GET 和 POST")
            return None
        if GET:
            sender = requests.get
        elif POST:
            sender = requests.post
        else:
            logger.error(f"{target_url}: 必须指定 GET 或 POST 请求")
            return None

        # Assemble kwargs, dropping every argument the caller left as None.
        candidate_kwargs = {
            'headers': headers,
            'cookies': cookies,
            'params': params,
            'data': data,
            'timeout': 10,
        }
        kwargs = {name: value for name, value in candidate_kwargs.items() if value is not None}

        try:
            res = sender(target_url, **kwargs)
        except Exception as exc:
            logger.error(f"{target_url}: 请求失败 - {exc}")
            time.sleep(random.randint(1, 3))  # randomized backoff before retrying
            attempts += 1
            continue

        # Any response (200 or otherwise) ends the loop after a polite pause.
        sleep_time()
        if res.status_code != 200:
            logger.info(f"{target_url}: 请求异常 {res.status_code}")
        return res
    logger.error(f"{target_url}: 请求失败次数已达到 {attempts} 次")
    return 0


def proxy_requests_sys(target_url, headers=None, cookies=None, params=None, data=None, GET=False, POST=False,
                       proxies=None):
    """Send a proxied GET or POST request with up to 3 retries.

    A read timeout aborts retrying immediately. Returns the Response on
    HTTP 200, None on a non-200 status or invalid GET/POST flags, and 0
    once the retry budget is exhausted.
    """
    attempt = 0
    while attempt < 3:
        # Exactly one of GET/POST must be set.
        if GET and POST:
            logger.error(f"{target_url}: 不能同时指定 GET 和 POST")
            return None
        if GET:
            sender = requests.get
        elif POST:
            sender = requests.post
        else:
            logger.error(f"{target_url}: 必须指定 GET 或 POST 请求")
            return None

        # Falsy optional arguments are normalised to None before sending.
        kwargs = {
            'headers': headers,
            'cookies': cookies or None,
            'params': params or None,
            'data': data or None,
            'proxies': proxies or None,
            'timeout': 10,
        }

        try:
            res = sender(target_url, **kwargs)
        except requests.exceptions.ReadTimeout as exc:
            # Read timeouts are treated as unrecoverable: stop retrying.
            logger.error(f"{target_url}: 发生读取超时，终止重试 - {exc}")
            break
        except Exception as exc:
            logger.error(f"{target_url}: 请求异常 - {exc}")
            time.sleep(random.randint(1, 2))
            attempt += 1
            continue

        if res.status_code == 200:
            sleep_time()
            return res
        logger.info(f"{target_url}: 请求异常 {res.status_code}")
        sleep_time()
        return None
    logger.error(f"{target_url}: 请求失败次数已达到 {attempt} 次")
    return 0

def curl_requests_sys(target_url, headers=None, cookies=None, params=None, data=None, GET=False, POST=False,
                       proxies=None, impersonate=None):
    """Send a GET/POST via curl_cffi (browser TLS impersonation) with up to 3 retries.

    Returns the response object on HTTP 200, None on a non-200 status or
    invalid GET/POST flags, and 0 once the retry budget is exhausted.
    """
    retry_count = 0
    while retry_count < 3:
        try:
            # Build request kwargs; falsy optional arguments are normalised to None.
            request_params = {
                'headers': headers,
                'cookies': cookies if cookies else None,
                'params': params if params else None,
                'data': data if data else None,
                'proxies': proxies if proxies else None,
                'impersonate': impersonate if impersonate else None,  # browser fingerprint, e.g. "chrome110"
                'timeout': 10
            }
            # Exactly one of GET/POST must be set.
            if GET and POST:
                logger.error(f"{target_url}: 不能同时指定 GET 和 POST")
                return None
            elif GET:
                request_func = curl_requests.get
            elif POST:
                request_func = curl_requests.post
            else:
                logger.error(f"{target_url}: 必须指定 GET 或 POST 请求")
                return None
            # Send the request.
            res = request_func(target_url, **request_params)
        except requests.exceptions.ReadTimeout as e:
            # NOTE(review): curl_cffi raises its own exception types, so this
            # `requests` ReadTimeout clause likely never matches here — verify
            # against curl_cffi's exception hierarchy; timeouts probably fall
            # through to the generic handler below instead of aborting retries.
            logger.error(f"{target_url}: 发生读取超时，终止重试 - {e}")
            break
        except Exception as e:
            logger.error(f"{target_url}: 请求异常 - {e}")
            time.sleep(random.randint(1, 2))
            retry_count += 1
            continue
        if res.status_code == 200:
            sleep_time()
            return res
        else:
            logger.info(f"{target_url}: 请求异常 {res.status_code}")
            sleep_time()
            return None
    logger.error(f"{target_url}: 请求失败次数已达到 {retry_count} 次")
    return 0

def insert_shipping_info(item):
    """Upsert one sailing-schedule row into the website_shipping table.

    `item` must carry origin_code, destination_code, depart_date
    ('%Y-%m-%d' string), haulage_time (days, int-like), transport_contractor,
    ship_name and voyage_number; transfer_code / shipping_lanes are optional.

    Bug fix: the duplicate-update filter compared
    ``WebsiteShipping.ship_name == item['transport_contractor']``, so the
    UPDATE branch matched (almost) nothing; it now filters on
    transport_contractor as intended.
    """
    item['update_time'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Parse once; arrive_date derives from depart_date + haulage days.
    # (The original strftime/strptime round-trip was a no-op: both operands
    # are midnight-normalized datetimes.)
    depart_date = datetime.datetime.strptime(item['depart_date'], '%Y-%m-%d')
    arrive_date = depart_date + datetime.timedelta(days=int(item['haulage_time']))
    with get_mysql() as db:
        # Does an identical schedule row already exist?
        duplicate = db.query(
            exists().where(
                WebsiteShipping.origin_code == item['origin_code'],
                WebsiteShipping.destination_code == item['destination_code'],
                WebsiteShipping.depart_date == depart_date,
                WebsiteShipping.arrive_date == arrive_date,
                WebsiteShipping.ship_name == item['ship_name'],
                WebsiteShipping.transport_contractor == item['transport_contractor'],
                WebsiteShipping.voyage_number == item['voyage_number']
            )
        ).scalar()
        if not duplicate:
            db.add(WebsiteShipping(
                origin_code=item['origin_code'],
                destination_code=item['destination_code'],
                depart_date=depart_date,
                arrive_date=arrive_date,
                transport_contractor=item['transport_contractor'],
                haulage_time=item['haulage_time'],
                ship_name=item['ship_name'],
                voyage_number=item['voyage_number'],
                transfer_code=item.get('transfer_code'),
                shipping_lanes=item.get('shipping_lanes'),  # lane/route code
                update_time=item['update_time']
            ))
            db.commit()  # commit the insert
            logger.success("数据已成功插入。")
        else:
            # Row exists: refresh its mutable fields.
            db.query(WebsiteShipping).filter(
                WebsiteShipping.origin_code == item['origin_code'],
                WebsiteShipping.destination_code == item['destination_code'],
                WebsiteShipping.depart_date == depart_date,
                # fixed: was `ship_name == item['transport_contractor']`
                WebsiteShipping.transport_contractor == item['transport_contractor'],
                WebsiteShipping.ship_name == item['ship_name'],
                WebsiteShipping.voyage_number == item['voyage_number']
            ).update({
                WebsiteShipping.depart_date: depart_date,
                WebsiteShipping.arrive_date: arrive_date,
                WebsiteShipping.haulage_time: item['haulage_time'],
                WebsiteShipping.update_time: item['update_time']
            }, synchronize_session=False)
            db.commit()  # commit the update
            logger.info("重复数据已更新。")


# 国内ip代理池
def get_proxies():
    ip_url = 'https://dps.kdlapi.com/api/getipbalance?secret_id=oc8ulwi8whcdbb3xavpr&signature=6m17y6vehnl2n2vkqfyx8fhgq6k1ltpw'
    ip_res = requests.get(url=ip_url)
    if ip_res.status_code == 200:
        js_data = ip_res.json()
        logger.info(f"ip剩余额度: {js_data['data']['balance']}")
        logger_app.info(f"ip剩余额度: {js_data['data']['balance']}")
    else:
        logger.error(f"获取ip代理额度api接口异常: {ip_res.status_code}")

    url = 'https://dps.kdlapi.com/api/getdps/?secret_id=oc8ulwi8whcdbb3xavpr&signature=6m17y6vehnl2n2vkqfyx8fhgq6k1ltpw&num=1&pt=1&format=json&sep=1'
    res = requests_sys(url, GET=True)
    if res:
        js_data = res.json()
        for info in js_data["data"]["proxy_list"]:
            username = "d2752102210"
            password = "xkdfnmpo"
            proxies = {
                "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": info},
                "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": info}
            }
            logger.info(f"国内ip代理获取中")
            return proxies


# 国外ip代理池
def get_abrod_proxies():
    ip_url = 'https://dev.kdlapi.com/api/getfpsflow?secret_id=obss346i951tsqhmec0o&signature=h99jel42gvijyfatj64fsdx9bnuj6n87'
    ip_res = requests.get(url=ip_url)
    if ip_res.status_code == 200:
        logger.info(f"Raw response: {ip_res.text}")
        try:
            js_data = ip_res.json()
            logger.info(f"当前剩余流量{js_data.get('data').get('flow_left')}MB")
            logger.info(f"当前剩余流量{js_data.get('data').get('flow_left')}MB")
        except ValueError:
            logger.error("Failed to parse JSON response")
    else:
        logger.error(f"获取ip代理额度api接口异常: {ip_res.status_code}")

    # 隧道域名:端口号
    tunnel = "us.o816.kdlfps.com:18866"

    # 用户名密码方式
    username = "f2787685726"
    password = "tcdhvplj"
    proxies = {
        "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": tunnel},
        "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": tunnel}
    }
    logger.info(f"国外ip代理获取中")
    return proxies


