import asyncio
import base64
import concurrent
import concurrent.futures
import json
import socket
import subprocess
import sys
import threading

import aiohttp
import pybase64 as base64
import websockets
from aiohttp import ClientTimeout

from cls import SubConvert, StrText

# Results of the websocket speed test: full detail dicts and raw node links.
best_nodes_detail = []
best_nodes_urls = []
# Public subscription sources to pull node lists from (base64 or plain text).
SUB_URL_ARRAY1 = [
    'https://mirror.v2gh.com/https://raw.githubusercontent.com/Pawdroid/Free-servers/main/sub',
    'https://raw.githubusercontent.com/chengaopan/AutoMergePublicNodes/master/list.txt',
    'https://raw.githubusercontent.com/yebekhe/V2Hub/main/merged_base64',
    'https://raw.githubusercontent.com/soroushmirzaei/telegram-configs-collector/main/protocols/trojan',
    'https://raw.githubusercontent.com/soroushmirzaei/telegram-configs-collector/main/protocols/shadowsocks',
    'https://raw.githubusercontent.com/yebekhe/TelegramV2rayCollector/main/sub/base64/mix',
    "https://raw.githubusercontent.com/mfuu/v2ray/master/v2ray",
    "https://raw.githubusercontent.com/peasoft/NoMoreWalls/master/list_raw.txt",
    "https://raw.githubusercontent.com/ermaozi/get_subscribe/main/subscribe/v2ray.txt",
    "https://raw.githubusercontent.com/aiboboxx/v2rayfree/main/v2",
    "https://raw.githubusercontent.com/mahdibland/V2RayAggregator/master/sub/splitted/vmess.txt",
    "https://raw.githubusercontent.com/mahdibland/V2RayAggregator/master/sub/splitted/trojan.txt",
    "https://raw.githubusercontent.com/freefq/free/master/v2",
    "https://raw.githubusercontent.com/Pawdroid/Free-servers/main/sub",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription1",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription2",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription3",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription4",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription5",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription6",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription7",
    "https://raw.githubusercontent.com/w1770946466/Auto_proxy/main/Long_term_subscription8",
    'https://raw.githubusercontent.com/MrPooyaX/VpnsFucking/main/Shenzo.txt',
    'https://raw.githubusercontent.com/MrPooyaX/SansorchiFucker/main/data.txt',
    'https://raw.githubusercontent.com/soroushmirzaei/telegram-configs-collector/main/protocols/vless',
    'https://raw.githubusercontent.com/ts-sf/fly/main/v2',
    'https://raw.githubusercontent.com/aiboboxx/v2rayfree/main/v2',
    'https://raw.githubusercontent.com/mahdibland/ShadowsocksAggregator/master/Eternity.txt',
    'https://raw.githubusercontent.com/yebekhe/TelegramV2rayCollector/main/sub/normal/mix',
    'https://raw.githubusercontent.com/sarinaesmailzadeh/V2Hub/main/merged',
    'https://raw.githubusercontent.com/freev2rayconfig/V2RAY_SUBSCRIPTION_LINK/main/v2rayconfigs.txt',
    'https://raw.githubusercontent.com/soroushmirzaei/telegram-configs-collector/main/protocols/shadowsocks',
    'https://raw.githubusercontent.com/ALIILAPRO/v2rayNG-Config/main/sub.txt',
    'https://raw.githubusercontent.com/mfuu/v2ray/master/v2ray',
    'https://raw.githubusercontent.com/resasanian/Mirza/main/sub',
    'https://raw.githubusercontent.com/soroushmirzaei/telegram-configs-collector/main/protocols/reality',

]
# Global variables shared across the pipeline.
# NOTE(review): these sets are also mutated from ThreadPoolExecutor workers
# (node_ping) without a lock — relies on CPython set-op atomicity; confirm.
unique_ids = set()
unique_node_names =set()
urls_list = set()
ping_count = 0
async def fetch_url(session, url):
    """Download one subscription URL and return its node list as text.

    Args:
        session: an open aiohttp.ClientSession.
        url: subscription link to download.

    Returns:
        The base64-decoded UTF-8 text when the payload is base64-encoded,
        the raw UTF-8 text when it is not (and looks non-trivial),
        or None on HTTP/network/decoding failure.
    """
    try:
        async with session.get(url, timeout=ClientTimeout(total=250, connect=80)) as response:
            if response.status != 200:
                print(f"[GET Code {response.status}] Download sub error on link: {url}")
                return None
            content = await response.read()
            try:
                print(f"Get node link on sub : {url} ")
                # Most subscriptions are base64-encoded; try that first.
                return base64.b64decode(content).decode('utf-8')
            except (ValueError, UnicodeDecodeError):
                # Not valid base64 (binascii.Error is a ValueError) or not
                # UTF-8 after decoding: treat the payload as plain text.
                print('未使用base64加密，直接添加')
                return content.decode('utf-8') if len(content) > 10 else None
    except Exception as e:
        # Boundary catch: one bad subscription must not abort the gather().
        print(f"[Unknown Error] Download sub error on link: {url}")
        print(e)
        return None


async def select_sub_urls(urls):
    """Download every subscription URL concurrently; keep successful ones.

    Returns a list of decoded subscription texts, skipping any URL whose
    download or decode failed (fetch_url returned None/empty).
    """
    async with aiohttp.ClientSession() as session:
        downloads = await asyncio.gather(*(fetch_url(session, link) for link in urls))
        return [text for text in downloads if text]


def port_open(ip, port):
    """Return True if a TCP connection to (ip, port) succeeds within 1 second.

    Args:
        ip: host name or IP address string.
        port: TCP port, int or numeric string.

    Returns:
        True when the port accepts a connection, False otherwise.
    """
    port = int(port)
    try:
        # Context manager guarantees the socket is closed on every path
        # (the original leaked the socket object on both success and failure).
        with socket.create_connection((ip, port), timeout=1):
            print(f'{ip}:{port}\033[1;32m.... is OK.\033[0m')
            return True
    except Exception:
        # Broad on purpose: any failure (refused, timeout, DNS, bad port
        # range) just means "not reachable" for filtering purposes.
        print(f'{ip}:{port}\033[1;31m.... is down!!!\033[0m')
        return False


def node_ping(node_url):
    """Parse one node URL, extract server/port and TCP-probe the endpoint.

    Supported schemes: vmess, vless, ss, trojan, ssr.

    Args:
        node_url: a single proxy-node URI, possibly with trailing newline.

    Returns:
        The cleaned node URL when the endpoint is reachable and not a
        duplicate; '' for duplicates, unreachable, unparseable or
        unsupported nodes.

    NOTE(review): runs inside a ThreadPoolExecutor; the ping_count increment
    and unique_node_names mutation are not lock-protected, so the printed
    counter may be approximate under contention — confirm acceptable.
    """
    global unique_node_names, ping_count
    try:
        ping_count = ping_count + 1
        print(f"正在ping第 {ping_count} 个节点 ")
        server = ''
        port = 0
        if len(node_url) < 10:
            # BUG FIX: the original printed {j} here, but j was only assigned
            # below — a NameError silently swallowed by the broad except.
            print(f"当前节点不满足条件： {node_url}")
            return ''
        j = node_url.replace('\n', '')
        if j.startswith("vmess://"):
            # vmess carries base64-encoded JSON after the scheme prefix.
            str1 = SubConvert.check_url_v2ray_vmess(j)
            jsonitem = base64.b64decode(str1[8:]).decode('utf-8')
            node = json.loads(jsonitem)
            server = node['add']
            port = node['port']
        elif j.startswith("vless://"):
            node = SubConvert.vless_to_clash(j)
            server = node['server']
            port = node['port']
        elif j.startswith("ss://"):
            onenode = SubConvert.url_ss_to_json(j)
            if onenode:
                node = json.loads(onenode)
                server = node['server']
                port = node['port']
            else:
                return ''
        elif j.startswith("trojan://"):
            # trojan://<pass>@<server>:<port>[?params]#name — the port ends
            # at '?' when query params exist, otherwise at '#'.
            server = StrText.get_str_btw(j, "@", ":", 0)
            port = StrText.get_str_btw(j, "@", "#", 0).split(":", 1)[1] if j.find("?") == -1 else StrText.get_str_btw(
                StrText.get_str_btw(j, "@", "#", 0), ":", "?", 0)
        elif j.startswith("ssr://"):
            # ssr payload: base64("server:port:proto:method:obfs:pass/?...").
            onenode = base64.b64decode(j[6:]).decode('utf-8')
            node = onenode.split('/?')[0].split(':')
            server = node[0]
            port = node[1]
        else:
            # Unknown scheme: skip.
            return ''

        endpoint = server + ':' + str(port)
        if endpoint in unique_node_names:
            print('重复已经过滤')
            return ''
        unique_node_names.add(endpoint)
        if not port_open(server, port):
            print(f"[ping {server}:{port}测试结果：失败]")
            return ''
        return j
    except Exception as ex:
        # Boundary catch: a single malformed node must not kill the worker.
        print(f"Error processing node: {ex}")
        return ''


def read_clash_yaml_as_string(file_path):
    """Return the entire UTF-8 text content of the file at *file_path*."""
    with open(file_path, 'r', encoding='utf-8') as fh:
        return fh.read()


async def connect_websocket():
    """Feed collected nodes to the local speed-test websocket service.

    Reads the filtered node list from ./o/allnode.txt, submits it as one
    subscription to the service on 127.0.0.1:10888, then streams results.
    Nodes reported as "gotspeed" with an MB-range speed are appended to the
    global best_nodes_urls / best_nodes_detail.
    """
    global best_nodes_detail, best_nodes_urls
    file_path = './o/allnode.txt'
    clash_yaml_string = read_clash_yaml_as_string(file_path)
    if len(clash_yaml_string) < 10:
        print("未从节点列表获取节点信息无法进一步测试节点")
        return

    uri = "ws://127.0.0.1:10888/test"
    # Parameters for the local speed-test service; "subscription" carries
    # the whole node list, one URI per line.
    params = {
        "concurrency": 7,
        "fontSize": 24,
        "group": "?empty?",
        "language": "en",
        "pingMethod": "googleping",
        "sortMethod": "rspeed",
        "speedtestMode": "speedonly",
        "testMode": 2,
        "theme": "rainbow",
        "timeout": 18,
        "unique": True,
        "subscription": clash_yaml_string,
        "outputMode": 3

    }

    async with websockets.connect(uri) as websocket:
        # Submit the test request.
        await websocket.send(json.dumps(params))
        print(f"发送参数: {params}")
        servers_list = []  # NOTE(review): collected but currently unused downstream
        # Keep receiving result messages until "eof" or the socket closes.
        while True:
            try:
                response = await asyncio.wait_for(websocket.recv(), timeout=9999)
                data = json.loads(response)
                # BUG FIX: use .get() — a message without "info" previously
                # raised KeyError, which is not caught and killed the loop.
                if data.get("info") == "eof":
                    break
                if data.get("info", "") == "gotservers":
                    servers_list.extend(data.get("servers", []))

                if data.get("id", 0) not in unique_ids and data.get("info", "N/A") == "gotspeed" \
                        and data.get("speed", "N/A") != "N/A" and "MB" in data.get("speed", "") \
                        and len(data.get("link", "")) > 10:

                    unique_ids.add(data.get("id", 0))
                    best_nodes_urls.append(data.get("link", ""))
                    best_nodes_detail.append(data)

                    print(f'收到第{len(unique_ids)}个消息: \033[1;32m....{data}.\033[0m')
                else:
                    print(f"------------不满足条件的消息: {data}")
            except websockets.exceptions.ConnectionClosed:
                print("连接已关闭")
                break


# 写入文件
# Persist speed-test results to disk.
async def write_to_clash_nodes(best_nodes_urls, best_nodes_detail):
    """Write winning node links to a txt file and their detail records,
    sorted by traffic descending, to a JSON backup file.

    Args:
        best_nodes_urls: list of node URI strings.
        best_nodes_detail: list of result dicts, each with a 'traffic' key.
    """
    with open('./o/to_clash_nodes.txt', 'w', encoding='utf-8') as f:
        f.writelines(link + '\n' for link in best_nodes_urls)

    # Highest-traffic nodes first.
    by_traffic = sorted(best_nodes_detail, key=lambda record: record['traffic'], reverse=True)

    with open('./o/to_clash_nodes_bak.json', 'w', encoding='utf-8') as f:
        f.write(json.dumps(by_traffic, indent=4))

    print("speed测速结果，已经写完文件")


async def all_nodes_ping(urls):
    """TCP-probe every node URL in a thread pool; collect reachable ones.

    Reachable, non-duplicate node URLs are added to the global urls_list.

    Fixes two defects in the original:
    - the as_completed loop ran OUTSIDE the `with` block, so the executor
      had already shut down and no result streamed until all were done;
    - node_ping's '' sentinel for failed/duplicate nodes was added to
      urls_list, producing a blank line in allnode.txt and inflating the
      node count.
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=44) as executor:
        futures = [executor.submit(node_ping, j) for j in urls]
        for future in concurrent.futures.as_completed(futures):
            result = future.result()
            if result:
                urls_list.add(result)
                print(f'\033[1;32m.取的节点 ： Result: {result}.\033[0m')

async def main():
    """Pipeline entry: download subs -> ping-filter -> speed test -> save.

    Pass 'true' as the first CLI argument to skip re-downloading the
    subscriptions and reuse the existing ./o/allnode.txt.
    """
    global best_nodes_detail, best_nodes_urls, urls_list
    new_flag = len(sys.argv) >= 2 and sys.argv[1].lower() == 'true'
    try:
        if not new_flag:
            # Fresh run: download all subscriptions and snapshot them.
            sub_links = await select_sub_urls(SUB_URL_ARRAY1)
            with open('./o/allnode.txt', 'w', encoding='utf-8') as f:
                f.write('\n'.join(sub_links))

        nodes = read_clash_yaml_as_string('./o/allnode.txt').split('\n')

        print(f'------ping之前，总节点数: {len(nodes)}----------')
        # Drop unreachable nodes first to reduce speed-test load.
        await all_nodes_ping(nodes)
        print(f'######ping之后，总节点数: {len(urls_list)}##########')
        if not urls_list:
            print("获取节点失败，无法测速")
            return

        # Overwrite allnode.txt with the surviving nodes for the speed test.
        with open('./o/allnode.txt', 'w', encoding='utf-8') as f:
            for node in urls_list:
                f.write(node + '\n')

        await connect_websocket()
        # Write to_clash_nodes_bak.json and to_clash_nodes.txt.
        await write_to_clash_nodes(best_nodes_urls, best_nodes_detail)

    except Exception as e:
        # Top-level boundary: report and exit instead of crashing.
        print(f"发生错误: {e}")


# Script entry point: run the whole async pipeline.
if __name__ == "__main__":
    asyncio.run(main())
