import requests
from bs4 import BeautifulSoup
import json

# Proxy protocols exposed by the frp dashboard API; one request is made per protocol.
protocol_list = ["tcp", "udp", "http", "https", "stcp", "sudp"]
# Base API endpoint; the protocol name is appended to form the full URL.
url = 'http://1.14.190.198:8080/api/proxy/'
# HTTP basic-auth credentials for the dashboard (username, password)
auth = ('admin', 'admin')


def fetch_user(protocol="tcp", url=url, auth=auth):
    """
    Fetch the proxy list for one protocol from the frp dashboard API.

    :param protocol: protocol segment appended to the base URL (e.g. "tcp")
    :param url: base API URL, defaults to the module-level ``url``
    :param auth: basic-auth credentials as (username, password)
    :return: list of proxy dicts from the "proxies" field on success,
             or an error-message string on a non-200 response
    """
    # timeout prevents hanging forever if the dashboard is unreachable
    response = requests.get(url + protocol, auth=auth, timeout=10)

    if response.status_code == 200:
        # The endpoint returns JSON; parse it directly. The previous
        # BeautifulSoup prettify() round-trip could corrupt the payload
        # (e.g. entity rewriting of '&' or '<' inside JSON strings).
        return response.json()["proxies"]
    else:
        return f'请求失败，状态码：{response.status_code}'

def fetch_all_user():
    """Collect the proxy entries of every supported protocol into one flat list."""
    collected = []
    for protocol in protocol_list:
        collected.extend(fetch_user(protocol))
    return collected
# 使用示例
def fetch_all_name():
    """Return the "name" field of every proxy entry that has one."""
    names = (entry.get("name") for entry in fetch_all_user())
    return [name for name in names if name is not None]

def fetch_all_port():
    """Return the remote ports of all proxies whose status is 'online'."""
    ports = []
    for entry in fetch_all_user():
        conf = entry.get("conf")
        if conf is None:
            # entries without a conf dict carry no port information
            continue
        port = conf.get('remotePort')
        if port is not None and entry.get('status') == 'online':
            ports.append(port)
    return ports

if __name__ == "__main__":
    # Usage example: fetch data from the dashboard and print it.
    # Uncomment one of the alternatives below to dump raw entries or names instead.
    #data = fetch_all_user()
    #data = fetch_all_name()
    data = fetch_all_port()
    print(data)
