import requests

# Pool of candidate HTTP proxies; each entry is a `proxies` mapping
# in the shape requests.get() expects ({'http': 'http://host:port'}).
proxy_list = [
    {'http': 'http://182.34.26.49:9999'},
    {'http': 'http://221.1.90.67:9000'},
    {'http': 'http://119.3.213.143:8081'},
    {'http': 'http://113.121.21.220:9999'}
]

# Target URL used to probe whether a proxy is alive.
base_url = 'http://erabbit.itheima.net/#/'
# Browser-like User-Agent so the probe request is not rejected as a bot.
header = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.86'
}

# Probe every proxy in the pool and prune dead ones from proxy_list.
# Iterate over a copy so removing entries doesn't skip elements of the
# list being mutated.
for per_ip in proxy_list.copy():
    try:
        # Send a GET through this proxy; give up after 3 seconds.
        # The response itself isn't needed — reaching here without an
        # exception is the success signal.
        requests.get(base_url, headers=header, proxies=per_ip, timeout=3)
    except requests.exceptions.RequestException:
        # Catch only requests' own errors (timeout, connection refused,
        # DNS failure, ...). A bare `except:` would also swallow
        # KeyboardInterrupt/SystemExit and hide unrelated bugs.
        print(f"IP地址：{per_ip.get('http')}无效")
        proxy_list.remove(per_ip)
    else:
        # Request completed — the proxy is reachable.
        print(f"IP地址：{per_ip.get('http')}有效")
