#!/usr/bin/python
# coding:utf-8

import random
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent

def getHeaders():
    """Build a request-header dict for www.qb5200.com with a random Chrome UA.

    :return: dict of HTTP request headers suitable for ``requests.get``.
    """
    ua = UserAgent()

    # BUG FIX: ``ua.chrome`` already yields one random Chrome User-Agent
    # *string*; the original ``random.choice(ua.chrome)`` picked a single
    # character out of that string, sending an invalid one-char User-Agent.
    user_agent = ua.chrome

    # Header values copied from a real Chrome request (DevTools -> Network).
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'Host': 'www.qb5200.com',
        'Pragma': 'no-cache',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': user_agent
    }
    return headers


def get_ip_list(url, headers):
    """Fetch a free-proxy listing page and extract proxy URLs from its table.

    Assumes each data row (``<tr>``) holds the protocol in cell index 5 and
    the host/port in cell indices 1 and 2 (kuaidaili-style layout —
    TODO confirm against the actual target page).

    :param url: URL of the proxy listing page.
    :param headers: HTTP request headers to send with the request.
    :return: list of proxy strings like ``'HTTP://1.2.3.4:8080'``.
    :raises requests.HTTPError: if the server responds with a 4xx/5xx status.
    """
    # timeout: never hang forever on a dead proxy-list host;
    # raise_for_status: fail fast instead of silently parsing an error page.
    web_data = requests.get(url, headers=headers, timeout=10)
    web_data.raise_for_status()
    soup = BeautifulSoup(web_data.text, features='html.parser')
    ip_list = []
    # Skip the first <tr> (column headers), as the original range(1, ...) did.
    for row in soup.find_all('tr')[1:]:
        tds = row.find_all('td')
        # Guard: header rows use <th> and malformed rows may have fewer
        # cells; indexing tds[5] on those raised IndexError before.
        if len(tds) > 5:
            ip_list.append(tds[5].text + '://' + tds[1].text + ':' + tds[2].text)
    return ip_list



# headers = {
#     'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
# }
#
#
# url = 'https://www.kuaidaili.com/free/intr/'
# ip_list = get_ip_list(url, headers=headers)
# print(ip_list)

