import random

from requests import get
from lxml import etree
from bs4 import BeautifulSoup as bs4

class proxy():
    """Provider of HTTP proxy addresses.

    Serves a static built-in pool (``prox``) and can scrape fresh
    entries from two free-proxy listing sites. All entries are strings
    of the form ``"HTTP://ip:port"``.
    """

    # Static fallback pool of free HTTP proxies ("HTTP://ip:port").
    prox = ['HTTP://114.239.148.216:9999', 'HTTP://59.57.38.13:9999', 'HTTP://223.198.6.183:9999',
            'HTTP://223.198.10.146:9999', 'HTTP://121.226.214.243:9999', 'HTTP://114.239.173.45:9999',
            'HTTP://59.52.185.11:9999', 'HTTP://36.25.42.251:9999', 'HTTP://117.69.200.251:9999',
            'HTTP://183.154.54.86:9999', 'HTTP://121.226.214.178:9999', 'HTTP://223.198.31.235:9999',
            'HTTP://123.163.27.160:9999', 'HTTP://223.198.21.76:9999', 'HTTP://123.163.97.197:9999',
            'HTTP://121.226.214.208:9999', 'HTTP://223.198.14.142:9999', 'HTTP://114.96.167.33:9999',
            'HTTP://114.239.3.85:808', 'HTTP://171.35.166.184:9999', 'HTTP://49.77.209.51:9999',
            'HTTP://1.197.16.120:9999', 'HTTP://61.145.49.239:9999', 'HTTP://117.95.82.219:9999',
            'HTTP://223.198.22.0:9999', 'HTTP://60.167.102.250:9999', 'HTTP://144.255.148.103:61234',
            'HTTP://112.194.40.206:9999', 'HTTP://223.198.22.1:9999', 'HTTP://223.198.22.134:9999',
            'HTTP://183.166.163.30:9999', 'HTTP://113.120.34.32:9999', 'HTTP://180.122.146.220:43691',
            'HTTP://223.198.21.21:9999', 'HTTP://60.167.22.25:9999', 'HTTP://49.70.85.159:9999',
            'HTTP://115.211.225.44:9999', 'HTTP://125.78.167.205:9999', 'HTTP://114.226.244.121:9999',
            'HTTP://183.164.238.53:9999', 'HTTP://183.164.239.14:22055', 'HTTP://49.77.209.230:9999',
            'HTTP://36.25.41.116:9999', 'HTTP://61.145.49.57:9999', 'HTTP://114.239.2.90:9999',
            'HTTP://125.123.209.9:9999', 'HTTP://180.113.189.147:9999', 'HTTP://117.57.91.234:9999',
            'HTTP://114.239.144.67:808', 'HTTP://183.146.156.121:9999', 'HTTP://49.70.48.224:9999',
            'HTTP://117.95.195.232:9999', 'HTTP://223.198.21.3:9999', 'HTTP://49.77.209.151:9999',
            'HTTP://120.35.200.163:9999', 'HTTP://163.204.247.42:9999', 'HTTP://123.169.97.60:9999',
            'HTTP://183.166.163.159:9999', 'HTTP://60.167.23.182:9999', 'HTTP://117.88.5.39:3000',
            'HTTP://182.108.60.75:9999', 'HTTP://106.122.169.127:9999', 'HTTP://175.154.123.210:9999']

    def __init__(self):
        # BUG FIX: was misspelled "__int__", so it was never invoked as
        # the constructor. Kept as a no-op for interface stability.
        pass

    def get_prox(self):
        """Return the static built-in proxy list (class attribute ``prox``)."""
        return self.prox

    def get_proxy0(self, page):
        """Scrape pages 1..page of xicidaili.com for HTTP proxies.

        :param page: number of listing pages to fetch.
        :return: list of ``"HTTP://ip:port"`` strings (HTTPS rows skipped).
        """
        hd = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'}
        con4 = []
        for i in range(1, page + 1):
            url = "https://www.xicidaili.com/nn/{}".format(i)
            # timeout added: a free-proxy site that stalls would otherwise
            # hang this call forever.
            html = get(url=url, headers=hd, timeout=10).text
            con = etree.HTML(html)
            # NOTE: the original comments had these two swapped. The append
            # below builds "HTTP://a:b", so td[2] must be the IP and td[3]
            # the port for the result to be a valid proxy URL.
            con1 = con.xpath('//*[@class="odd"]/td[2]/text()')  # ip
            con2 = con.xpath('//*[@class="odd"]/td[3]/text()')  # port
            con3 = con.xpath('//*[@class="odd"]/td[6]/text()')  # scheme (HTTP/HTTPS)
            for a, b, c in zip(con1, con2, con3):
                if c == "HTTP":
                    con4.append(c + "://" + a + ":" + b)
        return con4

    def get_proxy(self, page):
        """Scrape pages 1..page of kuaidaili.com for HTTP proxies.

        :param page: number of listing pages to fetch.
        :return: list of ``"HTTP://ip:port"`` strings (non-HTTP rows skipped).
        """
        hd = {"Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"Accept-Encoding":"gzip, deflate, br",
"Accept-Language":"zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
"Connection":"keep-alive",
"Cookie":"channelid=0; sid=1597768474372823; _ga=GA1.2.852324126.1597768475; _gid=GA1.2.1550009200.1597768475; Hm_lvt_7ed65b1cc4b810e9fd37959c9bb51b31=1597768475,1597768513,1597806811; Hm_lpvt_7ed65b1cc4b810e9fd37959c9bb51b31=1597807360",
"Host":"www.kuaidaili.com",
"Sec-Fetch-Dest":"document",
"Sec-Fetch-Mode":"navigate",
"Sec-Fetch-Site":"same-origin",
"Sec-Fetch-User":"?1",
"Upgrade-Insecure-Requests":"1",
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4225.0 Safari/537.36 Edg/86.0.611.0"}
        con4 = []
        for i in range(1, page + 1):
            url = "https://www.kuaidaili.com/free/inha/{}/".format(i)
            print(url)
            html = get(url=url, headers=hd, timeout=10).text
            con = bs4(html, "html.parser")
            con1 = con.find("tbody")
            if not con1:
                # No table body -> probably blocked/captcha page; dump it
                # for diagnosis and keep going (best-effort behavior).
                print(con)
                continue
            con2 = con1.find_all("tr")
            # BUG FIX: the row variable used to be named "i", shadowing the
            # outer page counter.
            for row in con2:
                con3 = row.find_all("td")
                a = con3[0].text  # ip
                b = con3[1].text  # port
                c = con3[3].text  # scheme (HTTP/HTTPS)
                if c == "HTTP":
                    con4.append(c + "://" + a + ":" + b)
        return con4


if __name__ == '__main__':
    # Demo run: scrape the first two kuaidaili listing pages and print
    # whatever HTTP proxies were found.
    scraper = proxy()
    found = scraper.get_proxy(2)
    print(found)