import json
import random
import socket
import telnetlib
import time

import pymysql
import requests
from bs4 import BeautifulSoup


class IpProxyPool():
    """Scrape free proxies from public listing sites, persist them to MySQL,
    and validate them with a plain TCP reachability check.

    Proxy records are dicts with keys:
    ``ip``, ``port``, ``anonymous``, ``isHttps``, ``verifyTime``, ``isValid``.
    ``self.Proxies`` is the in-memory pool; ``self.proxyFile`` caches the
    proxies that last passed validation (JSON list of [ip, port, isHttps]).
    """

    def __init__(self):
        self.host = '192.168.2.105'      # MySQL server address
        self.user = 'root'
        self.pwd = 'root'
        self.table = 'heanny'            # NOTE: this is the database (schema) name, not a table
        self.timeout = 5                 # seconds for the TCP reachability check
        self.proxyFile = 'proxies.json'  # cache of proxies that passed validation
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate, sdch',
        }
        self.Proxies = []                # in-memory pool of proxy dicts

    def savesql(self, sql, params=None):
        """Execute one SQL statement and return all fetched rows.

        :param sql: statement text, with ``%s`` placeholders when ``params``
            is given.
        :param params: optional sequence bound by the driver — values are
            escaped by pymysql instead of being interpolated into the string
            (prevents SQL injection from scraped page content).
        :returns: tuple of result rows (empty for non-SELECT statements).

        Opens a fresh connection per call and commits before closing, so a
        failure in one statement cannot leave a connection dangling.
        """
        # PyMySQL >= 1.0 removed positional connect() arguments; the fourth
        # positional arg of the old call was the database name.
        db = pymysql.connect(host=self.host, user=self.user,
                             password=self.pwd, database=self.table,
                             charset='utf8')
        try:
            with db.cursor() as cursor:
                cursor.execute(sql, params)
                data = cursor.fetchall()  # fetch everything, incl. empty result sets
            print(data)
            db.commit()
        finally:
            db.close()
        return data

    def checkProxt(self, ip, port):
        """Return True when a TCP connection to ``ip:port`` succeeds within
        ``self.timeout`` seconds, else False.

        (The historical typo in the method name is kept so existing callers
        keep working.)
        """
        # telnetlib was deprecated (PEP 594) and removed in Python 3.13; a
        # raw socket connect performs the identical reachability test.
        try:
            conn = socket.create_connection((ip, int(port)), timeout=self.timeout)
        except (OSError, ValueError):
            # OSError: unreachable/refused/timeout; ValueError: garbage port
            return False
        conn.close()
        return True

    def getHtmlByProxy(self, url, retries=3):
        """GET ``url`` (optionally through a random proxy from the pool) and
        return the decoded body, or None after ``retries`` failed attempts.

        The original implementation recursed on every failure, which could
        blow the stack when a site stayed down; a bounded retry loop keeps
        the same behaviour on success while failing cleanly.
        """
        for _ in range(retries):
            proxy = random.choice(self.Proxies) if self.Proxies else None
            print(proxy)
            try:
                if proxy:
                    scheme = 'https' if proxy['isHttps'] == 1 else 'http'
                    proxies = {scheme: '{}:{}'.format(proxy['ip'], proxy['port'])}
                    res = requests.get(url, headers=self.headers,
                                       proxies=proxies, timeout=5)
                else:
                    res = requests.get(url, headers=self.headers)
                print('code', res.status_code)
                if res.status_code == 200:
                    res.encoding = 'utf-8'
                    return res.text
            except Exception as e:
                # best-effort fetch: log and fall through to the next attempt
                print('error:', e)
        return None

    def checkProxy(self):
        """Re-validate every proxy stored in MySQL, update its ``isValid``
        flag, and cache the reachable ones into ``self.proxyFile``."""
        rows = self.savesql("select ip,port,isHttps from proxies")
        alive = []
        print(len(rows))
        for ip, port, is_https in rows:
            valid = 1 if self.checkProxt(ip, port) else 0
            if valid:
                alive.append((ip, port, is_https))
            # parameterized update — the old str.format() version was
            # injectable via values stored in the table
            self.savesql(
                "update `proxies` set  isValid = %s where ip = %s and port = %s",
                (valid, ip, port))

        with open(self.proxyFile, 'w') as f:
            f.write(json.dumps(alive))
        return

    def getKuaidaili(self):
        """Scrape pages 1-5 of kuaidaili's free list into ``self.Proxies``."""
        for page in range(1, 6):
            url = "https://www.kuaidaili.com/free/inha/{}/".format(page)
            res = self.getHtmlByProxy(url)
            if not res:
                continue  # fetch failed after retries; skip this page
            html = BeautifulSoup(res, 'html.parser')
            div = html.find('div', attrs={'id': 'list'})
            table = div.find('table').find('tbody')
            page_proxies = []
            for tr in table.find_all('tr'):
                if tr:
                    ip = tr.find('td', attrs={'data-title': 'IP'}).text
                    port = tr.find('td', attrs={'data-title': 'PORT'}).text
                    # '匿' matches both 高匿 (elite) and 匿名 (anonymous)
                    anonymous = 1 if '匿' in tr.find('td', attrs={'data-title': '匿名度'}).text else 0
                    isHttps = 1 if tr.find('td', attrs={'data-title': '类型'}).text == 'HTTPS' else 0
                    verifyTime = tr.find('td', attrs={'data-title': '最后验证时间'}).text
                    page_proxies.append(
                        {'ip': ip, 'port': port, 'anonymous': anonymous,
                         'isHttps': isHttps, 'verifyTime': verifyTime,
                         'isValid': 0})
            self.Proxies += page_proxies
            time.sleep(2)  # throttle to be polite to the listing site
        return

    def get89IP(self):
        """Scrape pages 1-5 of 89ip.cn into ``self.Proxies``."""
        found = []
        for page in range(1, 6):
            url = "http://www.89ip.cn/index_{}.html".format(page)
            res = self.getHtmlByProxy(url)
            if not res:
                continue  # fetch failed after retries; skip this page
            html = BeautifulSoup(res, 'html.parser')
            table = html.find('table', attrs={'class': 'layui-table'}).find('tbody')
            for tr in table.find_all('tr'):
                tds = tr.find_all('td')
                # cells are padded with tabs/newlines/spaces in the markup
                ip = tds[0].text.replace('\t', '').replace('\n', '').replace(' ', '')
                port = tds[1].text.replace('\t', '').replace('\n', '').replace(' ', '')
                verifyTime = tds[4].text.replace('\t', '').replace('\n', '')
                # the site does not expose scheme/anonymity, so default to 0
                found.append(
                    {'ip': ip, 'port': port, 'anonymous': 0, 'isHttps': 0,
                     'verifyTime': verifyTime, 'isValid': 0})
            time.sleep(2)  # throttle to be polite to the listing site
        self.Proxies += found

    def getXicidaili(self):
        """Scrape pages 1-5 of xicidaili's high-anonymity list into
        ``self.Proxies``."""
        for page in range(1, 6):
            # bug fix: the original hard-coded page=1 and never advanced
            url = 'https://www.xicidaili.com/nn/{page}'.format(page=page)
            res = self.getHtmlByProxy(url)
            if not res:
                continue  # fetch failed after retries; skip this page
            # bug fix: getHtmlByProxy already returns the body text, so the
            # original BeautifulSoup(res.text, ...) raised AttributeError
            html = BeautifulSoup(res, 'html.parser')
            table = html.find('table', attrs={'id': 'ip_list'})
            page_proxies = []
            for tr in table.find_all('tr'):
                td = tr.find_all('td')
                if td:  # skip the header row, which has no <td> cells
                    ip = td[1].text
                    port = td[2].text
                    isAnon = td[4].text == '高匿'
                    isHttps = td[5].text in ('HTTPS', 'https')
                    vTime = td[9].text
                    page_proxies.append(
                        {'ip': ip, 'port': port, 'isHttps': isHttps,
                         'anonymous': isAnon, 'isValid': 0,
                         'verifyTime': vTime})
            # bug fix: the original kept one accumulator across pages and
            # re-appended it every iteration, duplicating earlier pages
            self.Proxies += page_proxies
        return

    def pushProxies(self):
        """Insert every proxy in ``self.Proxies`` into MySQL; duplicates are
        ignored via INSERT IGNORE."""
        for p in self.Proxies:
            print('Proxy', p)
            # parameterized insert — values come from scraped pages, so the
            # old str.format() SQL was injectable; the dead p['lifeTime']
            # write (never read anywhere) is also dropped
            self.savesql(
                "insert IGNORE into `proxies`(ip,port,isHttps,isValid,verifyTime,anonymous)"
                " values (%s,%s,%s,%s,%s,%s)",
                (p['ip'], p['port'], p['isHttps'], p['isValid'],
                 p['verifyTime'], p['anonymous']))
        return

    def getProxyFromFile(self):
        """Load previously validated proxies (written by checkProxy) into
        ``self.Proxies``; a missing or corrupt cache leaves the pool empty
        instead of crashing the first run."""
        try:
            # bug fix: use the configured path instead of a second
            # hard-coded 'proxies.json' literal
            with open(self.proxyFile) as f:
                rows = json.loads(f.read())
        except (OSError, ValueError):
            self.Proxies = []
            return
        self.Proxies = [
            {'ip': row[0], 'port': row[1], 'anonymous': 0, 'isHttps': row[2],
             'verifyTime': '-', 'isValid': 0}
            for row in rows
        ]

    def run(self):
        """Full pipeline: load cached proxies, scrape fresh ones, push them
        to MySQL, then re-validate everything."""
        self.getProxyFromFile()
        # self.getKuaidaili()
        self.getXicidaili()
        self.get89IP()
        print(self.Proxies)
        self.pushProxies()
        self.checkProxy()


if __name__ == '__main__':
    # Script entry point: build the pool and run the full scrape/validate cycle.
    IpProxyPool().run()
