#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2018/1/19 0019 13:54
# @Author  : Arliki
# @email   : hkdnxycz@outlook.com
# @File    : xici


import queue
import random
import time
from multiprocessing import Process, Queue

import requests
from bs4 import BeautifulSoup


class xici(object):
    """Scrape free HTTP/HTTPS proxies from www.xicidaili.com and verify them.

    Attributes:
        old_pro:   unverified "http://ip:port" strings scraped from the site.
        old_s_pro: unverified "https://ip:port" strings scraped from the site.
        http_pro:  verified HTTP proxies (filled by check_ips()).
        https_pro: verified HTTPS proxies (filled by check_ips()).
    """

    def __init__(self, page=5):
        # page: number of listing pages to scrape (starting from a random
        # offset so repeated runs do not always hit the same pages).
        self.old_pro = []
        self.old_s_pro = []
        self.http_pro = []
        self.https_pro = []
        self.headers = {
            'Accept': '*/*',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate, sdch',
            'Accept-Language': 'zh-CN,zh;q=0.8'
        }
        self.page = page
        self.get_proxies()

    def get_proxies(self):
        """Scrape the listing pages and fill old_pro / old_s_pro."""
        page = random.randint(1, 3)
        page_stop = self.page + page
        while page < page_stop:
            print(page)
            url = "http://www.xicidaili.com/nn/%d" % page
            html = requests.get(url, headers=self.headers).content
            soup = BeautifulSoup(html, 'lxml')
            ip_list = soup.find(id='ip_list')
            for row in ip_list.find_all(class_="odd"):
                cells = row.find_all('td')
                # Column 5 holds the protocol, columns 1-2 hold ip and port.
                protocol = cells[5].get_text().lower() + "://"
                address = ":".join(x.get_text() for x in cells[1:3])
                if protocol == "http://":
                    self.old_pro.append(protocol + address)
                else:
                    self.old_s_pro.append(protocol + address)
            page += 1
            time.sleep(1)  # be polite to the site

    def check_ips(self):
        """Verify the scraped proxies in parallel; fill http_pro / https_pro."""
        old_queue = Queue()
        old_s_queue = Queue()
        new_queue = Queue()
        new_s_queue = Queue()
        workers = [
            Process(target=self.check_one_ip,
                    args=(old_queue, old_s_queue, new_queue, new_s_queue))
            for _ in range(15)
        ]
        for worker in workers:
            worker.start()
        for http in self.old_pro:
            old_queue.put(http)
        for https in self.old_s_pro:
            old_s_queue.put(https)
        # One 0 sentinel per worker in EACH queue. The original put one
        # sentinel per proxy and required a worker to see 0 on both queues in
        # the same iteration, which deadlocked whenever the two lists had
        # different lengths (or fewer entries than the 15 workers).
        for _ in workers:
            old_queue.put(0)
            old_s_queue.put(0)
        for worker in workers:
            worker.join()
        self.http_pro = []
        self.https_pro = []
        while 1:
            try:
                self.http_pro.append(new_queue.get(timeout=1))
            except queue.Empty:
                break
        while 1:
            try:
                self.https_pro.append(new_s_queue.get(timeout=1))
            except queue.Empty:
                break
        print("verify ip is done!")

    def check_one_ip(self, old_queue, old_s_queue, new_queue, new_s_queue):
        """Worker: drain the HTTP queue to its sentinel, then the HTTPS queue.

        Draining the queues one after the other (instead of one blocking get()
        on each per iteration, as the original did) cannot deadlock when the
        queues hold different numbers of proxies.
        """
        while 1:
            http = old_queue.get()
            if http == 0:
                break
            proxies = {'http': http}
            try:
                if requests.get('http://www.baidu.com', proxies=proxies, timeout=2).status_code == 200:
                    new_queue.put(http)
                    print(http, "is success")
            except requests.RequestException:
                print(http, "is fail")
        while 1:
            https = old_s_queue.get()
            if https == 0:
                break
            proxies = {'https': https}
            try:
                # Must probe an https:// URL: the original fetched an http://
                # URL with only an 'https' proxies key, so the proxy was never
                # used and every direct connection counted as a "success".
                if requests.get('https://www.baidu.com', proxies=proxies, timeout=2).status_code == 200:
                    new_s_queue.put(https)
                    print(https, "is success")
            except requests.RequestException:
                print(https, "is fail")



class kuai(object):
    """Scrape free HTTP proxies from www.kuaidaili.com and verify them.

    Attributes:
        old_pro: unverified "protocol://ip:port" strings scraped from the site.
        new_pro: verified proxies (filled by send_worker()).
    """

    def __init__(self, page, page_stop):
        # Scrape listing pages page..page_stop inclusive.
        self.old_pro = []
        self.new_pro = []
        self.page = page
        self.page_stop = page_stop
        self.headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Host': 'www.kuaidaili.com',
            # Fixed: the original value contained a stray space ("https: //...")
            # which made the Referer malformed.
            'Referer': 'https://www.kuaidaili.com/free/'
        }
        self.get_list()

    def get_list(self):
        """Scrape the listing pages and fill old_pro."""
        while self.page <= self.page_stop:
            time.sleep(1)  # be polite to the site
            url = "https://www.kuaidaili.com/free/inha/%d/" % self.page
            html = requests.get(url, headers=self.headers).content
            soup = BeautifulSoup(html, 'lxml')
            ip_list = soup.find('tbody')
            for row in ip_list.find_all('tr'):
                cells = row.find_all('td')
                # Column 3 holds the protocol, columns 0-1 hold ip and port.
                protocol = cells[3].get_text().lower() + "://"
                address = ":".join(x.get_text() for x in cells[0:2])
                self.old_pro.append(protocol + address)
            self.page += 1

    def send_worker(self):
        """Verify old_pro with 15 worker processes; fill new_pro."""
        old_queue = Queue()
        new_queue = Queue()
        workers = [
            Process(target=self.get_worker, args=(old_queue, new_queue))
            for _ in range(15)
        ]
        for worker in workers:
            worker.start()
        print(self.old_pro)
        for pro in self.old_pro:
            old_queue.put(pro)
        # One 0 sentinel per WORKER. The original put one per proxy, so with
        # fewer than 15 proxies some workers never received a sentinel and
        # join() blocked forever.
        for _ in workers:
            old_queue.put(0)
        for worker in workers:
            worker.join()
        while 1:
            try:
                verified = new_queue.get(timeout=1)
            except queue.Empty:
                break
            print(verified)
            self.new_pro.append(verified)
        print("is ok")

    def get_worker(self, old_queue, new_queue):
        """Worker: pop proxies until the 0 sentinel, keep the working ones."""
        while 1:
            pro = old_queue.get()
            if pro == 0:
                break
            proxies = {'http': pro}
            try:
                if requests.get("http://test.arliki.com", proxies=proxies, timeout=2).status_code == 200:
                    new_queue.put(pro)
                    print(pro, "is success")
            except requests.RequestException:
                print(pro, "is fail")


def run(start, end):
    """Scrape kuaidaili pages [start, end], verify, and append unique working
    proxies to http_ip.txt until the file holds at least 20 lines.

    Advances to the next window of pages iteratively; the original recursed
    with run(start + end, end), which built a kuai with an empty page range
    (start + end > end) whose 15 workers then blocked forever on an empty
    queue.
    """
    width = end - start
    while 1:
        scraper = kuai(start, end)  # renamed: the original shadowed class xici
        scraper.send_worker()
        print("http:----------------")
        print(scraper.new_pro)
        seen = set()  # set membership instead of the original O(n^2) list scan
        with open('http_ip.txt', 'a+', encoding='utf-8') as f:
            for proxy in scraper.new_pro:
                if proxy not in seen:
                    seen.add(proxy)
                    f.write(proxy + "\n")
        with open('http_ip.txt', 'r', encoding='utf-8') as f:
            lines = f.readlines()
        if len(lines) >= 20:
            break
        # Not enough proxies yet: wait, then scrape the next window of pages.
        time.sleep(5)
        start = end + 1
        end = start + width


def run2():
    """Scrape xicidaili, verify, and append the unique working HTTP proxies
    to http_proxies.txt (deduplicated within this run via set())."""
    a = xici()
    a.check_ips()
    print(a.http_pro)
    if a.http_pro:
        with open('http_proxies.txt', 'a+', encoding='utf-8') as f:
            for proxy in set(a.http_pro):
                f.write(proxy + "\n")


if __name__ == '__main__':
    # Entry point: scrape kuaidaili pages 1-20 and verify the proxies.
    # run2() is the xicidaili-based alternative.
    run(1, 20)
    # run2()
