# -*- coding: utf-8 -*-
# @Time    : 2018/12/23/023 22:55
# @Author  : 山那边的瘦子
# @Email   : 690238539@qq.com
# @File    : spider.py

import random
import requests
import telnetlib
import threading
from queue import Queue
from lxml import html
from models import IPS
from apps import app


class Spider(object):
    """Multi-threaded proxy-IP scraper for www.xicidaili.com.

    Pipeline stages, linked by queues and each running in its own daemon
    thread (except ``init_urls``, which runs synchronously — see ``run``):

        init_urls -> get_content -> get_data -> verify_proxy_ip -> handle_data

    Candidate proxies are reachability-checked with a short TCP (Telnet)
    connect, then persisted through the project's ``IPS`` model inside the
    Flask application context.
    """

    # Seconds allowed per listing-page HTTP request. Without a timeout a
    # dead host would stall the fetch thread — and run() — forever.
    REQUEST_TIMEOUT = 10
    # Seconds allowed for the TCP reachability probe of each proxy.
    TELNET_TIMEOUT = 1

    def __init__(self, page=None):
        """Prepare queues and request settings.

        :param page: number of listing pages to crawl; ``None`` means only
            the first page.
        """
        # ``{page}`` is filled in per listing page by init_urls().
        self.default_url = 'https://www.xicidaili.com/nn/{page}'
        # A handful of UA strings, rotated per request to look less bot-like.
        self.user_agent = [
            'Mozilla/5.0 (Linux; Android 4.1.1; Nexus 7 Build/JRO03D) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.166 Safari/535.19',
            'Mozilla/5.0 (Linux; U; Android 4.0.4; en-gb; GT-I9300 Build/IMM76D) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30',
            'Mozilla/5.0 (Linux; U; Android 2.2; en-gb; GT-P1000 Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
            'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:21.0) Gecko/20100101 Firefox/21.0',
            'Mozilla/5.0 (Android; Mobile; rv:14.0) Gecko/14.0 Firefox/14.0',
            'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.94 Safari/537.36',
            'Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19',
            'Mozilla/5.0 (iPad; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3',
            'Mozilla/5.0 (iPod; U; CPU like Mac OS X; en) AppleWebKit/420.1 (KHTML, like Gecko) Version/3.0 Mobile/3A101a Safari/419.3'
        ]
        # Outbound proxies for the scraper's own requests (disabled).
        self.proxy = {
            # 'http': '219.238.186.188:8118',
            # 'https': 'http://58.215.140.6:8080'
        }
        self.page = page
        self.url_list_queue = Queue()    # listing-page URLs to fetch
        self.content_queue = Queue()     # raw HTML documents
        self.data_items_queue = Queue()  # per-row <td> text lists
        self.proxy_ips_queue = Queue()   # verified proxy dicts
        # Retained for interface compatibility with the optional JSON-file
        # dump; nothing in the active pipeline populates 'data'.
        self.jsonObj = {
            'code': 200,
            'message': 'The proxy IP address was successfully obtained',
            'data': list()
        }

    def init_urls(self):
        """Delete previously stored proxies and enqueue the listing URLs."""
        with app.app_context():
            ips = IPS.objects.all()
            ips.delete()
        print('删除之前代理ip')
        if self.page is not None:
            for i in range(1, self.page + 1):
                self.url_list_queue.put(self.default_url.format(page=i))
        else:
            self.url_list_queue.put(self.default_url.format(page=1))

    def get_content(self):
        """Fetch each listing page and hand its HTML to the parser stage.

        Runs forever as a daemon thread; consumes ``url_list_queue`` and
        feeds ``content_queue``.
        """
        while True:
            url = self.url_list_queue.get()
            try:
                conn = requests.get(
                    url=url,
                    headers={
                        'User-Agent': random.choice(self.user_agent)
                    },
                    proxies=self.proxy,
                    timeout=self.REQUEST_TIMEOUT
                )
                conn.encoding = 'utf-8'
                self.content_queue.put(conn.text)
            except requests.RequestException as exc:
                # Skip unreachable pages instead of killing the thread.
                print(f'Failed to fetch {url}: {exc}')
            finally:
                # Always mark the item done, otherwise a failed request
                # would deadlock url_list_queue.join() in run().
                self.url_list_queue.task_done()

    def get_data(self):
        """Parse each HTML page and enqueue the <td> texts of proxy rows."""
        while True:
            content = self.content_queue.get()
            try:
                # NOTE(review): only rows with class="odd" are harvested;
                # the site alternates row classes, so roughly half the
                # candidates may be skipped — confirm whether intended.
                rows = html.fromstring(content).xpath('//tr[@class="odd"]')
                for row in rows:
                    self.data_items_queue.put(row.xpath('td/text()'))
            except Exception as exc:
                # A malformed page must not kill the parser thread.
                print(f'Failed to parse page: {exc}')
            finally:
                self.content_queue.task_done()

    def verify_proxy_ip(self):
        """Probe each candidate proxy over TCP; enqueue reachable ones.

        NOTE: telnetlib is deprecated and removed in Python 3.13; a plain
        ``socket.create_connection`` would be the drop-in replacement.
        """
        while True:
            proxy_ip = self.data_items_queue.get()
            try:
                # Cheap reachability check: can we open a TCP connection?
                probe = telnetlib.Telnet(
                    proxy_ip[0], proxy_ip[1], timeout=self.TELNET_TIMEOUT)
            except (OSError, EOFError, IndexError):
                # Unreachable (or malformed) candidate — drop silently;
                # this stage is deliberately best-effort.
                pass
            else:
                probe.close()  # don't leak the probe socket
                print('--------此代理IP可用--------')
                self.proxy_ips_queue.put({
                    'ip': proxy_ip[0],
                    'port': proxy_ip[1],
                    # A blank protocol cell arrives as '\n'; treat it as
                    # HTTP and strip stray whitespace.
                    'type': proxy_ip[5].replace('\n', 'HTTP').replace(' ', ''),
                    # Translate the Chinese duration units to English.
                    'survival': proxy_ip[10].replace('天', ' Days').replace('分钟', ' Minutes ').replace('小时', ' Hours ')
                })
            finally:
                self.data_items_queue.task_done()

    def handle_data(self):
        """Persist each verified proxy through the IPS model."""
        while True:
            data = self.proxy_ips_queue.get()
            try:
                with app.app_context():
                    ip = IPS(
                        ip=data['ip'],
                        port=data['port'],
                        type=data['type'],
                        survival=data['survival'])
                    ip.save()
            except Exception as exc:
                # One bad record must not kill the writer thread.
                print(f'Failed to save proxy {data!r}: {exc}')
            finally:
                self.proxy_ips_queue.task_done()

    def run(self):
        """Drive the pipeline and block until all queues are drained.

        ``init_urls`` runs synchronously first: if it ran in a thread, the
        main thread could reach ``url_list_queue.join()`` while the queue
        was still empty, in which case ``join()`` returns immediately and
        run() would report completion prematurely.
        """
        self.init_urls()

        workers = (
            self.get_content,
            self.get_data,
            self.verify_proxy_ip,
            self.handle_data,
        )
        for target in workers:
            t = threading.Thread(target=target)
            # Worker loops never return, so they must be daemons or the
            # interpreter would never exit. (setDaemon() is deprecated.)
            t.daemon = True
            t.start()

        # Every stage puts downstream work *before* calling task_done(),
        # so joining the queues in pipeline order waits for completion.
        for q in [self.url_list_queue, self.content_queue,
                  self.data_items_queue, self.proxy_ips_queue]:
            q.join()

        print('本次任务完成')


# Script entry point: crawl the first two listing pages when run directly.
if __name__ == '__main__':
    spider = Spider(page=2)
    spider.run()
