# -*- coding:UTF-8 -*-
import datetime
import re
import time
import json

import requests
from bs4 import BeautifulSoup
from multiprocessing import Process, Queue
from concurrent.futures import ThreadPoolExecutor


class WebInfo(Process):
    """Worker process that probes hosts over HTTP for dynamic-load pages.

    Bare IP strings are fed in through a multiprocessing Queue. Each IP is
    fetched; when the landing page redirects via ``window.location.href``,
    the dynamic target URL is resolved (with a millisecond timestamp
    appended, mirroring ``(new Date()).getTime()``) and fetched as well.
    Results are handed to :meth:`callback`.
    """

    def __init__(self):
        super(WebInfo, self).__init__()

        # Desktop browser UA so probed servers don't reject us as a bot.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36'
        }
        # Work queue shared with the parent process; holds bare IP strings.
        self.Q = Queue()

    def get_q(self):
        """Blocking read of the next queued item."""
        return self.Q.get()

    def put_q(self, data):
        """Enqueue an item (normally an IP string) for processing."""
        self.Q.put(data)

    def is_web_info(self, ip, url, status, mid=None):
        """Fetch *url* and report whether it contains a dynamic-load redirect.

        :param ip: address being probed; recomputed from *url* when *status*
            is False (the initial, non-redirected fetch).
        :param url: full URL to request.
        :param status: False for the initial fetch, True when following a
            previously resolved dynamic URL.
        :param mid: prior result dict carrying ``d_url``, if any.
        :returns: ``(found, data)`` where *found* says whether the page embeds
            ``window.location.href`` and *data* is a result dict; on request
            failure returns ``(False, <error message string>)``.
        """
        if not status:
            ip = url.split('http://')[-1]

        try:
            req = requests.get(url=url, headers=self.headers, timeout=(4.05, 10))
            req.encoding = 'utf-8'
            # BUGFIX: str.find() returns 0 for a match at the very start of
            # the page, which the old bool() conversion treated as "absent".
            is_true = 'window.location.href' in req.text

            data = {
                'ip': ip,
                'url': url,
                'd_url': None if not mid else mid['d_url'],
                'html': req.text,
            }

            return is_true, data
        except Exception as exp:
            # Broad catch is deliberate: these run inside ThreadPoolExecutor
            # futures that are never inspected, so an escaping exception
            # would vanish silently instead of reaching callback().
            return False, f'获取网页失败，{exp}'

    def get_url(self, data):
        """Resolve the dynamic-load URL referenced by the fetched page.

        Looks for the idiom
        ``window.location.href = "/doc/page/login.asp?_" + (new Date()).getTime();``
        and, when found, stores the absolute URL (current millisecond
        timestamp appended) in ``data['d_url']``.

        :param data: result dict produced by :meth:`is_web_info`.
        :returns: the same dict; ``d_url`` is left untouched (normally None)
            when no dynamic URL could be extracted. Ambiguous pages (zero or
            multiple matches) are appended to ``web_info_err_ip.json``.
        """
        soup = BeautifulSoup(data['html'], "html.parser")
        # Escaped dots: the marker is a literal string, not a regex wildcard.
        text_list = soup.find_all(string=re.compile(r"window\.location\.href"))
        if len(text_list) == 1:
            text = text_list[0].strip()
            # BUGFIX: the old bool(text.find(...)) was True even when find()
            # returned -1 (marker absent) and False on a match at index 0.
            if '(new Date()).getTime()' in text:
                string = re.findall(r'"(.*?)"', text)
                if string:  # guard: script may contain no quoted path at all
                    data['d_url'] = data['url'] + string[0] + str(int(round(time.time() * 1000)))
            # BUGFIX: previously this branch could fall through and return
            # None, crashing the caller on results['d_url'].
            return data
        else:
            with open('web_info_err_ip.json', 'a', encoding='utf-8') as fw:
                json.dump(data, fw)
            return data

    def callback(self, data):
        """Sink for finished results; currently just prints them."""
        print(data)
        # with open('web_info_ok_ip.json', 'a', encoding='utf-8') as fw:
        #     json.dump(data, fw)

    def thread_pool(self, ip, url):
        """One unit of work: probe *url*, then follow its dynamic URL if any."""
        is_true, data1 = self.is_web_info(ip, url, status=False)
        if is_true:
            results = self.get_url(data1)
            if results['d_url']:
                is_true, data1 = self.is_web_info(results['ip'], results['d_url'], status=True, mid=results)
        self.callback(data1)

    def get_web_info(self):
        """Consume IPs from the queue forever, fanning work out to threads."""
        executor = ThreadPoolExecutor(max_workers=20)
        while True:
            # BUGFIX: block on get() instead of busy-polling empty(), which
            # pinned a CPU core; the empty()-then-get() pair was also racy.
            ip = self.Q.get()
            url = 'http://' + ip
            executor.submit(self.thread_pool, ip, url)

    def run(self):
        self.get_web_info()


if __name__ == '__main__':
    # Spawn the worker process, then seed its queue with addresses to probe.
    worker = WebInfo()
    worker.start()

    worker.put_q('192.168.2.65')
    # worker.put_q('192.168.2.66')

    # To sweep the whole /24 instead:
    # for host in range(1, 253):
    #     worker.put_q(f'192.168.2.{host}')
