from concurrent.futures import ThreadPoolExecutor, as_completed
from time import time, sleep
import requests
import threading
from multiprocessing import Manager, Process, cpu_count
import pycurl
import os
from math import ceil
from io import BytesIO
import demjson
import datetime
from prettytable import PrettyTable
import sys

# Cap every new thread's stack at 64 KiB so a large scanning thread pool can
# be spawned without exhausting virtual memory (default stacks are ~8 MiB).
threading.stack_size(65536)


class DetectorClass:
    """Distributed HTTP scanner worker.

    Splits a downloaded target list across one process per CPU core; each
    process runs a thread pool that probes targets over HTTP(S) with pycurl,
    matches the responses against include/exclude patterns, and reports hits
    back to a central console API.  Progress is persisted to
    ``static/task.json`` / ``static/task_status.txt`` and a ``db/LOCK`` file
    prevents two scans from running at once.
    """

    def __init__(self):
        # Console endpoint, e.g. "http://host:port", read from server.txt
        # with any newline characters stripped.
        with open("server.txt") as f:
            self.console_api_url = f.read().replace("\n", "").replace("\r", "")
        self.process_list = []
        # One shared Manager serves every proxy object.  The original code
        # called Manager() five times, spawning five manager processes.
        manager = Manager()
        self.lock = manager.Lock()
        self.success_list = manager.list()    # probes whose response matched
        self.failed_list = manager.list()     # probes whose request raised
        self.complete_list = manager.list()   # probes answered but filtered out
        self.task_config = manager.dict()
        self.task_config['start_time'] = "ready"
        self.task_config['start_time_int'] = 0
        self.task_info = {}  # populated externally (taskinfo.json) before start()

    def isLock(self):
        """Return True when a scan lock file is present."""
        return os.path.exists("db/LOCK")

    def Lock(self):
        """Create the lock file, recording the acquisition timestamp."""
        self.save_log("db/LOCK", 'w', self.get_now_datetime())

    def unLock(self):
        """Remove the lock file; a missing file is not an error."""
        try:
            os.remove("db/LOCK")
        except OSError:
            # Narrowed from a bare except: only missing-file / permission
            # style failures are expected and safe to ignore here.
            pass

    def save_log(self, filename, mode, content):
        """Write (or append, per *mode*) *content* to *filename* in one call."""
        with open(filename, mode) as f:
            f.write(content)

    def http_get(self, url, headers=None, allow_redirects=0, timeout=30, charset="UTF-8",
                 user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36"):
        """Wrapper around pycurl: GET *url* and return headers+body as text.

        :param headers: optional dict of extra request headers
                        (was a mutable ``{}`` default — fixed to ``None``).
        :param allow_redirects: follow redirects when truthy (max 2 hops).
        :param charset: decoding applied to the raw response; undecodable
                        bytes are ignored.
        :raises pycurl.error: on connect/timeout/SSL failures.
        """
        header_lines = ['{}: {}'.format(key, value)
                        for key, value in (headers or {}).items()]
        buf = BytesIO()
        c = pycurl.Curl()
        try:
            c.setopt(pycurl.URL, url)
            c.setopt(pycurl.CONNECTTIMEOUT, timeout)
            c.setopt(pycurl.WRITEFUNCTION, buf.write)
            c.setopt(pycurl.TIMEOUT, timeout)
            c.setopt(pycurl.FOLLOWLOCATION, allow_redirects)
            c.setopt(pycurl.MAXREDIRS, 2)
            # Certificate checks are intentionally disabled: targets are
            # arbitrary IPs with self-signed or mismatched certificates.
            c.setopt(pycurl.SSL_VERIFYPEER, 0)
            c.setopt(pycurl.SSL_VERIFYHOST, 0)
            c.setopt(pycurl.HEADER, True)
            c.setopt(pycurl.HTTPHEADER, header_lines)
            c.setopt(pycurl.USERAGENT, user_agent)
            c.perform()
            return buf.getvalue().decode(charset, 'ignore')
        finally:
            # The original leaked the curl handle; always release it.
            c.close()

    def get_now_datetime(self):
        """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
        return datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')

    def _report_success(self, task_id, data):
        """Report one matched target to the console API and log the outcome.

        Network failures are logged, never raised: reporting is best-effort.
        """
        success_info = demjson.encode({
            "task_id": task_id,
            "data": data
        })
        try:
            response = requests.post(self.console_api_url + "/report_success", data={
                "success_info": success_info
            })
            if response.text.find("success") > -1:
                self.save_log("static/log.txt", 'a+',
                              "{}  Report Success......\n".format(self.get_now_datetime()))
            else:
                self.save_log("static/log.txt", 'a+',
                              "{} Report Failed: {}\n".format(self.get_now_datetime(), response.text))
        except Exception as e:
            self.save_log("static/log.txt", 'a+',
                          "{}  Report Exception : {}\n".format(self.get_now_datetime(), e))

    def _write_progress(self, task_config, success_count, failed_count, complete_count):
        """Recompute progress statistics and persist/print them.

        Must be called while holding the inter-process lock.
        """
        current_count = failed_count + success_count + complete_count
        used_time = time() - task_config["start_time_int"]
        surplus_count = task_config["count"] - current_count
        # Guard the divisions: used_time can be ~0 on the very first result
        # and count can be 0 for an empty target file.
        avg_speed = round(current_count / used_time, 4) if used_time > 0 else 0.0
        # BUG FIX: remaining time is remaining count DIVIDED by speed.  The
        # original computed int(surplus * speed) / 1000, a meaningless value.
        need_time = round(surplus_count / avg_speed, 2) if avg_speed > 0 else 0
        if task_config["count"] > 0:
            progress = str(round((current_count / task_config["count"]) * 100, 2)) + "%"
        else:
            progress = "0%"

        loginfo = {
            "task_id": task_config["task_id"],
            "count": task_config["count"],
            "current_count": current_count,
            "failed_count": failed_count,
            "success_count": success_count,
            "complete_count": complete_count,
            "used_time": round(used_time, 4),
            "need_time": need_time,
            "speed": avg_speed,
            "progress": progress
        }
        self.save_log("static/task.json", 'w', demjson.encode(loginfo))

        table = PrettyTable()
        table.add_column("任务ID", [task_config["task_id"]])
        table.add_column("任务总数", [loginfo['count']])
        table.add_column("成功请求", [loginfo['complete_count']])
        table.add_column("失败请求", [loginfo['failed_count']])
        table.add_column("成功匹配", [loginfo['success_count']])
        table.add_column("使用时间", [round(loginfo['used_time'], 2)])
        table.add_column("需要时间", [loginfo['need_time']])
        table.add_column("请求速度", [avg_speed])
        table.add_column("开始时间", [task_config['start_time']])
        table.add_column("当前进度", [loginfo['progress']])
        self.save_log('static/task_status.txt', 'w', str(table))
        print(table)

    def thread_func(self, targets, task_config, success_list, failed_list, complete_list, lock):
        """Per-process worker: probe every IP in *targets* with a thread pool.

        Result codes produced by the inner probe:
          1 = response matched all contain_list / no exclude_list entries,
          2 = responded but filtered out by the match rules,
          0 = request raised (timeout, connection refused, ...).
        The shared lists and statistics files are updated under *lock*.
        """

        def request_url(ip):
            result = 1
            resp = ''
            try:
                resp = self.http_get(task_config["type"] + '://' + ip + '/',
                                     headers=task_config['headers'],
                                     timeout=task_config['timeout'])

                for exclude in task_config["exclude_list"]:
                    if exclude in resp:
                        result = 2
                        break
                if result == 1:
                    for contain in task_config["contain_list"]:
                        if contain not in resp:
                            result = 2
                            break
            except Exception:
                # Any transport-level failure counts as a failed probe.
                result = 0

            return {"result": result, "ip": ip, "resp": resp}

        # Context manager guarantees the pool is shut down (the original
        # never called executor.shutdown()).
        with ThreadPoolExecutor(max_workers=task_config["thread"]) as executor:
            all_task = [executor.submit(request_url, url) for url in targets]

            for future in as_completed(all_task):
                data = future.result()
                with lock:
                    if data['result'] == 1:
                        # Matched — record it and report to the console.
                        success_list.append(data)
                        self._report_success(task_config["task_id"], data)
                    elif data['result'] == 2:
                        complete_list.append(data['ip'])
                    else:
                        failed_list.append(data['ip'])

                    self._write_progress(task_config, len(success_list),
                                         len(failed_list), len(complete_list))

    def get_ip_list(self, thread_count_all):
        """Download the task's target file and split it into even chunks.

        :param thread_count_all: total number of worker chunks to produce.
        :returns: ``{"count": total_targets, "list": [chunk, ...]}`` on
                  success, ``False`` on any download/parse failure.
        """
        try:
            ip_file_url = self.console_api_url + "/static/data/" + self.task_info['ip'] + ".txt"
            ip_list = requests.get(ip_file_url).text.split('\n')
            ip_count = len(ip_list)
            count_peer_thread = ceil(ip_count / thread_count_all)
            # 'chunks' no longer shadows the builtin 'list'.
            chunks = [ip_list[x * count_peer_thread:(x + 1) * count_peer_thread]
                      for x in range(thread_count_all)]
            return {
                "count": ip_count,
                "list": chunks
            }
        except Exception as e:
            print(e)
            return False

    def start(self):
        """Run the scan: fan the target list out across one process per core.

        A no-op (logged) when another scan holds the lock file or when the
        target list cannot be downloaded.  Blocks until all workers finish,
        then calls stop() which exits the interpreter.
        """
        if self.isLock():
            self.save_log("static/log.txt", 'a+',
                          "{} Report Failed: {}\n".format(self.get_now_datetime(),
                                                          "server is lock on " + self.read_file("db/LOCK")))
            return

        process_count = cpu_count()

        print("Starting Scanner......", process_count)

        # Distribute the requested thread count over the processes; small
        # requests get one pool of that size per process.
        if self.task_info['thread'] <= process_count:
            thread_count_peer_process = self.task_info['thread']
        else:
            thread_count_peer_process = ceil(self.task_info['thread'] / process_count)

        thread_count_all = int(thread_count_peer_process * process_count)

        targets_info = self.get_ip_list(thread_count_all)
        if not targets_info:
            # BUG FIX: get_ip_list returns False on failure; the original
            # crashed here with a TypeError on targets_info['list'].
            self.save_log("static/log.txt", 'a+',
                          "{} Report Failed: {}\n".format(self.get_now_datetime(),
                                                          "could not fetch target list"))
            return
        targets_list = targets_info['list']

        # Replaces the Manager dict with a plain dict; it is pickled into
        # each child at spawn time, which is all thread_func needs.
        self.task_config = self.task_info
        self.task_config["thread"] = thread_count_peer_process
        self.task_config["start_time_int"] = time()
        self.task_config["start_time"] = self.get_now_datetime()
        self.task_config["count"] = targets_info['count']

        # Acquire the lock file BEFORE spawning workers so a concurrent
        # start() cannot slip in between spawn and lock.
        self.Lock()

        # Launch one worker process per target chunk.
        for targets in targets_list:
            p = Process(target=self.thread_func, args=(
                targets, self.task_config, self.success_list, self.failed_list, self.complete_list, self.lock))
            p.start()
            self.process_list.append(p)

        for process in self.process_list:
            process.join()

        self.unLock()

        self.stop()

    def stop(self):
        """Terminate any remaining workers, release the lock and exit."""
        # BUG FIX: the original removed items from process_list while
        # iterating it, which skips every other process.  Iterate a snapshot
        # and clear the list afterwards.
        for process in list(self.process_list):
            process.terminate()
        self.process_list = []
        self.unLock()
        print("Stop Success.")
        sys.exit()

    def read_file(self, filename):
        """Return the full contents of *filename* as a string."""
        with open(filename, 'r') as file:
            return file.read()


if __name__ == '__main__':
    # Build the detector, load the task description from disk, and run the
    # scan on a background thread so the main thread returns immediately.
    detector = DetectorClass()
    raw_task = detector.read_file("taskinfo.json")
    detector.task_info = demjson.decode(raw_task)
    scan_thread = threading.Thread(target=detector.start)
    scan_thread.start()
