#!/usr/bin/python3
import sys
import time
import argparse
import logging
import asyncio
import aiohttp
import psutil
import queue
# import signal
from termcolor import colored
from multiprocessing import Process, Queue, Value, Lock


logging.basicConfig(level=logging.WARNING,
                    format='%(message)s',
                    datefmt='%Y-%m-%d %M:%S')

__version__ = "0.0.1"


# global SIGEXIT
# SIGEXIT = False

# def myHandler(signum, frame):
#     if not SIGEXIT:
#         logging.warning("收到信号，将会退出")
#     SIGEXIT = True

def increment_counter(counter, lock):
    """Atomically add one to a shared multiprocessing counter.

    Args:
        counter: a multiprocessing.Value whose .value is incremented.
        lock: a multiprocessing.Lock guarding the read-modify-write.
    """
    lock.acquire()
    try:
        counter.value += 1
    finally:
        lock.release()

def convert_seconds(seconds):
    """Split a duration in seconds into (hours, minutes, seconds).

    Hours and minutes are ints; the residual seconds are rounded to
    two decimal places.
    """
    hours, remainder = divmod(seconds, 3600)
    minutes, secs = divmod(remainder, 60)
    return int(hours), int(minutes), round(secs, 2)

def example():
    """Print sample command-line invocations of this tool."""
    samples = (
        ('# 测试https://www.xxx.com', 'blue'),
        (f'{sys.argv[0]} -c 100 -t 100000 -u https://www.xxx.com', 'yellow'),
    )
    for text, color in samples:
        print(colored(text, color))

def parse_argument():
    """Build the CLI parser and return the parsed options namespace.

    Options: -c/--concurrency (coroutines per process, default 30),
    -t/--tasks (total requests, default 100), -u/--url (target URL),
    -v/--version. The `example` subcommand installs `func=example`.
    """
    parser = argparse.ArgumentParser(
        description='并发请求HTTP，会启用与CPU个数相同的进程，进程下再启用设置的协程数',
        formatter_class=argparse.RawTextHelpFormatter)

    # Table-driven registration of the plain value options.
    option_specs = (
        (('-c', '--concurrency'), dict(type=int, default=30, help='协程并发数，默认30')),
        (('-t', '--tasks'), dict(type=int, default=100, help='要完成的任务数，默认100')),
        (('-u', '--url'), dict(type=str, default='', help='测试的URL')),
    )
    for flags, kwargs in option_specs:
        parser.add_argument(*flags, **kwargs)
    parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + __version__)

    subparsers = parser.add_subparsers()
    example_parser = subparsers.add_parser('example', help='显示一些命令使用实例')
    example_parser.set_defaults(func=example)

    return parser.parse_args()

def parsed_params(args):
    """Validate parsed CLI options and assemble the run-parameter dict.

    Args:
        args: the argparse namespace from parse_argument().

    Returns:
        dict with keys 'concurrency', 'url', 'cpu_number', 'tasks',
        or None (after logging an error) when validation fails.
    """
    if args.url == '' or args.url is None:
        logging.error("请输入要测试的URL")
        return

    # BUG FIX: the original only rejected exactly 0, so negative values
    # slipped through even though the error message requires >= 1.
    if args.concurrency < 1:
        logging.error("消费工作协程数必须大于等于1")
        return

    params_parsed = {
        'concurrency': args.concurrency,
        'url': args.url,
        'cpu_number': psutil.cpu_count(),
        'tasks': int(args.tasks),
    }

    return params_parsed

def generate_tasks(params_parsed):
    """Yield one task dict per requested request, then None forever.

    The endless trailing None values act as stop markers so that every
    consumer coroutine eventually receives one from the shared queue.
    """
    url = params_parsed['url']
    total = params_parsed['tasks']
    number = 0
    while number < total:
        yield {
            'url': url,
            'no': number,
            'http_code': None,
            'http_error': None,
            'net_error': None,
        }
        number += 1
    while True:
        yield None

def task_producer(task_queue, params_parsed):
    """Feed generated tasks (then endless None markers) into task_queue.

    Never returns on its own — the generator yields None forever — so
    this runs as a daemon process and is killed by the parent once all
    workers have finished.
    """
    source = generate_tasks(params_parsed)
    while True:
        task_queue.put(next(source))

async def task_customer(tasks_q, result_q):
    """Consume tasks from tasks_q, GET each URL, push results to result_q.

    A None task is the stop marker: the coroutine returns, and the
    shared aiohttp session is closed by the context manager.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36'
    }
    async with aiohttp.ClientSession() as session:
        while True:
            try:
                # BUG FIX: the original used the blocking Queue.get(),
                # which stalls the whole event loop and serializes every
                # coroutine in this process. Poll non-blockingly and yield
                # to the loop while empty so the configured concurrency is
                # actually achieved. The producer keeps emitting None
                # markers forever, so a transiently empty queue is safe to
                # retry.
                task = tasks_q.get_nowait()
            except queue.Empty:
                await asyncio.sleep(0.01)
                continue
            except Exception as e:
                logging.error("等待任务出现错误：%s" % e)
                return
            if task is None:
                return
            try:
                async with session.get(task['url'], headers=headers, verify_ssl=False) as resp:
                    task['http_code'] = resp.status
                    # Drain the body so the connection can be reused.
                    async for chunk in resp.content.iter_chunked(1024):
                        pass
            except Exception as e:
                task['net_error'] = "%s" % e
                if task['net_error'] == "":
                    task['net_error'] = "unknow error"

            result_q.put(task)


def task_display(result_q, params_parsed, flag_task_done):
    """Drain result_q, print live progress, then a final summary.

    Exits once flag_task_done equals the number of worker processes
    (each worker increments it on exit) and result_q has gone quiet
    (the 0.1s poll timeout expires).
    """
    exit_flag = params_parsed['cpu_number']
    status_map = {}   # HTTP status code -> occurrence count
    status_err = 0    # requests that never produced a status code
    task_done = 0
    while True:
        try:
            result = result_q.get(timeout=0.1)
        # BUG FIX: the original duplicated this branch with a bare
        # `except:`; queue.Empty already derives from Exception, so a
        # single narrowed handler keeps the retry-until-workers-done
        # behavior without also trapping KeyboardInterrupt/SystemExit.
        except Exception:
            if flag_task_done.value == exit_flag:
                break
            continue
        if result['http_code'] is not None:
            status_map[result['http_code']] = status_map.get(result['http_code'], 0) + 1
        else:
            status_err += 1
        task_done += 1
        done_percent = task_done * 100 / params_parsed['tasks']
        if result['net_error'] is not None:
            print('\n出错原因: ', colored(f"{result['net_error']}", 'red'))
        # '\r' + end='' keeps the progress on a single updating line.
        print(
            f'\rcurr:', colored(f'{done_percent:.2f}%', 'blue'),
            "完成请求数:", colored(f'{task_done}', 'green'),
            '出错数:', colored(f'{status_err}', 'red'), end='')
    print()
    for code, count in status_map.items():
        print(f'{code}:', colored(f'{count}', 'blue'))
    print(colored('------------------------------------'))
    print("完成请求数:", colored(f'{task_done}', 'green'), '出错数:', colored(f'{status_err}', 'red'))


async def work(loop, params_parsed, tasks_q, result_q):
    """Spawn the configured number of consumer coroutines and await them.

    Waits until all finish or the first one raises; prints the stack of
    any failed task and cancels whatever is still pending.
    """
    tasks = [
        loop.create_task(task_customer(tasks_q, result_q))
        for _ in range(params_parsed['concurrency'])
    ]

    done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
    for finished in done:
        if finished.exception():
            finished.print_stack()
    for unfinished in pending:
        unfinished.cancel()

def do_task(tasks_q, result_q, params_parsed, flag_task_done, lock):
    """Run one worker process: a fresh event loop driving the consumers.

    Always increments flag_task_done on exit so the display process can
    detect completion. Returns True.
    """
    event_loop = asyncio.new_event_loop()
    asyncio.set_event_loop(event_loop)
    try:
        event_loop.run_until_complete(work(event_loop, params_parsed, tasks_q, result_q))
    except Exception as e:
        logging.error("asyncio error: %s" % e)
    finally:
        # BUG FIX: close in `finally` — the original closed the loop only
        # on the success path (`else`), leaking it after an exception.
        event_loop.close()
    increment_counter(flag_task_done, lock)
    return True


def prepare():
    """Entry point: parse CLI args and orchestrate the load test.

    Topology: one daemon producer process feeding a bounded task queue,
    one worker process per CPU running coroutine consumers, and one
    display process aggregating results. Returns True after running a
    subcommand; returns None when parameter validation fails.
    """
    args = parse_argument()
    # A subcommand (e.g. `example`) installs a `func` default; run it and stop.
    if hasattr(args, 'func'):
        args.func()
        return True
    params_parsed = parsed_params(args)
    if not params_parsed:
        return
    
    print(
        "cpu数:", colored(f"{params_parsed['cpu_number']},", 'yellow'),
        "单进程协数:", colored(f"{params_parsed['concurrency']},", 'yellow'),
        "任务数:", colored(f"{params_parsed['tasks']},", 'yellow'),
        "\n检测域名:", colored(f"{params_parsed['url']}", 'yellow'),
        "\n------------------------------------"
    )

    # signal.signal(signal.SIGABRT, myHandler)
    # signal.signal(signal.SIGHUP, myHandler)
    # # signal.signal(signal.SIGKILL, myHandler)
    # signal.signal(signal.SIGINT, myHandler)
    # signal.signal(signal.SIGQUIT, myHandler)
    # signal.signal(signal.SIGTERM, myHandler)
    # signal.pause()

    # Bounded queues give backpressure: the producer blocks once 300 tasks
    # are waiting, instead of materializing the whole task list up front.
    tasks_q = Queue(maxsize=300)
    result_q = Queue(maxsize=300)
    # Shared count of worker processes that have finished; the display
    # process compares it against cpu_number to know when to stop.
    flag_task_done = Value('d', 0) 
    lock = Lock()

    # Producer never returns on its own (it yields None markers forever),
    # so it runs as a daemon and is killed explicitly below.
    pro = Process(target=task_producer, daemon=True, args=(tasks_q, params_parsed))
    pro.start()

    process_list = []
    for i in range(params_parsed['cpu_number']):
        process_list.append(Process(target=do_task, daemon=True, args=(tasks_q, result_q, params_parsed, flag_task_done, lock)))
    
    start_time = time.time()
    for i in range(len(process_list)):
        process_list[i].start()
    
    display = Process(target=task_display, daemon=True, args=(result_q, params_parsed, flag_task_done))
    display.start()

    # Wait for every worker; each one bumps flag_task_done on exit, which
    # lets the display process drain result_q and then terminate.
    for p in process_list:
        p.join()
    end_time = time.time()
    display.join()
    pro.kill()
    pro.join()
    print(colored(f'------------------------------------'))
    hours, minutes, seconds = convert_seconds(end_time - start_time)
    print("执行时间:", colored(f'{hours}小时{minutes}分钟{seconds}秒', 'yellow'))


if __name__ == "__main__":
    prepare()
