import asyncio
import json
import logging
import os
import pipes
import random

from config.settings import PACKAGE_PATH
from export.export import export_to_excel
from utils.asynccmd import AsyncCmdMixin


def filter_url(x):
    """Return the ``url`` field of a single httpx result record."""
    return x['url']


class HTTPXScan(AsyncCmdMixin):
    """Wrapper around the httpx binary that probes a domain list for live HTTP services.

    Writes the domains to a temporary target file, runs ``httpx -l <file>
    -silent -json``, and parses each JSON output line into ``self.results``.
    """

    def __init__(self, path, domains: list):
        # Path to the httpx executable.
        self._exec_path = path
        self.domains = domains
        # Live-domain records (parsed httpx JSON lines), populated by scan().
        self.results = []

    @property
    def columns_dicts(self):
        """Mapping of httpx JSON keys to (Chinese) column headers used by export()."""
        return {
            'scheme': '协议',
            'port': '端口',
            'path': '请求路径',
            'body-sha256': '内容(sha256)',
            'header-sha256': '请求头(sha256)',
            'a': 'A记录',
            'title': '标题',
            'webserver': 'webserver',
            'host': '主机地址',
            'response-time': '响应时间'
        }

    async def scan(self, arguments=""):
        """Run httpx against ``self.domains`` and store parsed results.

        TODO: switch to batched execution for large domain lists.
        """
        results = []
        # httpx reads its targets from a file (-l); use a random temp name
        # so concurrent scans do not collide.
        file_name = ''.join(random.sample('zyxwvutsrqponmlkjihgfedcba', 15))
        file_path = PACKAGE_PATH / f'tmp/{file_name}'
        try:
            with open(file_path, 'w+') as f:
                f.write('\n'.join(self.domains))
            cmd = f'{self._exec_path} -l {file_path} -silent -json'
            result = await self.run_cmd(cmd)
            if result is None:
                # Command failed or produced no output: no live domains.
                # (Bug fix: previously fell through and iterated None.)
                self.results = []
                return
            for line in result:
                try:
                    results.append(json.loads(line))
                except (json.JSONDecodeError, TypeError):
                    # Skip non-JSON lines (tool banners, warnings, blanks).
                    continue
            self.results = results
        finally:
            # Always remove the temp target file, even on error.
            if os.path.exists(file_path):
                os.remove(file_path)

    def export(self, is_save, save_path=None):
        """Export ``self.results`` to an Excel sheet; no-op when is_save is falsy."""
        if not is_save:
            return
        export_to_excel(self.results, sheet_name='存活域名', xlsx_name='test',
                        columns_dict=self.columns_dicts, save_path=save_path)


class HTTPX:
    """High-level entry point: scan a domain list with httpx and export live URLs."""

    def __init__(self, exec_path, domains):
        # Underlying scanner that shells out to the httpx binary.
        self.httpx = HTTPXScan(exec_path, domains)
        # Deduplicated list of live URLs, populated by run().
        self.results = []

    def run(self, save_path, is_save=True):
        """Run the scan synchronously, print live URLs, optionally export to Excel."""
        asyncio.run(self.httpx.scan())
        # Deduplicate by URL field of each httpx record.
        self.results = list(set(map(filter_url, self.httpx.results)))
        # Bug fix: original referenced an undefined `logger` (NameError);
        # use a module-scoped logger instead.
        logging.getLogger(__name__).info('存活域名：')
        print('\n'.join(self.results))
        self.httpx.export(is_save, save_path=save_path)
        