import os
from typing import List, Callable
import json
import aiohttp
import asyncio
from io import BytesIO
import zipfile
import uuid


# Archive-type markers passed to FileFetchControl.unzip_file.
# NOTE: unzip_file currently does not branch on these — they are
# informational / reserved for future per-type handling.
FILE_TYPE_LOG = 1   # zip returned by the /log/filter endpoint
FILE_TYPE_TASK = 2  # zip returned by the /task/{pid}/{taskid} endpoint


def get_robot_servers() -> List[str]:
    """Read the robot server address list from ./config.json.

    Returns:
        The ``"robots"`` entry of the config, or an empty list when the
        key is absent.  (Previously ``config.get("robots")`` could return
        ``None``, violating the ``List[str]`` annotation and breaking
        consumers that iterate the result.)

    Raises:
        OSError / json.JSONDecodeError: if config.json is missing or invalid.
    """
    with open("config.json", encoding="utf-8") as f:
        config = json.load(f)
    return config.get("robots", [])


class FileFetchControl:
    """Download log/task zip archives from robot servers (HTTP, port 8000)
    and extract them under ``<cwd>/data/<ip>/<uuid4>/``.

    Outcomes are delivered via caller-supplied callbacks as
    ``callback(ip_or_index, {"success": bool, "message": path_or_error})``.
    """

    # Robot IPs loaded once at class-definition (import) time.  A missing or
    # malformed config.json must not make this module unimportable, so fall
    # back to an empty list; callers can still pass explicit IP lists.
    try:
        robots = get_robot_servers()
    except Exception:
        robots = []

    def __init__(self, loop):
        # Event loop used by the synchronous get_robot_log() wrapper.
        self.loop = loop

    def unzip_file(self, ip: str, file_type: int, file: BytesIO) -> str:
        """Extract *file* (a zip archive) into a fresh per-download directory.

        Args:
            ip: robot address; becomes a path component.
            file_type: FILE_TYPE_LOG / FILE_TYPE_TASK — accepted for
                interface stability, currently not used to alter extraction.
            file: in-memory zip archive.

        Returns:
            The extraction directory: ``<cwd>/data/<ip>/<uuid4>``.
        """
        data_path = os.path.join(os.getcwd(), "data", ip, str(uuid.uuid4()))
        # exist_ok avoids the check-then-create race of the original
        # os.path.exists() + os.makedirs() pair.
        os.makedirs(data_path, exist_ok=True)
        with zipfile.ZipFile(file) as zf:
            zf.extractall(data_path)
        return data_path

    async def download_log(self, ip: str, content: str, pid: str, file_date_start: str, file_date_end: str, callback: Callable):
        """Fetch filtered logs from one robot and report via ``callback(ip, result)``.

        ``result`` is ``{"success": bool, "message": extracted_path_or_error}``.
        Errors (network failures, non-200 responses) are reported through the
        callback rather than raised.
        """
        params = {
            "content": content,
            "pid": "*" if pid == "" else pid,  # "*" = all processes
            "file_date_start": file_date_start,
            "file_date_end": file_date_end
        }
        result = {"success": False}
        url = f"http://{ip}:8000/log/filter"
        async with aiohttp.ClientSession() as client:
            try:
                async with client.get(url, params=params) as resp:
                    if resp.status == 200:
                        data = await resp.read()
                        result['message'] = self.unzip_file(ip, FILE_TYPE_LOG, BytesIO(data))
                        result["success"] = True
                    else:
                        # Server reports errors as {"detail": ...} JSON.
                        rs = await resp.json()
                        result['message'] = rs['detail']
                    if callback:
                        callback(ip, result)
            except Exception as e:
                # Boundary handler: surface the failure to the caller.
                if callback:
                    result['message'] = str(e)
                    callback(ip, result)

    async def download_task(self, ip: str, taskid: str, pid: str, callback: Callable):
        """Fetch one task archive from one robot and report via ``callback(ip, result)``.

        Mirrors ``download_log``: ``result`` is
        ``{"success": bool, "message": extracted_path_or_error}``.
        """
        ppid = "*" if pid == "" else pid  # "*" = any process id
        result = {"success": False}
        url = f"http://{ip}:8000/task/{ppid}/{taskid}"
        async with aiohttp.ClientSession() as client:
            try:
                async with client.get(url) as resp:
                    if resp.status == 200:
                        data = await resp.read()
                        # BUG FIX: was FILE_TYPE_LOG — this endpoint serves
                        # task archives.
                        result['message'] = self.unzip_file(ip, FILE_TYPE_TASK, BytesIO(data))
                        result["success"] = True
                    else:
                        rs = await resp.json()
                        result['message'] = rs['detail']
                    if callback:
                        callback(ip, result)
            except Exception as e:
                if callback:
                    result['message'] = str(e)
                    callback(ip, result)

    def get_robot_log(self, ips: List[str], content: str, pid: str, file_date_start: str, file_date_end: str, callback: Callable):
        """Synchronous fan-out: download logs from every IP on ``self.loop``.

        The per-download callback is re-keyed from the IP string to its
        index within *ips* before invoking the caller's *callback*.
        """
        print(ips)
        if ips:
            asyncio.set_event_loop(self.loop)
            tasks = [asyncio.ensure_future(self.download_log(ip, content, pid, file_date_start, file_date_end,
                                                             lambda ip, res: callback(ips.index(ip), res))) for ip in ips]
            self.loop.run_until_complete(asyncio.wait(tasks))

    async def get_robot_log_async(self, ips: List[str], content: str, pid: str, file_date_start: str, file_date_end: str, callback: Callable):
        """Async fan-out of ``download_log`` over *ips*; callback gets the IP's index."""
        if ips:
            tasks = [asyncio.create_task(self.download_log(ip, content, pid, file_date_start, file_date_end,
                                                           lambda ip, res: callback(ips.index(ip), res))) for ip in ips]
            await asyncio.gather(*tasks)

    async def get_robot_task_async(self, ips: List[str], pid: str, taskid: str, callback: Callable):
        """Async fan-out of ``download_task`` over *ips*; callback gets the IP's index."""
        if ips:
            tasks = [asyncio.create_task(self.download_task(ip, taskid, pid,
                                                            lambda ip, res: callback(ips.index(ip), res))) for ip in ips]
            await asyncio.gather(*tasks)

    def get_robot_task(self, ips: List[str], content: str, pid: str, file_date: str):
        """Incomplete: resolves the target IP list (falling back to the
        configured robots) but does not download anything yet.  TODO."""
        if not ips:
            ips = FileFetchControl.robots
        print(ips)

if __name__ == "__main__":
    import time
    # callback = lambda index,result: print(index,result)
    # f = FileFetchControl()
    # f.get_robot_log(['127.0.0.1'], '807867', '', '20210523', None)
    # time.sleep(8)
    # f.get_robot_log(['127.0.0.1'], '807867', '', '20210523', None)
    async def test():
        url = 'http://10.96.10.25:8000'
        result = {}
        async with aiohttp.ClientSession() as client:
            try:
                async with client.get(url) as resp:
                    print(resp.content)
                    if resp.status == 200:
                        data = await resp.read()
                    else:
                        rs = await resp.json()
            except Exception as e:
                print("异常了",e)

    asyncio.run(test())
