# -*- coding: utf-8 -*-
import asyncio
import aiohttp
import logging
import os
import time

from aiohttp import ClientSession
from aiohttp import TCPConnector
from aiohttp_socks import ProxyConnector

from .api import task_creat, task_comsume, task_status, download_mini, size, get_session
from .utils import resource_path, loaddata


async def newInfos(url: str, outputPath: str, length: int, chunkSize: int=6059)->dict:
    """Build the per-chunk download plan for a file of ``length`` bytes.

    Splits the byte range [0, length) into at most ``chunkSize``-byte chunks
    and returns a dict mapping chunk index -> chunk-state record consumed by
    the downloader tasks.  A zero-length file still yields one empty chunk so
    the download pipeline has at least one work item.

    Args:
        url: Source URL every chunk is fetched from.
        outputPath: Destination file the chunk data is written into.
        length: Total file size in bytes (expected >= 0).
        chunkSize: Maximum number of bytes per chunk.

    Returns:
        dict[int, dict]: chunk index -> state record with keys
        Key/Url/Output/Start/Length/DownLen/Scale/Status/Error/Retry.
    """
    def _record(key: int, start: int, size: int) -> dict:
        # Status -1 means "not started"; Retry is the remaining attempt budget.
        return {
            "Key": key,
            "Url": url,
            "Output": outputPath,
            "Start": start,
            "Length": size,
            "DownLen": 0,
            "Scale": 0.0,
            "Status": -1,
            "Error": None,
            "Retry": 5,
        }

    # Ceiling division without floats: chunks needed to cover `length` bytes.
    chunks = -(-length // chunkSize)
    infos = {}
    for index in range(chunks):
        start = index * chunkSize
        # Last valid byte offset is length - 1; clamp the final chunk.
        # (The previous `end > length` comparison over-read one byte whenever
        # (index + 1) * chunkSize == length + 1.)
        end = min((index + 1) * chunkSize - 1, length - 1)
        infos[index] = _record(index, start, end - start + 1)
    if not infos:
        # Zero-byte file: emit a single empty placeholder chunk.
        infos[0] = _record(0, 0, 0)
    return infos


async def download(url: str, outputPath: str, *, proxy: str=None, timeout: int=300, headers:dict=None, chunkSize: int = 60590, threadNum: int = 22, verbose: bool=False):
    """Download ``url`` into ``outputPath`` using parallel chunked requests.

    Resumes from the ``{outputPath}.json`` checkpoint when one exists;
    otherwise probes the remote size, pre-allocates the output file and
    builds a fresh chunk plan via ``newInfos``.

    Args:
        url: Source URL.
        outputPath: Destination file; a ``.json`` sibling stores progress.
        proxy: Optional proxy URL, forwarded to ``get_session``.
        timeout: Total request timeout in seconds.
        headers: Optional extra HTTP headers.
        chunkSize: Bytes per download chunk.
        threadNum: Max concurrent consumer tasks (capped at chunk count).
        verbose: Forwarded to the status-reporting task.
    """
    # NOTE: the previous version built a TCPConnector/ProxyConnector here and
    # never used (or closed) it — the session is fully configured by
    # get_session(proxy=...) below, so the dead connector is removed.
    if proxy:
        logging.info(f'[+]proxy: {proxy}')
    # Only the total timeout is bounded; the per-phase timeouts stay unlimited.
    timeout = {'total': timeout, 'connect': None, 'sock_connect': None, 'sock_read': None}
    async with get_session(proxy=proxy, timeout=timeout, headers=headers) as session:
        # Resume from an on-disk checkpoint if one sits next to the output file.
        infos = loaddata(f"{outputPath}.json")
        if not infos:
            length, status = await size(url, session)
            if not status:
                logging.warning(f'[-]failed to head: {url}')
                return
            if length < 0:
                # Server did not report a usable Content-Length.
                length = 0
            if not os.path.exists(outputPath):
                parent = os.path.dirname(outputPath)
                if parent and not os.path.exists(parent):
                    os.makedirs(parent, exist_ok=True)
                # Pre-allocate the file so chunks can be written at offsets.
                with open(outputPath, "wb") as fp:
                    fp.truncate(length)
            infos = await newInfos(url, outputPath, length, chunkSize)
        # Never spawn more consumers than there are chunks.
        if len(infos) < threadNum:
            threadNum = len(infos)
        # Re-queue chunks that previously failed (-2) or were in flight (0).
        for info in infos.values():
            if info["Status"] in (-2, 0):
                info["Status"] = -1
                info["Retry"] = 10
                info["Error"] = None

        tasks = asyncio.Queue(threadNum)
        create = asyncio.Queue(threadNum)
        status = asyncio.Queue()
        consumers = [task_comsume(i, session, tasks, status, create) for i in range(threadNum)]
        logging.info("开始下载")
        # Producer + status reporter + N consumers run to completion together.
        await asyncio.gather(
            task_creat(infos, threadNum, tasks, status, create),
            task_status(outputPath, infos, status, verbose=verbose),
            *consumers,
        )
        logging.info("下载完成")

async def new_file_infos(url: str, outputPath: str, session: ClientSession, *, chunkSize: int=60950, threadNum: int = 22, verbose: bool=False)->dict:
    """Probe the remote file size, pre-allocate the output file, and return a chunk plan.

    Issues a size/HEAD probe over the supplied session; on failure logs a
    warning and returns an empty dict.  Otherwise ensures the destination
    directory exists, pre-allocates ``outputPath`` to the reported size when
    the file is absent, and delegates to ``newInfos`` for the chunk records.
    """
    total, ok = await size(url, session)
    if not ok:
        logging.warning(f'[-]failed to head: {url}')
        return {}
    # A negative (unknown) size is treated as an empty file.
    total = max(total, 0)
    if not os.path.exists(outputPath):
        parent = os.path.dirname(outputPath)
        if parent and not os.path.exists(parent):
            os.makedirs(parent, exist_ok=True)
        # Reserve the full file so chunks can later be written at offsets.
        with open(outputPath, "wb") as fp:
            fp.truncate(total)
    return await newInfos(url, outputPath, total, chunkSize)


async def download_sync(url: str, outputPath: str, *, proxy: str=None, timeout: int=300, headers: dict=None):
    """Download ``url`` to ``outputPath`` with a single request (no chunking).

    Args:
        url: Source URL.
        outputPath: Destination file path.
        proxy: Optional proxy URL, forwarded to ``get_session``.
        timeout: Total request timeout in seconds.
        headers: Optional extra HTTP headers.

    Returns:
        Whatever ``download_mini`` returns for this transfer.
    """
    # NOTE: the previous version instantiated a TCPConnector/ProxyConnector
    # that was never passed to the session and never closed (a leak); the
    # session is fully configured by get_session(proxy=...), so it is removed.
    if proxy:
        logging.info(f'[+]proxy: {proxy}')
    # Only the total timeout is bounded; the per-phase timeouts stay unlimited.
    timeout = {'total': timeout, 'connect': None, 'sock_connect': None, 'sock_read': None}
    async with get_session(proxy=proxy, timeout=timeout, headers=headers) as session:
        return await download_mini(url, outputPath, session)
