#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Project ：t-player
@File    ：download.py
@Author  ：kehao
@Date    ：2021/8/1 22:10
"""
import json
import logging
import os
import warnings
from urllib.parse import urlparse

import requests
import urllib3

# Silence TLS certificate warnings: the downloads below deliberately use
# verify=False, which would otherwise spam InsecureRequestWarning per request.
urllib3.disable_warnings()
warnings.filterwarnings("ignore")
# Root logger; level/handlers are expected to be configured by the application.
logger = logging.getLogger()

# Directory where downloaded files are cached (relative to the working dir).
cache_dir = '.cache'
headers = {
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36'
}

# Optional proxy configuration, read from ./proxy.json next to the working dir.
# Expected shape (all keys optional), e.g.:
#   {"enable": "true", "http": "http://...", "https": "http://...",
#    "no_proxy": "example.com;internal.net"}
# Missing file simply means "no proxy".
try:
    with open('./proxy.json', mode='rt', encoding='utf8') as f:
        proxies = json.load(f)
except FileNotFoundError:
    proxies = {}

# Hosts (suffix-matched) that bypass the proxy. Filter out empty entries:
# ''.split(';') yields [''], and every hostname endswith('') — which would
# silently disable the proxy for ALL hosts when 'no_proxy' is unset.
no_proxies = [host for host in proxies.get('no_proxy', '').split(';') if host]
# Proxying is opt-in via the "enable" key (case-insensitive "true").
enable_proxy = proxies.get('enable', 'false').lower() == 'true'


def cache_file(filename):
    """Return the cache path for *filename*, creating the cache dir if needed.

    Uses makedirs(exist_ok=True) instead of an exists()/mkdir() pair: the
    check-then-create form races with concurrent callers (mkdir raises
    FileExistsError if the directory appears between the two calls).
    """
    os.makedirs(cache_dir, exist_ok=True)
    return os.path.join(cache_dir, filename)


def with_proxy(cc):
    """Return True when host *cc* (a netloc string) should go via the proxy.

    False when proxying is disabled, or when *cc* suffix-matches any
    no_proxies entry. Empty/falsy entries are skipped: every string
    endswith('') is True, so a stray '' entry (the result of splitting an
    unset 'no_proxy' value) would otherwise bypass the proxy for ALL hosts.
    """
    if not enable_proxy:
        return False
    return not any(cc.endswith(suffix) for suffix in no_proxies if suffix)


def download(filename, url):
    """Download *url* into the cache as *filename* and return the cache path.

    Serves an existing cache file without re-downloading. On failure the
    error is logged and the (then non-existent) cache path is still
    returned, matching the original best-effort contract.

    Raises:
        ValueError: if *filename* is empty (was an ``assert``, which is
            stripped under ``python -O``).
    """
    if not filename:
        raise ValueError('filename must be non-empty')
    target = cache_file(filename)
    if os.path.exists(target):
        return target
    try:
        # stream=True: write chunks as they arrive instead of buffering the
        # whole body in memory first; `with r:` releases the connection.
        if with_proxy(urlparse(url).netloc):
            logger.info('downloading %s, with proxy', filename)
            r = requests.get(url=url, headers=headers, verify=False,
                             timeout=(30, 120), proxies=proxies, stream=True)
        else:
            logger.info('downloading %s', filename)
            r = requests.get(url=url, headers=headers, verify=False,
                             timeout=(30, 120), stream=True)
        with r:
            r.raise_for_status()
            with open(target, "wb") as f:
                for chunk in r.iter_content(chunk_size=1024):
                    f.write(chunk)
    # Narrowed from BaseException, which also swallowed KeyboardInterrupt
    # and SystemExit.
    except (requests.RequestException, OSError) as e:
        logger.error("failed to download %s: %s", filename, e)
        # Drop any partially written file so a failed download is not
        # served as a cache hit on the next call.
        try:
            os.remove(target)
        except OSError:
            pass
    return target
