from urllib.request import urlopen
import requests
import json
import logging
import base64

# Module-level logger used by every fetch helper in this script.
LOGGER = logging.getLogger("NODES")
# Log format: timestamp followed by the message body.
FORMAT = "%(asctime)s %(message)s"
logging.basicConfig(format=FORMAT)
# DEBUG level so all diagnostic output is visible when run as a script.
LOGGER.setLevel(logging.DEBUG)


def proxypool_urls(filename):
    """Load the proxy-pool URL mapping from a JSON config file.

    :param filename: path to a JSON file mapping type names to URLs.
    :return: the parsed JSON object (expected to be a dict).
    """
    # Explicit encoding so the config parses identically on every platform.
    with open(filename, "r", encoding="utf-8") as f:
        return json.load(f)


def fetch_node_from_url(url):
    """Fetch *url* and return the response body decoded to text.

    :param url: subscription URL to GET.
    :return: decoded response body as str.
    :raises requests.HTTPError: on a non-200 response.

    Fixes two defects in the original: on a non-200 status `content` was
    never assigned, so the final `return` raised UnboundLocalError; and
    `req.encoding` can be None when the server omits a charset, which
    made `.decode(None)` raise TypeError.
    """
    resp = requests.get(url)
    if resp.status_code != 200:
        LOGGER.error("Error to fetch node from {}".format(url))
        resp.raise_for_status()
    # Fall back to UTF-8 when the server did not declare an encoding.
    return resp.content.decode(resp.encoding or "utf-8")


def is_base64_code(s):
    """Check whether *s* looks like output of base64.b64encode.

    :param s: candidate string or bytes.
    :return: True when every character is in the Base64 alphabet and the
        length is a multiple of 4, else False.
    :raises AssertionError: when *s* is neither str nor bytes.

    Bug fix: the original assert read
    ``not isinstance(s, str) or not isinstance(s, bytes)``, which is True
    for every possible object, so the type check never fired.
    """
    assert isinstance(s, (str, bytes)), \
        "must be string or bytes not now is {}".format(type(s))
    if isinstance(s, bytes):
        s = s.decode("utf-8")

    # Full Base64 alphabet plus the '=' padding char; a set gives O(1)
    # membership tests (replaces the original 67-element list).
    _base64_chars = set(
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        "abcdefghijklmnopqrstuvwxyz"
        "0123456789+/="
    )
    # Valid Base64 is always padded to a multiple of 4 characters.
    if len(s) % 4 != 0:
        return False
    return all(ch in _base64_chars for ch in s)


def fetch_nodes(info, cache="cached_node.json"):
    """Download every node list in *info* and append it to the cache file.

    :param info: mapping of type name -> subscription URL; only the URLs
        are used.
    :param cache: path of the binary cache file to append to.
    """
    # "ab" (was "ab+"): the file is only ever written, never read back.
    with open(cache, "ab") as f:
        # Keys were never used, so iterate values only.
        for url in info.values():
            content = fetch_node_from_url(url)
            if is_base64_code(content):
                # Base64 payloads are decoded to raw bytes before caching.
                f.write(decode_base64(content))
            else:
                f.write(content.encode('utf-8'))


def decode_base64(info):
    """Base64-decode *info*, accepting either str or bytes.

    :param info: Base64 payload as str or bytes.
    :return: the decoded raw bytes.
    """
    raw = info.encode("utf-8") if isinstance(info, str) else info
    return base64.b64decode(raw)


def encode_to_vmess(info):
    """Base64-encode *info* for use as a vmess-style payload.

    :param info: payload as str or bytes.
    :return: the Base64-encoded bytes.
    :raises AssertionError: when *info* is neither str nor bytes.
    """
    assert isinstance(info, (str, bytes)), \
        "info must be str or bytes but now is {}".format(type(info))
    payload = info.encode("utf-8") if isinstance(info, str) else info
    return base64.b64encode(payload)


if __name__ == "__main__":
    # Read the subscription URL map and append all fetched nodes
    # to the local cache file.
    config_path = "urls.json"
    url_map = proxypool_urls(config_path)
    fetch_nodes(url_map, "cached_node.json")
