# -*- coding: utf-8 -*-
import json
import sys
from pathlib import Path

from loguru import logger

from . import http
from .constants import STL_PATH, ZIP_PATH
from .utils import _filter_name


def parser_info(uri, params, file_name):
    """
    Fetch one page of search results, persist the raw hits to *file_name*
    and log name/URL information for every hit's creator.

    Exits the process when the API has no more pages or returns an error
    payload.

    :param uri: API endpoint passed through to ``http.fetch``
    :param params: query parameters for the request (e.g. the page number)
    :param file_name: path of the JSON file the raw hits are dumped to
    """

    data = http.fetch(uri, params=params).json()
    hits = data['hits']

    # Persist the raw hits; the context manager guarantees the handle is
    # closed (the original leaked an open file object).
    with open(file_name, "w") as fp:
        json.dump(hits, fp)

    # An empty page means we walked past the last page of results.
    if not hits:
        logger.info("No more pages- Finishing the program")
        sys.exit()

    # Error payloads are mappings containing an "error" key; iterating such
    # a mapping yields the literal string "error".
    for n in hits:
        if n == "error":
            logger.info("No more pages- Finishing the program")
            sys.exit()

    # BUG FIX: the original logged len(data) — the number of keys in the
    # response envelope — instead of the number of hits being parsed.
    logger.info("Parsing data from {} objects from thingiverse".format(len(hits)))

    for item in hits:
        creator = item["creator"]

        logger.info("{name} -> {public_url}".format(**item))

        # First and last name of the creator (either may be empty).
        # TODO: if both are empty, fall back to the username; check whether
        # the name is already known before calling the Twitter API.
        logger.info("Name: {first_name} {last_name}".format(**creator))


class Fetch:
    """Downloads thing listings and their STL/ZIP payloads from Thingiverse."""

    def __init__(self, page_num=1):
        # Total number of result pages batch() will walk through.
        self.page_num = page_num

    def batch(self, **kwargs):
        """Download every page from 1 to ``page_num`` (API pages are 1-based)."""
        for page in range(1, self.page_num + 1):
            self.download(params={'page': page}, **kwargs)

    def download(self, uri, params=None, file_name=None, mode="none", zip_flag=False, all_files=False):
        """
        Fetch one page of objects, dump the raw response to *file_name* and
        download each object either as a ZIP bundle or as individual STLs.

        Exits the process when the API has no more pages or returns an error
        payload.

        :param uri: API endpoint passed through to ``http.fetch``
        :param params: query parameters for the request (e.g. ``{'page': n}``)
        :param file_name: path of the JSON file the raw response is dumped to
        :param mode: ``"search"`` when the payload nests objects under "hits"
        :param zip_flag: when True, download the Thingiverse ZIP bundle
        :param all_files: when False, only ``.stl`` files are downloaded
        """

        resp = http.fetch(uri, params=params)
        data = resp.json()

        # Persist the raw response; the context manager guarantees the
        # handle is closed (the original leaked an open file object).
        with open(file_name, "w") as fp:
            json.dump(data, fp, indent=4, sort_keys=True, ensure_ascii=False)

        # Search responses nest the objects under a "hits" key.
        items = data.get("hits") if mode == "search" else data

        # An empty page means we walked past the last page of results.
        if not items:
            logger.info("No more pages- Finishing the program")
            sys.exit()

        # Error payloads are mappings containing an "error" key; iterating
        # such a mapping yields the literal string "error".
        for key in items:
            if key == "error":
                logger.info("No more pages- Finishing the program")
                sys.exit()

        logger.info(f"Downloading {len(items)} objects from thingiverse")

        for value in items:
            object_id = str(value["id"])

            logger.info("{name} -> {public_url}".format(**value))
            logger.info(f"Object id: {object_id}")

            # Build the destination paths for the STLs and the ZIP bundle.
            file_path = Path(STL_PATH, _filter_name(value["name"]))
            file_dist = Path(ZIP_PATH, _filter_name(value["name"]))
            file_dist = str(file_dist) + ".zip"

            # Create the target directories. exist_ok replaces the racy
            # "is_dir() or mkdir()" pattern of the original.
            # NOTE(review): this also creates a *directory* at the .zip path,
            # exactly as the original did — presumably http.download saves
            # into it; confirm against the http helper.
            Path(file_path).mkdir(parents=True, exist_ok=True)
            Path(file_dist).mkdir(parents=True, exist_ok=True)

            creator = value["creator"]

            # Creator name and object name (either name part may be empty).
            logger.info("{first_name} {last_name}".format(**creator))
            logger.info("    " + value["name"])

            if zip_flag:  # Download Thingiverse ZIP files
                self.parse_zip(object_id, file_dist)
            else:
                self.parse_stl(object_id, file_path, all_files)

    @staticmethod
    def parse_zip(object_id, file_dist):
        """Download the ready-made ZIP bundle for *object_id* to *file_dist*."""
        resp = http.fetch(f'things/{object_id}/package-url')
        info = resp.json()

        logger.info("Downloading ZIP file")
        logger.warning(info.get("public_url"))

        # The original wrapped this in ``try/except Exception as e: raise e``,
        # which changed nothing except truncating the traceback; letting
        # errors propagate naturally is equivalent and clearer.
        http.download(info.get("public_url"), file_dist)

    @staticmethod
    def parse_stl(object_id, file_path, all_files=False):
        """
        Download the files attached to *object_id* into *file_path*.

        :param object_id: Thingiverse thing id (string)
        :param file_path: destination directory for the downloads
        :param all_files: when False, only files whose name ends in ``.stl``
            (case-insensitive) are downloaded
        :return: True on completion
        """
        resp = http.fetch(f'things/{object_id}/files')
        info = resp.json()

        for entry in info:
            if all_files:
                # Download every attached file.
                logger.info("    " + entry["name"])
                http.download(entry["download_url"], file_path)
            # BUG FIX: the original used ``".stl" in name``, which also
            # matched names like "model.stl.bak" and missed uppercase ".STL".
            elif entry["name"].lower().endswith(".stl"):
                logger.info("    " + entry["name"])
                logger.info("    " + entry["download_url"])

                # The download URL answers with a redirect; follow it
                # manually via the Location header.
                response = http.fetch(entry["download_url"])
                location = response.headers.get('location')
                http.download(location, file_path)

        return True
