# -*- coding: utf-8 -*-
import json
import time
import socket
import urllib.request as api_request
import urllib.error as api_error

from crawler.tools import get_random_user_agent, get_random_lib_key
from project.storage import Storage


class PackageCrawler:
    """Crawl Maven package metadata and per-version dependency lists
    from the libraries.io API."""

    def __init__(self, pkg):
        # Fully-qualified Maven package name (e.g. "group:artifact").
        self.pkg = pkg
        # Version numbers discovered by crawl_pkg_info(); empty until then.
        self.version_list = []

    def crawl_pkg_info(self):
        """Fetch package metadata for self.pkg from the libraries.io API.

        Retries indefinitely on HTTP 429 (rate limit) and on network
        errors, rebuilding the request with a fresh random API key and
        User-Agent on each attempt. On any other HTTP error the package
        is recorded in Storage.err_pkg_list and None is returned.

        Returns:
            dict | None: the decoded JSON response on success (also
            caches the version numbers on self.version_list), else None.
        """
        while True:
            # Build the request anew each attempt so a retry after 429
            # uses a different API key / User-Agent.
            api_key = get_random_lib_key()
            url = f"https://libraries.io/api/Maven/{self.pkg}?api_key={api_key}"
            request = api_request.Request(
                url, headers={'User-Agent': get_random_user_agent()}
            )
            try:
                # 'with' closes the HTTP response even if json.load
                # raises (previously the response was never closed).
                with api_request.urlopen(request, timeout=30) as response:
                    response_dict = json.load(response)
                break
            except api_error.HTTPError as e:
                if e.code == 429:  # rate-limited: back off, then retry
                    print(f"错误429: 爬取过快，尝试重新爬取包 {self.pkg}")
                    time.sleep(30)
                    continue
                # Any other HTTP error: record the package and give up.
                print(f"错误{e.code}: 无法通过API请求获取包 {self.pkg}")
                Storage.err_pkg_list.append(self.pkg)
                return None
            except socket.error:  # transient network problem: retry
                print(f"出错啦! 尝试重新爬取包 {self.pkg}")
                time.sleep(30)

        # Cache every version number for get_version_list().
        self.version_list = [v['number'] for v in response_dict['versions']]
        return response_dict

    def crawl_version_detail(self, pkg_id):
        """Fetch the dependency list for one package version.

        Args:
            pkg_id: "name@version" identifier of the package version.

        Retries indefinitely on HTTP 429 and on network errors (fresh
        API key / User-Agent per attempt); returns None on 404 or any
        other HTTP error.

        Returns:
            dict | None: the decoded dependency JSON, or None on failure.
        """
        # maxsplit=1 so a '@' inside the version string stays intact,
        # and the string is only split once.
        name, version = pkg_id.split('@', 1)
        while True:
            api_key = get_random_lib_key()
            url = f"https://libraries.io/api/Maven/{name}/{version}/dependencies?api_key={api_key}"
            print(url)
            request = api_request.Request(
                url, headers={'User-Agent': get_random_user_agent()}
            )
            try:
                # 'with' guarantees the HTTP response is closed.
                with api_request.urlopen(request, timeout=30) as response:
                    return json.load(response)
            except api_error.HTTPError as e:
                if e.code == 404:  # no dependency data for this version
                    print(f"错误404: 爬取依赖失败 {name}@{version}")
                    return None
                if e.code == 429:  # rate-limited: back off, then retry
                    print(f"错误429: 爬取过快，尝试重新爬取依赖 {name}@{version}")
                    time.sleep(30)
                    continue
                print(f"错误{e.code}: 无法通过API请求获取依赖 {name}@{version}")
                return None
            except socket.error:  # transient network problem: retry
                print(f"出错啦! 尝试重新爬取依赖 {name}@{version}")
                time.sleep(30)

    def get_version_list(self):
        """Return the version numbers cached by crawl_pkg_info()."""
        return self.version_list