from GlobalVar import globalVar
import requests
import os
import json
import logging


class Crawl:
    """Crawler that walks the npm registry, computing the transitive
    dependency closure of a seed package list and caching every
    package's registry metadata JSON on local disk."""

    def __init__(self):
        # Base URL of the npm registry REST API; package name is appended.
        self.url = 'https://registry.npmjs.org/'
        # Directory where each package's metadata JSON is cached.
        self.root_path = './projects/'
        logging.basicConfig(filename='crawl.log', level=logging.INFO,
                            format='%(asctime)s %(levelname)s: %(message)s')

    def _json_path(self, pkg_name):
        """Return the local cache path for *pkg_name*'s registry JSON.

        Slashes in scoped package names (e.g. '@scope/pkg') are mapped to
        underscores so the cache stays a flat directory of valid filenames.
        """
        return self.root_path + (pkg_name + '.json').replace('/', '_')

    def get_closure(self, continueBefore=False, preCount=0):
        """BFS the dependency closure of the seed packages, downloading each
        package's JSON to the local cache along the way.

        Bookkeeping lives in three lists on ``globalVar``: ``pkg_queue``
        (to visit), ``finished_pkg`` (processed), ``miss_pkg`` (not found /
        failed). Progress is checkpointed every 100 packages via log100.

        Args:
            continueBefore: resume from the numbered checkpoint files that
                log100 wrote at ``count == preCount``.
            preCount: checkpoint number to resume from; also the starting
                value of the progress counter.
        """
        if not os.path.exists('./pkg_queue.txt'):
            # First ever run: fetch the JSON for the seed packages and
            # split them into the work queue vs. the miss list.
            for pkg_name in globalVar.starter_pkgs:
                pkg_json = self.get_json(pkg_name)
                if "error" in pkg_json or "_id" not in pkg_json:
                    globalVar.miss_pkg.append(pkg_name)
                else:
                    globalVar.pkg_queue.append(pkg_name)
            globalVar.save_list(globalVar.pkg_queue, './pkg_queue.txt')
            globalVar.save_list(globalVar.miss_pkg, './miss_pkg.txt')

        count = preCount

        if continueBefore:
            # Resume all three bookkeeping lists from checkpoint `count`.
            globalVar.pkg_queue = globalVar.load_list(f'./pkg_queue_logs/pkg_queue_{count}.txt')
            print(f'loaded ./pkg_queue_logs/pkg_queue_{count}.txt')
            globalVar.miss_pkg = globalVar.load_list(f'./miss_pkg_logs/miss_pkg_{count}.txt')
            print(f'loaded ./miss_pkg_logs/miss_pkg_{count}.txt')
            globalVar.finished_pkg = globalVar.load_list(f'./finished_pkg_logs/finished_pkg_{count}.txt')
            print(f'loaded ./finished_pkg_logs/finished_pkg_{count}.txt')
        else:
            globalVar.pkg_queue = globalVar.load_list('./pkg_queue.txt')
            globalVar.miss_pkg = globalVar.load_list('./miss_pkg.txt')
            # NOTE(review): finished_pkg is deliberately NOT reloaded here —
            # presumably it starts empty on a fresh run; confirm in GlobalVar.

        # Set mirrors of the three bookkeeping lists give O(1) membership
        # tests; the former `x in list` checks made the dependency loop
        # quadratic. The lists remain the source of truth and are mutated
        # exactly as before; every mutation below updates its mirror too.
        queued = set(globalVar.pkg_queue)
        finished = set(globalVar.finished_pkg)
        missed = set(globalVar.miss_pkg)

        # BFS over the dependency graph until the queue drains.
        while globalVar.pkg_queue:
            pkg_name = globalVar.pkg_queue.pop(0)
            queued.discard(pkg_name)
            pkg_json = self.get_json(pkg_name)
            if "error" in pkg_json or "_id" not in pkg_json:
                globalVar.miss_pkg.append(pkg_name)
                missed.add(pkg_name)
                continue
            # A package without version metadata has no dependencies to follow.
            if "versions" not in pkg_json:
                globalVar.finished_pkg.append(pkg_name)
                finished.add(pkg_name)
                continue
            versions = pkg_json['versions']
            for ver_meta in versions.values():
                # Gather both runtime and development dependencies of
                # every published version of this package.
                dep_list = list(ver_meta.get("dependencies", {}))
                dep_list += list(ver_meta.get("devDependencies", {}))
                for sub_pkg_name in dep_list:
                    # Enqueue each dependency at most once across the
                    # queue / finished / missed partitions.
                    if (sub_pkg_name not in queued
                            and sub_pkg_name not in finished
                            and sub_pkg_name not in missed):
                        globalVar.pkg_queue.append(sub_pkg_name)
                        queued.add(sub_pkg_name)
            globalVar.finished_pkg.append(pkg_name)
            finished.add(pkg_name)
            print(f'finished all version dependencies detect for pkg：{pkg_name}')
            logging.info('finished all version dependencies detect for pkg：%s', pkg_name)
            count += 1
            if count % 100 == 0:
                self.log100(count)
        self.log100(count)

    def log100(self, count):
        """Checkpoint: log the sizes of the three bookkeeping lists and save
        each to a numbered snapshot file, so a later run can resume with
        ``get_closure(continueBefore=True, preCount=count)``."""
        print(f"count = {count}")
        logging.info('------------size of pkg_queue: %d------------', len(globalVar.pkg_queue))
        logging.info('------------size of finished_pkg_list: %d------------', len(globalVar.finished_pkg))
        logging.info('------------size of miss_pkg_list: %d------------', len(globalVar.miss_pkg))

        os.makedirs('./finished_pkg_logs', exist_ok=True)
        globalVar.save_list(globalVar.finished_pkg, f'./finished_pkg_logs/finished_pkg_{count}.txt')
        os.makedirs('./pkg_queue_logs', exist_ok=True)
        globalVar.save_list(globalVar.pkg_queue, f'./pkg_queue_logs/pkg_queue_{count}.txt')
        os.makedirs('./miss_pkg_logs', exist_ok=True)
        globalVar.save_list(globalVar.miss_pkg, f'./miss_pkg_logs/miss_pkg_{count}.txt')

    def get_all_dependencies(self):
        # TODO: not yet implemented.
        pass

    def get_json(self, pkg_name):
        """Return *pkg_name*'s registry JSON, preferring the on-disk cache.

        On a cache miss the JSON is downloaded (and cached). A corrupted or
        unreadable cache file is treated as a miss and re-downloaded instead
        of crashing the whole crawl.
        """
        json_filename = self._json_path(pkg_name)
        if not os.path.exists(json_filename):
            return self.download_json(pkg_name)
        try:
            with open(json_filename, 'r', encoding="utf-8") as f:
                return json.load(f)
        except (json.JSONDecodeError, OSError):
            # Truncated/corrupt cache entry: fall back to a fresh download.
            return self.download_json(pkg_name)

    def download_json(self, pkg_name):
        """Fetch *pkg_name*'s metadata from the npm registry and cache it.

        Returns the parsed JSON on success, ``{"error": "Page not found"}``
        when the registry has no such package, and
        ``{"error": "Command error"}`` on any network/parse/write failure
        (matching the error sentinels get_closure checks for).
        """
        json_filename = self._json_path(pkg_name)
        url = self.url + pkg_name
        try:
            req = requests.get(url, timeout=30)
            pkg_json = req.json()
            if "error" in pkg_json or "_id" not in pkg_json:
                return {"error": "Page not found"}
            write_data = json.dumps(pkg_json, ensure_ascii=False, indent=2, separators=(',', ': '))
            os.makedirs(self.root_path, exist_ok=True)
            with open(json_filename, 'w', encoding='utf-8') as f:
                f.write(write_data)
            print(f'finish download pkg_json: {pkg_name}')
            return pkg_json
        except Exception as exc:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt) and the failure cause is now logged;
            # a dead, misspelled `pkg_josn` assignment was removed.
            logging.warning('download failed for %s: %s', pkg_name, exc)
            return {"error": "Command error"}





