from queue import Queue
import numpy as np
from package import Package
from project import Project
from contributor import Contributor

pkg_list = []  # all known package names
proj_info = {}   # project metadata, keyed by package name
pkg_queue = Queue()  # package names not yet analyzed / not yet stored in the database
abnormal_pkg_list = []  # packages with too many versions
# Currently known mapping of license name -> level
license_dict = {'Unlicense': 0,
                'AGPL-3.0': 5,
                'GPL-2.0': 4, 'GPL-3.0': 4, 'EPL-2.0': 4, 'MPL-2.0': 4,
                'LGPL-3.0': 3, 'LGPL-2.1': 3,
                'Apache-2.0': 2, 'CC0-1.0': 2, 'BSD-3-Clause': 2, 'BSD-2-Clause': 2, 'MIT': 2,
                }

pkg_id = [] # all node names stored in the database ("name@version" or "name")
nodes = []  # Package objects needed for database nodes
ver_empty_list = [] # packages with an empty version list
error_pkg_list = [] # packages whose crawl raised an error
pkg_dep = [] # dependency info gathered by the crawler
pkg_dep_pkg = [] # package-to-package dependency relations
ver_dict = {}  # package name -> versions; ver_dict[a] is the version sequence of package a, filled when package names are analyzed separately
contributor_dict ={} # project name -> contributor list
vul_dict = {} # package id -> vulnerability list

def load_pkg_list():
    """Load and return the full list of package names from ./pkg_list.npy."""
    return np.load("./pkg_list.npy").tolist()

def load_abnormal():
    """Load the abnormal-package list (packages with too many versions).

    Prints the list size, as before, and additionally returns the list so
    callers can use it (the original discarded it, unlike load_pkg_list).

    Returns:
        list: package names read from ./abnormal_pkg_list.npy.
    """
    abnormal_pkg_list = np.load("./abnormal_pkg_list.npy").tolist()
    print(len(abnormal_pkg_list))
    return abnormal_pkg_list

def load_ver_empty_list():
    """Load the list of packages that have an empty version list.

    Prints the list size, as before, and additionally returns the list so
    callers can use it (the original discarded it, unlike load_pkg_list).

    Returns:
        list: package names read from ./ver_empty_list.npy.
    """
    ver_empty_list = np.load("./ver_empty_list.npy").tolist()
    print(len(ver_empty_list))
    return ver_empty_list

def load_error_pkg():
    """Load the list of packages whose crawl raised an error.

    Prints the list and its size, as before, and additionally returns the
    list so callers can use it (the original discarded it).

    Returns:
        list: package names read from ./error_pkg_list.npy.
    """
    error_pkg_list = np.load("./error_pkg_list.npy").tolist()
    print(error_pkg_list)
    print(len(error_pkg_list))
    return error_pkg_list

def add_error_pkg():
    """Interactively append package names to the module-level error_pkg_list.

    Reads one name per line from stdin until the user enters "exit", then
    persists the accumulated list to ./error_pkg_list.npy.

    Rewritten iteratively: the original recursed once per input line, so a
    long session could exceed Python's recursion limit.
    """
    while True:
        x = input()
        if x == "exit":
            break
        error_pkg_list.append(x)
    # Save only once the user is done, matching the original behavior.
    np.save("./error_pkg_list.npy", error_pkg_list)

def load_pkg_id(filecount):
    """Return the node-name list stored in ./pkg_id/pkg_id_<filecount>.npy."""
    path = "./pkg_id/pkg_id_%s.npy" % filecount
    return np.load(path).tolist()

def load_pkg_node(filecount):
    """Build Package node objects from one pkg_id shard and append them to nodes.

    Entries without '@' (bare package names, no version) are skipped.

    Fix: split on the LAST '@' via rsplit, so a package whose name itself
    contains '@' (e.g. an npm scoped name "@scope/name@1.0.0") keeps the
    trailing segment as the version instead of splitting on the first '@'.
    NOTE(review): assumes entries are "name@version" -- confirm against the
    pkg_id writer.
    """
    pkg_id = np.load("./pkg_id/pkg_id_%s.npy" % filecount).tolist()
    print("now save %s nodes into database" % len(pkg_id))
    for each in pkg_id:
        if '@' not in each:
            continue
        name, version = each.rsplit('@', 1)
        nodes.append(Package(each, name, version))

def load_proj_node():
    """Build a Project node object for every package and append it to nodes.

    Reads the package list and the project-info dict from disk; raises
    KeyError if a package has no entry in proj_info.

    Fixes: the original shadowed the builtin `dict` with a local variable
    and iterated via range(len(...)) instead of iterating directly.
    """
    pkg_list = np.load("./pkg_list.npy").tolist()
    proj_info = np.load("./proj_info.npy", allow_pickle=True).item()
    print("now save %s nodes into database" % len(pkg_list))
    for each in pkg_list:
        nodes.append(Project(each, proj_info[each]))

def load_contri_node():
    """Append a Contributor node object to nodes for every saved contributor."""
    names = np.load("./contributor/contributor_list.npy", allow_pickle=True).tolist()
    nodes.extend(Contributor(name) for name in names)

def load_ver_dict():
    """Merge every pkg_ver shard on disk into one package -> versions dict.

    The index of the last shard comes from the third entry of
    ./break_point.npy; shards are numbered from 1.
    """
    last_shard = np.load("./break_point.npy").tolist()[2]
    merged = {}
    for shard in range(1, last_shard + 1):
        part = np.load("./pkg_ver/pkg_ver_%s.npy" % shard, allow_pickle=True).item()
        merged.update(part)
    return merged

def load_contri_dict(file_count):
    """Return the project -> contributors mapping stored in shard file_count."""
    path = "./contributor/contributor_%s.npy" % file_count
    return np.load(path, allow_pickle=True).item()

def load_vul_dict(file_count):
    """Return the package-id -> vulnerabilities mapping stored in shard file_count."""
    path = "./vuls/vuls_%s.npy" % file_count
    return np.load(path, allow_pickle=True).item()

def load_pkg_dep_pkg(file_count):
    """Return the package-to-package dependency list stored in shard file_count."""
    fname = "./pkg_dep_pkg_max/pkg_dep_pkg_max_%s.npy" % file_count
    return np.load(fname, allow_pickle=True).tolist()

def clear_all():
    """Empty every per-batch container so the next batch starts fresh.

    NOTE(review): pkg_list, proj_info, abnormal_pkg_list, ver_empty_list
    and error_pkg_list are left untouched, matching the original -- confirm
    that is intentional.
    """
    for container in (pkg_id, ver_dict, pkg_dep, nodes,
                      pkg_dep_pkg, contributor_dict, vul_dict):
        container.clear()