from Crawl import crawl_pkg, crawl_pkg_version, crawl_pkg_dep, crawl_proj_info, crawl_contributors, crawl_vulnerability
from analyse_dep import *
from deal_data import save_to_db
import global_var
import numpy as np
from random import uniform
import time


if __name__ == '__main__':
    # Driver script: resume the package crawl from the last checkpoint,
    # crawl project/contributor/version/dependency/vulnerability data per
    # package, enqueue newly discovered dependencies, and checkpoint every
    # 100 packages (npy files + dependency analysis + database save).

    # Crawl an initial base set of package names (one-time bootstrap).
    # crawl_pkg(10000)

    # Restore persisted crawler state from local .npy checkpoints.
    global_var.pkg_list = np.load("./pkg_list.npy").tolist()
    global_var.ver_empty_list = np.load("./ver_empty_list.npy").tolist()
    global_var.abnormal_pkg_list = np.load("./abnormal_pkg_list.npy").tolist()
    global_var.error_pkg_list = np.load("./error_pkg_list.npy").tolist()
    global_var.proj_info = np.load("./proj_info.npy", allow_pickle=True).item()
    pkg_set = set(global_var.pkg_list)  # O(1) membership test for dedup
    print("total num: %s" % len(global_var.pkg_list))  # fixed typo "totoal"

    # break_point stores three values:
    #   [0] index of the next package name to process,
    #   [1] index of the last checkpoint file saved into the database,
    #   [2] index of the most recent checkpoint file written.
    break_point = np.load("./break_point.npy").tolist()
    start = break_point[0]           # package index to resume from
    file_count = break_point[2] + 1  # index for the next checkpoint file
    print("start:%s" % start)
    print("file_count:%s" % file_count)  # fixed typo "file_cout"

    # Enqueue every not-yet-processed package name.
    pkg_queue_list = global_var.pkg_list[start:]
    print("not crawled num: %s" % len(pkg_queue_list))
    for each in pkg_queue_list:
        global_var.pkg_queue.put(each)

    count = 1  # number of package names processed in this run
    while not global_var.pkg_queue.empty():
        print(count)
        cur_pkg = global_var.pkg_queue.get()
        global_var.proj_info[cur_pkg] = crawl_proj_info(cur_pkg)            # crawl project metadata
        global_var.contributor_dict[cur_pkg] = crawl_contributors(cur_pkg)  # crawl contributor info

        if crawl_pkg_version(cur_pkg) != -1:  # -1 signals the version crawl failed
            # sleep_time = uniform(0.2, 0.4)
            # time.sleep(sleep_time)
            if len(global_var.ver_dict[cur_pkg]) > 300:
                # Too many versions slows down dependency crawling for the
                # whole batch; record the package and analyse it separately
                # later (ver_dict already holds its version list here).
                global_var.abnormal_pkg_list.append(cur_pkg)
                print("%s is abnormal" % cur_pkg)

            else:
                for ver in global_var.ver_dict[cur_pkg]:  # analyse each pkg@version id
                    # Renamed from `id`, which shadowed the builtin.
                    ver_id = cur_pkg + "@" + ver

                    # Crawl dependency info for this version.
                    global_var.pkg_dep.append({'id': ver_id, 'dependence': crawl_pkg_dep(ver_id)})
                    dependencies = global_var.pkg_dep[-1]['dependence']
                    # Deduplicate dependency names and enqueue unseen ones.
                    for dep in dependencies:
                        dep_name = get_dep_name(dep['name'])
                        if dep_name is None or dep_name in pkg_set:
                            continue
                        pkg_set.add(dep_name)
                        global_var.pkg_list.append(dep_name)
                        global_var.pkg_queue.put(dep_name)

                    # Crawl vulnerability info for this version.
                    vuls = crawl_vulnerability(cur_pkg, ver)
                    if len(vuls) != 0:
                        global_var.vul_dict[ver_id] = vuls
                    # Short random pause to throttle requests.
                    sleep_time = uniform(0.1, 0.2)
                    time.sleep(sleep_time)

                    print("%s ok!" % ver_id)

        if count % 100 == 0 or global_var.pkg_queue.empty():  # checkpoint every 100 package names
            # Persist the batch to local .npy files. `with` guarantees the
            # record file is closed even if the write raises (the original
            # leaked the handle on error).
            with open("./record.txt", "a") as record:
                record.write("pkg_id_%s has %s packages\n" % (file_count, len(global_var.pkg_id)))
            np.save("./pkg_id/pkg_id_%s.npy" % file_count, global_var.pkg_id)
            np.save("./pkg_ver/pkg_ver_%s.npy" % file_count, global_var.ver_dict)
            np.save("./pkg_dep/pkg_dep_%s.npy" % file_count, global_var.pkg_dep)
            np.save("./contributor/contributor_%s.npy" % file_count, global_var.contributor_dict)
            # np.save appends ".npy" automatically when absent, so spelling it
            # out writes the identical file while matching the other calls.
            np.save("./vuls/vuls_%s.npy" % file_count, global_var.vul_dict)
            np.save("./pkg_list.npy", global_var.pkg_list)
            np.save("./abnormal_pkg_list.npy", global_var.abnormal_pkg_list)
            np.save("./ver_empty_list.npy", global_var.ver_empty_list)
            np.save("./error_pkg_list.npy", global_var.error_pkg_list)
            np.save("./break_point.npy", [start + count, break_point[1], file_count])
            np.save("./proj_info.npy", global_var.proj_info)
            global_var.clear_all()
            print("batch %s is ok!" % file_count)
            print("totally has %s abnormal package" % len(global_var.abnormal_pkg_list))

            # Analyse dependencies and generate the dependency files for this batch.
            get_dep_relations(file_count, file_count + 1)

            # Store the batch into the database.
            save_to_db()

            file_count += 1

        count += 1
        print("queue length: %s" % len(global_var.pkg_queue.queue))