# import csv
# import crawler
# import os.path as hasOrNo
#
# def make_del():
#     cve_names = []
#     with open('cve/cve_names.csv') as f:
#         reader = csv.reader(f, delimiter=' ', quotechar='|')
#         for item in reader:
#             cve_names.append(item)
#     i = 0
#     links = []
#     infos = []
#     for item in cve_names:
#         print(i)
#         url = 'https://www.cvedetails.com/cve/' + item[0]
#         info, link = crawler.get_info(url)
#         links.append(link)
#         infos.append(info)
#         i += 1
#
#     for item in infos:
#         source_datas = []
#         fineName = item["CVE编号"][:8]
#         header = list(item.keys())
#         source_datas.append(item)
#         if hasOrNo.exists(fineName):
#             with open(fineName, "a+", encoding='utf-8', newline='') as f:
#                 writer = csv.DictWriter(f, fieldnames=header)
#                 writer.writerows(source_datas)
#         else:
#             with open(fineName, "a+", encoding='utf-8', newline='') as f:
#                 writer = csv.DictWriter(f, fieldnames=header)
#                 writer.writeheader()
#                 writer.writerows(source_datas)
#
#     for item in links:
#         source_datas = []
#         fileName = item["CVE编号"][:8] + "_link"
#         header = list(item.keys())
#         if hasOrNo.exists(fileName):
#             with open(fileName, "a+", encoding='utf-8', newline='') as f:
#                 writer = csv.DictWriter(f, fieldnames=header)
#                 writer.writerows(source_datas)
#         else:
#             with open(fileName, "a+", encoding='utf-8', newline='') as f:
#                 writer = csv.DictWriter(f, fieldnames=header)
#                 writer.writeheader()
#                 writer.writerows(source_datas)
#
#
# if __name__ == '__main__':
#     make_del()
import csv
import crawler
import random
import time


def make_del(names_path='cve/cve_names_9.csv'):
    """Crawl the cvedetails.com page for every CVE id listed in *names_path*.

    The input file is read as space-delimited CSV with ``|`` as the quote
    character; the first field of each row is taken to be the CVE identifier.
    For each id the corresponding detail page is fetched via
    ``crawler.get_info()``, and the loop then sleeps a random 5-18 seconds
    between requests as basic rate limiting.

    Args:
        names_path: Path of the CSV file with CVE ids.  Defaults to the
            previously hard-coded ``'cve/cve_names_9.csv'``.
    """
    # newline='' is the documented way to open files for the csv module.
    with open(names_path, newline='') as f:
        reader = csv.reader(f, delimiter=' ', quotechar='|')
        # Skip blank lines: csv yields [] for them, and row[0] would
        # raise IndexError below.
        cve_names = [row for row in reader if row]

    for i, row in enumerate(cve_names):
        # Progress indicator: index of the CVE currently being fetched.
        print(i)
        url = 'https://www.cvedetails.com/cve/' + row[0]
        crawler.get_info(url)
        # Randomized pause so requests are not fired in a tight burst.
        sleep_time = random.randint(5, 18)
        print(sleep_time)
        time.sleep(sleep_time)


# Script entry point: only crawl when executed directly, not on import.
if __name__ == "__main__":
    make_del()