#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author  : hu_cl
# @Contact : 760730895@qq.com
# @Date    : 2020/10/16 9:36
# @File    : Process_Thread_leak.py   获取漏洞数据
import random
import config
import pymysql
import requests
import time
from bs4 import BeautifulSoup
from math import floor
import multiprocessing
import sched

# Scheduler used to re-arm the crawl periodically (see the __main__ block).
s = sched.scheduler(time.time, time.sleep)
req_url_base = 'http://www.cnnvd.org.cn'  # base URL of the CNNVD vulnerability site
leak_url = '/web/vulnerability/querylist.tag'  # path of the vulnerability listing page
# HTTP request headers sent with every scrape request; a User-Agent is
# added at runtime in main() from user_agent_list.
req_header = {
    "Host": "www.cnnvd.org.cn",
    "Accept-encoding": "gzip, deflate",
    "Accept-language": "zh-CN,zh;q=0.9",
    # NOTE(review): hard-coded session cookie — presumably expires and must
    # be refreshed manually; confirm whether the site requires it at all.
    "Cookie": "SESSION=9be3292c-e1a1-4122-9d99-31234215cecd; topcookie=a1",
}

# Pool of User-Agent strings; one is picked at random per run (see main())
# to vary the request signature.
user_agent_list = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
    "Chrome/85.0.4183.121 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; "
    "SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
]


class MysqlDb(object):
    """Thin wrapper around one pymysql connection.

    Opens the connection on construction; callers are responsible for
    closing ``self.mysql_db`` when finished.
    """

    def __init__(self, database_name):
        # database_name is a dict of pymysql.connect() keyword arguments
        # (host, user, password, db, ...), e.g. config.MYSQL_CONFIG.
        self.mysql_db = pymysql.connect(**database_name)
        self.mysql_cursor = self.mysql_db.cursor()
        # Rows queued by insert_data_mysql() that have not been flushed yet.
        self.insert_data_list = []

    def execute_sql(self, sql, params=None):
        """Execute *sql* and return True iff it produced at least one row.

        *params* (optional, backward compatible) is handed to the driver
        for placeholder substitution (``%s``), so callers can avoid
        string-built SQL. Returns None when execution fails.
        """
        try:
            self.mysql_cursor.execute(sql, params)
        except Exception as e:
            print(f'执行sql出错, {e}')
        else:
            return bool(self.mysql_cursor.fetchone())

    def insert_data_mysql(self, table, item):
        """REPLACE *item* (a column -> value dict) into *table*.

        Rows are queued on ``insert_data_list`` and flushed in one
        executemany(). A row whose flush failed stays queued and is
        retried on the next call — the original ``len(...) == 1`` check
        left failed rows stranded in the queue forever.
        """
        keys, vals = list(item.keys()), list(item.values())
        field = ','.join(keys)
        placeholders = ','.join(['%s'] * len(item))
        # Table/column names come from trusted call sites; the values go
        # through driver-side escaping via the %s placeholders.
        insert_sql = "replace into " + table + " (" + field + ") VALUES (" + placeholders + ")"
        self.insert_data_list.append(vals)
        if self.insert_data_list:  # flush everything queued, incl. earlier failures
            try:
                self.mysql_cursor.executemany(insert_sql, self.insert_data_list)
            except Exception as e:
                print(f'插入数据出错, {e}')
            else:
                self.mysql_db.commit()
                self.insert_data_list.clear()


def partition(num_list, size):
    """Split *num_list* into consecutive chunks of length *size*.

    The final chunk is shorter when the list length is not a multiple
    of *size*.

        >>> partition([1, 2, 3, 4], 3)
        [[1, 2, 3], [4]]
    """
    chunks = []
    for start in range(0, len(num_list), size):
        chunks.append(num_list[start:start + size])
    return chunks


def get_nums(req_url, header):
    """Fetch the listing page and return its total page count (a string).

    Returns the ``value`` attribute of the first ``.page input`` element,
    or None when no such element exists. On any request/parse failure,
    waits 6s and retries (unbounded).
    """
    try:
        # Bug fix: *header* is an HTTP-header dict, so it must be sent via
        # ``headers=``; the original passed it as ``params=`` (query string),
        # meaning Host/Cookie/User-Agent were never actually applied.
        r = requests.get(req_url, headers=header)
        soup = BeautifulSoup(r.text, "html.parser")
        for page in soup.select('.page input'):
            return page.get('value')
    except Exception as e:
        print(f'错误信息为{e}')
        time.sleep(6)
        # Bug fix: propagate the retry's result; the original discarded it
        # and fell through to an implicit None even when the retry worked.
        return get_nums(req_url, header)


def thread_process(nums, req_url, pool_size=20):
    """Scrape every listing-page number in *nums* via a worker pool.

    Each page number is handed to get_url() asynchronously. *pool_size*
    (new, defaults to the previously hard-coded 20) controls parallelism.
    Blocks until all submitted pages have been processed.
    """
    pool = multiprocessing.Pool(pool_size)
    for num in nums:
        pool.apply_async(get_url, (num, req_url, ))
    # Explicit close()+join() (not ``with``, whose __exit__ terminates
    # workers) so every queued task finishes before returning.
    pool.close()
    pool.join()


def get_url(url_num, req_url):
    """Scrape listing page *url_num* (0-based) and process its links.

    Collects every vulnerability detail link (href -> title) on the page
    and hands the mapping to get_detail_info(). On failure, waits 6s and
    retries the same page (unbounded).
    """
    # Built before the try block so the except-branch message can always
    # reference it (the site's pageno parameter is 1-based).
    req_urls = req_url + '?pageno={}&repairLd='.format(url_num + 1)
    try:
        # Bug fix: req_header holds HTTP headers, so send it via
        # ``headers=``; the original's ``params=`` put them in the query
        # string and the real headers were never applied.
        url_totals = requests.get(req_urls, headers=req_header)
        soup_links = BeautifulSoup(url_totals.text, "html.parser")
        chart_dir = {}
        for data in soup_links.select('.fl .a_title2'):
            chart_dir[data.get('href')] = data.text.strip()
        get_detail_info(chart_dir)
    except Exception as e:
        print(f'获取网页失败，报错信息为{e}，请稍等，6s后尝试重新获取 ，网页网址为{req_urls}')
        time.sleep(6)
        get_url(url_num, req_url)


def get_detail_info(data):
    """Fetch and store the detail page for each href -> title in *data*.

    Skips entries whose CNNVD number (the href minus its 28-character
    prefix) already exists in intf_leak_detail. A failed fetch is retried
    for that single entry after 3s (unbounded).
    """
    for key, value in data.items():
        url_one = req_url_base + key
        file_main_db = MysqlDb(config.MYSQL_CONFIG)
        try:
            # NOTE(review): *key* comes from scraped HTML, so this
            # string-built SQL is injection-prone — switch to a
            # parameterized query once the DB helper supports one.
            isNull = file_main_db.execute_sql(
                f"select 1 from intf_leak_detail WHERE cnnvd_no = '{key[28:]}'")
        finally:
            # Bug fix: close in a finally so the connection is not leaked
            # when the existence check raises.
            file_main_db.mysql_db.close()
        if not isNull:
            try:
                # Bug fix: headers= (not params=) so the HTTP headers in
                # req_header are actually sent.
                url_totals = requests.get(url_one, headers=req_header)
                next_get_detail(url_totals, key, value)
            except Exception as e:
                print(f'错误信息为{e},{key[28:]},{value}')
                time.sleep(3)
                get_detail_info({key: value})


def next_get_detail(url_totals, key, value):
    """Parse one vulnerability detail page and store it via download_data_mysql().

    url_totals: the requests.Response for the detail page.
    key: the detail-page href; chars from index 28 on are used as the CNNVD number.
    value: the vulnerability title shown on the listing page.
    """
    # Intermediate mapping keyed by the Chinese field labels scraped from the page.
    items = {}
    soup_detail = BeautifulSoup(url_totals.text, "html.parser")
    # The "detail_xq w770" box holds the labelled attribute list (level, CVE no., ...).
    section_details = soup_detail.find(class_='detail_xq w770').find_all('li')
    items['CNNVD编号'] = key[28:]
    items['漏洞名称'] = value
    # Pre-seed optional fields so the item dict below never hits a KeyError.
    items['受影响实体'] = None
    items['补丁'] = None
    # li[0] is skipped and li[1:9] each carry a "<span>label:</span><a>value</a>" pair;
    # a missing <a> means the field has no value on the page.
    for detail in section_details[1:9]:
        items[detail.find('span').get_text().strip("：")] = detail.find('a').get_text().strip() \
            if detail.find('a') else None
    # Each "d_ldjj" section is a titled text block (简介/公告/来源...); its body is
    # either <p> paragraphs or a list of "a_title2" links, joined into one string.
    section_datas = soup_detail.find_all(class_='d_ldjj')
    for detail in section_datas:
        text_list = []
        if detail.find_all('p'):
            for context in detail.find_all('p'):
                text_list.append(context.get_text().strip())
        else:
            for context in detail.find_all(class_='a_title2'):
                text_list.append(context.get_text().strip())
        items[detail.find('h2').get_text()] = ''.join(text_list)
    # Remap the scraped labels onto the intf_leak_detail column names.
    # NOTE: the '厂...商' key really contains NBSP padding (\xa0) exactly as
    # rendered on the site — do not "fix" the spacing.
    item = {
        'cnnvd_no': items['CNNVD编号'],
        'cnnvd_name': items['漏洞名称'],
        'risk_level': items['危害等级'],
        'cve_no': items['CVE编号'],
        'leak_type': items['漏洞类型'],
        'addtime': items['发布时间'],
        'risk_type': items['威胁类型'],
        'updatetime': items['更新时间'],
        'factory': items['厂\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0商'],
        'sources_vulnerability': items['漏洞来源'],
        'vulnerability_profile': items['漏洞简介'],
        'vulnerability': items['漏洞公告'],
        'sphere_influence': items['受影响实体'] if items['受影响实体'] else None,
        'patch': items['补丁'],
    }
    download_data_mysql(item)


def download_data_mysql(data):
    """Store one vulnerability record (column -> value dict) in MySQL."""
    print(f"正在处理{data['cnnvd_no']}的数据")
    file_main_db = MysqlDb(config.MYSQL_CONFIG)
    try:
        file_main_db.insert_data_mysql('intf_leak_detail', data)
    finally:
        # Bug fix: close in a finally so the connection is not leaked
        # when the insert raises.
        file_main_db.mysql_db.close()


def main():
    """Run one full crawl: read the page count, then split the page
    numbers across worker processes (each of which runs a thread pool).
    """
    req_url = req_url_base + leak_url
    req_header['User-Agent'] = random.choice(user_agent_list)
    page_total = get_nums(req_url, req_header)
    print(page_total)
    ls = list(range(int(page_total)))
    n = 2  # number of worker processes
    # max(1, ...) guards against a chunk size of 0 when there are fewer
    # pages than processes, which would crash partition()'s range().
    res = partition(ls, max(1, len(ls) // n))
    procs = []
    for idx, nums in enumerate(res):
        p = multiprocessing.Process(target=thread_process, args=(nums, req_url,))
        # enumerate() replaces the original res.index(nums), which was a
        # linear search per iteration and wrong for duplicate chunks.
        print('第{}个进程'.format(idx))
        p.start()
        procs.append(p)
    # Wait for every worker so a scheduled re-run starts only after this
    # crawl has fully finished.
    for p in procs:
        p.join()


if __name__ == '__main__':
    delay = 86400  # one day, in seconds
    # Run one crawl immediately (the original's ``main()`` call did this
    # as a side effect of its bug).
    main()
    # Bug fix: schedule the *callable*, not its return value. The original
    # ``s.enter(delay, 2, main())`` scheduled None, so sched crashed with
    # "TypeError: 'NoneType' object is not callable" when the event fired.
    s.enter(delay, 2, main)
    s.run()
