# -*- coding: utf-8 -*-
import scrapy
from bs4 import BeautifulSoup

from dingdian.init_mysql import session
from dingdian.items import CnnvdItem
from scrapy.http import Request

from dingdian.models import VulCnnvd
from dingdian.schedule import schedule


class Myspider(scrapy.Spider):
    """Spider for CNNVD (China National Vulnerability Database).

    Walks every listing page of the vulnerability query index, follows each
    vulnerability-detail link, and yields one ``CnnvdItem`` per vulnerability.
    URLs already stored in the ``VulCnnvd`` table are skipped, and crawl
    progress is reported through ``schedule()``.
    """

    name = 'cnnvd'
    # First listing page; also used by parse() to discover the page count.
    bash_url = 'http://www.cnnvd.org.cn/web/vulnerability/querylist.tag?pageno=1&repairLd='
    # A listing-page URL is: bashurl + page_number + bashurll.
    bashurl = 'http://www.cnnvd.org.cn/web/vulnerability/querylist.tag?pageno='
    bashurll = '&repairLd='
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Connection': 'keep-alive',
        'Referer': 'http://www.cnnvd.org.cn/web/vulnerability/querylist.tag?pageno=10903&repairLd=',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent':  'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Mobile Safari/537.36',
        'X-DevTools-Emulate-Network-Conditions-Client-Id': '1CFA3F506A86DC2600278F0D224F34E4'
    }
    # Progress bookkeeping shared across callbacks.
    pageNum = 0        # total number of listing pages (set in parse)
    vulNum = 0         # vulnerability links found on the last listing page
    totalVuls = 0      # estimated total vulnerabilities (pageNum * vulNum)
    currentVuls = 0.0  # vulnerabilities processed (skipped or scraped) so far

    def start_requests(self):
        """Kick off the crawl from the first listing page."""
        yield Request(self.bash_url, headers=self.headers, callback=self.parse)

    def parse(self, response):
        """Read the total page count and schedule every listing page."""
        page_source = BeautifulSoup(response.text, 'lxml')
        # The pager widget carries the last page number in an <input> value.
        self.pageNum = int(page_source.find(class_="page").find('input')['value'])
        for page in range(1, self.pageNum + 1):
            href = self.bashurl + str(page) + self.bashurll
            yield Request(href, callback=self.get_url)

    def get_url(self, response):
        """Extract vulnerability-detail links from one listing page and
        schedule each link that has not been scraped before."""
        anchors = BeautifulSoup(response.text, 'lxml') \
            .find('div', class_="list_list").find_all('a', class_="a_title2")
        self.vulNum = len(anchors)
        self.totalVuls = self.vulNum * self.pageNum
        for anchor in anchors:
            url = 'http://www.cnnvd.org.cn' + anchor['href']
            # .first() only fetches one row; the original .all() != [] loaded
            # every matching row just to test for existence.
            if session.query(VulCnnvd).filter(VulCnnvd.url == url).first() is not None:
                print("已经解析过该网页")  # "this page was already parsed"
                self.currentVuls += 1.0
                schedule(self.totalVuls, self.currentVuls)
                continue
            yield Request(url, callback=self.save_vul)

    def save_vul(self, response):
        """Parse one vulnerability-detail page into a CnnvdItem and yield it.

        Optional fields degrade to None (or the string 'None' for the two
        free-text sections, preserving the original sentinel) when the page
        does not carry them.
        """
        self.currentVuls += 1.0
        schedule(self.totalVuls, self.currentVuls)
        # Parse the response once and reuse the tree; the original rebuilt
        # the BeautifulSoup tree three times for the same page.
        soup = BeautifulSoup(response.text, 'lxml')
        detail = soup.find('div', class_="detail_xq w770")
        # Hoisted: detail.select('li') was re-run for every field.
        fields = detail.select('li')
        item = CnnvdItem()
        item['url'] = response.url
        item['title'] = detail.find('h2').get_text().strip()
        try:
            item['cveID'] = detail.find('a', attrs={'target': "_blank", 'rel': 'nofollow'}).get_text().strip()
        except Exception:
            item['cveID'] = None
        try:
            item['level'] = fields[1].find('a').get_text().strip()
        except Exception:
            item['level'] = None
        try:
            item['type'] = fields[3].find('a').get_text().strip()
        except Exception:
            item['type'] = None
        # The site shows this placeholder ("insufficient data") for unknown types.
        if item['type'] == '资料不足':
            item['type'] = None
        # Guarded like the sibling fields so one malformed page cannot crash
        # the spider (the original let these two raise).
        try:
            item['publishTime'] = fields[4].find('a').get_text().strip()
        except Exception:
            item['publishTime'] = None
        try:
            item['updateTime'] = fields[6].find('a').get_text().strip()
        except Exception:
            item['updateTime'] = None
        try:
            item['source'] = fields[8].get_text().strip()
        except Exception:
            item['source'] = None
        try:
            # Vulnerability description paragraphs, newline-terminated each.
            paragraphs = soup.find('div', class_='d_ldjj').select('p')
            item['message'] = ''.join(p.get_text().strip() + '\n' for p in paragraphs)
        except Exception:
            item['message'] = 'None'
        try:
            # Vendor-patch section paragraphs, newline-terminated each.
            patches = soup.find('div', class_='d_ldjj m_t_20').select('p')
            item['vendorPatch'] = ''.join(p.get_text().strip() + '\n' for p in patches)
        except Exception:
            item['vendorPatch'] = 'None'
        item['chinese'] = True
        item['total'] = self.totalVuls
        item['current'] = int(self.currentVuls)
        yield item

