#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2020-04-18 17:59:04
# Project: tanzhou

from pyspider.libs.base_handler import *
import pymysql

class Handler(BaseHandler):
    """Crawl Baidu's realtime hot-search board and extract keyword/heat pairs.

    Scrapes http://top.baidu.com/buzz?b=1 every hour and returns one record
    per ranked keyword. pyspider persists whatever a callback returns, so the
    scraped rows are returned from ``index_page`` instead of being written
    through an ad-hoc DB connection.
    """

    crawl_config = {
    }

    @every(minutes=60)
    def on_start(self):
        """Entry point: schedule the hot-search board page once per hour."""
        self.crawl('http://top.baidu.com/buzz?b=1&fr=topindex', callback=self.index_page)

    @config(age=30 * 60)
    def index_page(self, response):
        """Parse the ranking table and return the scraped rows.

        :param response: pyspider response for the board page; ``response.etree``
            is an lxml tree of the HTML.
        :returns: list of ``{'title': ..., 'hot': ...}`` dicts, one per keyword.

        NOTE(review): the previous version ended with an ``INSERT INTO novel``
        using an undefined ``item`` dict and nonexistent ``self.cursor`` /
        ``self.conn`` attributes — it raised ``NameError`` on every call and
        targeted an unrelated schema, so that dead code has been removed.
        """
        results = []
        # First <tr> is the header row; each remaining row is one ranked keyword.
        for row in response.etree.xpath('//table[@class="list-table"]/tr')[1:]:
            try:
                title = row.xpath('./td[@class="keyword"]/a[@class="list-title"]')[0]
                hot = row.xpath('./td[@class="last"]/span')[0]
            except IndexError:
                # Separator/ad rows lack these cells; skip them rather than
                # silently swallowing every exception as the old bare except did.
                continue
            print(title.text, hot.text)
            results.append({'title': title.text, 'hot': hot.text})
        return results
