'''
Author: 奔跑的乌龟
Date: 2020-12-05 13:25:53
LastEditTime: 2020-12-05 14:44:44
email: 435327238@qq.com
FilePath: \waibaoe:\爬虫12-4\Db.py
'''
import requests
from lxml import etree
import html
import time
import gethtml
import Db
import article

# 解析Html并插入数据
# Parse one book's chapter-list HTML and insert every chapter into the DB
def analysisHtml(url,maxBookNumber):
    """Fetch the chapter-list page at *url*, scrape every chapter link,
    and insert one row per chapter into the ``chapter`` table.

    Parameters:
        url: chapter-list page URL, e.g. ``http://www.aomolit.com/t/<n>``.
        maxBookNumber: book number stored with each row and embedded in
            the per-chapter article URLs.

    Returns:
        ``"1"`` when the page is missing/empty or any step fails (sentinel
        string the caller ``produceurl`` checks for); ``None`` on success.
    """
    try:
        res = gethtml.getHtml(url)

        # gethtml.getHtml signals a failed fetch with the sentinel "1"
        if res == "1":
            return "1"

        # The site serves UTF-8 bytes mislabelled as ISO-8859-1; re-decode
        # before handing the markup to lxml.
        selector = etree.HTML(res.text.encode('ISO-8859-1').decode('UTF-8'))

        hrefs = selector.xpath('//div[@class="book_con_list"]/ul/li/a/@href')
        titles = selector.xpath('//div[@class="book_con_list"]/ul/li/a/text()')

        # No chapter links means the book page does not exist; the original
        # code reached the same result only via an IndexError on hrefs[0].
        if not hrefs:
            return "1"

        # Insert statement shared by every chapter of this book
        sql = 'insert into chapter(booknumber,chaptertitle,chapterpage,content) values(%s,%s,%s,%s);'

        # zip replaces the original manual index loop and also guards
        # against a count mismatch between hrefs and titles.
        for href, title in zip(hrefs, titles):
            pages = href[0:-5]  # strip the trailing ".html"
            analysisUrl = "http://www.aomolit.com/t/"+str(maxBookNumber)+"/"+str(pages)+".html"
            content = article.analysisHtml(analysisUrl)
            Db.insert(sql,[maxBookNumber,str(title),pages,content])
            print("爬取成功，数据是："+str(title)+pages)
    except Exception:
        # Narrowed from BaseException so KeyboardInterrupt/SystemExit still
        # propagate; any scrape or DB error is reported via the "1" sentinel.
        return "1"

def produceurl():
    """Walk book numbers from the highest one already stored in the DB up
    to ``stop``, scraping each book's chapter list via ``analysisHtml``.

    Resumes where a previous run left off; missing books are skipped with
    a console message. Returns ``None``.
    """
    try:
        # Resume from the highest booknumber already stored in the table.
        value = "SELECT * FROM chapter WHERE  booknumber=(SELECT MAX(booknumber) FROM chapter);"
        maxBookNumber = Db.query(value)
        maxBookNumber = maxBookNumber[0][1]
    except Exception:
        # Empty table or unreachable DB: start from the first book.
        # (Narrowed from BaseException so Ctrl-C still aborts the run.)
        maxBookNumber = 1
    stop = 120000
    # The bound now lives in the loop condition: the original checked it
    # only after a successful scrape, so the `continue` taken for missing
    # books skipped the check and a run of misses past `stop` looped forever.
    while maxBookNumber < stop:
        url = 'http://www.aomolit.com/t/' + str(maxBookNumber)
        result = analysisHtml(url,maxBookNumber)
        maxBookNumber += 1
        if result == "1":
            print("该数据不存在，将跳过该数据！")
            continue
        print("爬取成功url是："+url)

if __name__ == "__main__":
    produceurl()