#!/usr/bin/env python
# -*- coding:utf-8 _*-
""" 
@author:pengfeifu
@file: xbiquge.py 
@version:
@time: 2021/11/06 
@email:1324734112@qq.com
@desc： 斗罗大陆网络爬虫
@function：常用函数
"""
import re
import time

import requests
import random
import os
import bs4
from bs4 import BeautifulSoup
import sys
import importlib
import mysqldb_pool

importlib.reload(sys)

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36"
}

server_url = "https://www.xbiquge.la"

book_servers = [{
    "title": "圣墟",
    "book_server": "https://www.xbiquge.la/13/13959/"
}]
'''
, {
    "title": "斗罗大陆",
    "book_server": "https://www.xbiquge.la/1/1710/"
}
, {
    "title": "武炼巅峰",
    "book_server": "https://www.xbiquge.la/0/10/"
}
'''

file_save_url = "D:\\net\\book"


def directory_mkdir(file_path):
    if os.path.exists(file_path) is False:
        os.makedirs(file_path)
    os.chdir(file_path)


def get_content(book_src):
    print("正在请求路径：", book_src)
    book_res = requests.get(book_src, headers=headers)
    book_soup = BeautifulSoup(str(book_res.content, "utf8"), "html.parser")
    text_content = book_soup.find('div', class_="content_read").find("div", id="content")
    # print(text_content.contents)
    # 获取div标签id属性content的内容 \xa0 是不间断空白符 &nbsp;
    text = text_content.text.replace('\xa0' * 4, '\n')
    return text


def file_write(contents, file_name):
    try:
        with open(file_name, "a", encoding="utf-8") as f:
            f.write(contents)
    except IOError:
        print("文件写入失败")
    finally:
        f.close()


class Novel(object):
    def __init__(self, novel, cover, author, total_titles):
        self.novel = novel
        self.cover = cover
        self.author = author
        self.total_titles = total_titles


class NovelTitle(object):
    def __init__(self, novel, title):
        self.novel = novel
        self.title = title


class NovelContent(object):
    def __init__(self, novel, title, content):
        self.novel = novel
        self.title = title
        self.content = content


def write_tb_novel(novels):
    values = []
    for novel in novels:
        values.append([novel.novel, novel.cover, novel.author, novel.total_titles])

    insert_sql = "insert into tb_novel(novel_name,novel_cover,author,total_titles) values (%s,%s,%s,%s)\n" \
                 " ON DUPLICATE KEY UPDATE novel_name=values(novel_name),\n" \
                 " novel_cover=values(novel_cover),author=values(author),total_titles=values(total_titles)\n"
    mysql_pool = mysqldb_pool.MysqlDBPool(config_section="db_mysql_aliyun")
    count = mysql_pool.execute_many(sql=insert_sql, values=values)
    mysql_pool.sql_commit()
    print(f"小说插入成功{count}条")


def write_tb_title(titles):
    values = []
    for title in titles:
        values.append([title.novel, title.title])
    insert_sql = "insert into tb_title(novel_name,title_name) values (%s,%s)\n" \
                 " ON DUPLICATE KEY UPDATE novel_name=values(novel_name),\n" \
                 " title_name=values(title_name)\n"
    mysql_pool = mysqldb_pool.MysqlDBPool(config_section="db_mysql_aliyun")
    count = mysql_pool.execute_many(sql=insert_sql, values=values)
    mysql_pool.sql_commit()
    print(f"小说标题插入成功{count}条")


def write_tb_content(contents):
    values = []
    for content in contents:
        values.append([content.novel, content.title, content.content])
    insert_sql = "insert into tb_content(novel_name,title_name,content) values (%s,%s,%s)\n" \
                 " ON DUPLICATE KEY UPDATE novel_name=values(novel_name),\n" \
                 " title_name=values(title_name),content=values(content)\n"
    mysql_pool = mysqldb_pool.MysqlDBPool(config_section="db_mysql_aliyun")
    count = mysql_pool.execute_many(sql=insert_sql, values=values)
    mysql_pool.sql_commit()
    print(f"小说插入成功{count}条")


if __name__ == "__main__":
    novels, titles, contents = [], [], []
    for book_server in book_servers:
        # 小说名
        book_name = book_server.get("title")
        book_path = file_save_url + "\\" + book_name
        directory_mkdir(book_path)
        rep = requests.get(book_server.get("book_server"), headers=headers)
        soup = BeautifulSoup(str(rep.content, "utf8"), "html.parser")
        # 小说封面
        img = soup.find("div", id="fmimg").find("img")
        novel_cover = img.attrs["src"]
        # 标题链接
        a_list = soup.find("div", id="list").find_all("a")
        print(f"{book_name}章节共计：{len(a_list)}")
        # 作者
        p_author = soup.find("div", id="info").find_all("p")[0]
        author_name = re.findall(r"者：(.*)", p_author.text)
        # 封装小说list
        novels.append(Novel(book_name, img.attrs["src"], author=author_name[0], total_titles=len(a_list)))
        counter = 0
        for a in a_list:
            counter += 1
            time.sleep(random.randint(1, 2))
            # a.attrs["href"]=a.get("href")
            book_src = server_url + a.attrs["href"]
            # 标题list
            titles.append(NovelTitle(book_name, a.string))
            # 读取网络文本内容
            content = get_content(book_src)
            contents.append(NovelContent(novel=book_name, title=a.string, content=content))
            # 写入保存
            file_write(content, book_path + "\\" + str(counter) + a.string + ".txt")
    write_tb_novel(novels)
    write_tb_title(titles)
    write_tb_content(contents)
