
from bs4 import BeautifulSoup # 网页解析 获取数据
import re   # 正则表达式，进行文字匹配
import urllib.request, urllib.error # 指定url，获取网页数据
import pymysql # 数据库操作
from io import BytesIO
import gzip # 解压
import zlib # 解压


'''
http://www.xbiquge.la/0/
'''



# Pre-compiled patterns for scraping the book-info <div> markup:
Name = re.compile(r'<h1>(.*?)</h1>')  # book title text inside the <h1> tag
Kg = re.compile(r'\s+')  # any run of whitespace (collapsed before the author match)
Author = re.compile(r'<p>作者：(.*?)</p>')  # author line; '作者' means "author"


# 提取指定数据
# 提取指定数据
def getData(baseurl):
    """Crawl consecutive book pages under *baseurl* and persist each book.

    For every numeric page id, fetch the page, pull the title and author out
    of the <div id="info"> block, and write them to MySQL via saveDataDB.

    :param baseurl: site prefix ending in '/'; the page id is appended directly.
    :returns: None (results go straight to the database).
    """
    FIRST_SERIAL = 559   # first book id the original crawl started at (558 + 1)
    PAGE_COUNT = 78079   # number of consecutive ids to try
    for num in range(FIRST_SERIAL, FIRST_SERIAL + PAGE_COUNT):
        serial = str(num)
        url = baseurl + serial
        try:
            html = loadData(url)
        except urllib.error.URLError:
            # A missing or broken page must not abort the whole 78k-page crawl.
            continue
        soup = BeautifulSoup(html, "html.parser")
        for item in soup.find_all('div', id="info"):
            text = str(item)
            names = re.findall(Name, text)       # title(s) from <h1>
            compact = re.sub(Kg, '', text)       # drop whitespace so the author regex matches
            authors = re.findall(Author, compact)
            # Save this page immediately; the original accumulated every page
            # in a growing list and re-scanned it on each iteration (O(n^2)
            # and unbounded memory) only to use the most recent entry anyway.
            saveDataDB([serial], names, authors)


# 得到指定一个URL网页内容
# 得到指定一个URL网页内容
def loadData(url):
    """Fetch *url* and return the page body as a UTF-8 decoded str.

    Sends a desktop browser User-Agent (the site blocks obvious bots) and
    advertises gzip/deflate support, transparently decompressing the reply.

    :param url: absolute URL to fetch.
    :returns: page body as str.
    :raises urllib.error.URLError: on network / HTTP failure.
    """
    head = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36"
    }
    request = urllib.request.Request(url, headers=head)
    request.add_header('Accept-encoding', 'gzip,deflate')
    # Close the connection deterministically instead of relying on GC.
    with urllib.request.urlopen(request) as response:
        content = response.read()
        encoding = response.info().get('Content-Encoding')
    if encoding == 'gzip':
        return gzips(content)
    if encoding == 'deflate':
        return deflate(content)
    # The original returned raw bytes on this path while the compressed
    # paths returned str; decode so every path yields the same type.
    return content.decode('utf-8', errors='replace')

# 解压缩 gzip
# 解压缩 gzip
def gzips(data):
    """Decompress gzip-encoded *data* (bytes) and return it as a UTF-8 str.

    :param data: complete gzip stream as bytes.
    :returns: decompressed text.
    :raises gzip.BadGzipFile / OSError: if *data* is not valid gzip.
    """
    # gzip.decompress replaces the hand-rolled BytesIO + GzipFile pair,
    # which the original never closed.
    return gzip.decompress(data).decode('utf-8')

# 解压缩 zlib
# 解压缩 zlib
def deflate(data):
    """Decompress deflate-encoded *data* and return it as a UTF-8 str.

    Servers send either a raw deflate stream or a zlib-wrapped one; try the
    raw form first and fall back to the wrapped form on failure.
    """
    try:
        raw = zlib.decompress(data, -zlib.MAX_WBITS)
    except zlib.error:
        raw = zlib.decompress(data)
    return raw.decode('utf-8')


# mysql保存数据
# mysql保存数据
def saveDataDB(list1, list2, list3):
    """Insert scraped rows into the Fiction_ table.

    The three lists are indexed in parallel; len(list2) drives the row count
    (same contract as the original).

    :param list1: serial numbers.
    :param list2: book names.
    :param list3: authors.
    """
    conn = pymysql.connect(host='127.0.0.1', port=3306, user='python',
                           password='Mysql_123', db='python', charset='utf8')
    try:
        with conn.cursor() as cur:
            # Parameterized query: the driver escapes the values, so scraped
            # page content can no longer inject SQL (the original built the
            # statement with raw %-string formatting).
            sql = 'insert into Fiction_ (serial, book_name, authors) values (%s, %s, %s)'
            rows = [(list1[i], list2[i], list3[i]) for i in range(len(list2))]
            cur.executemany(sql, rows)
        conn.commit()  # one commit per batch instead of one per row
    finally:
        # Release the connection even if the insert fails.
        conn.close()
    print('保存数据库成功！！')


# 创建数据表
# 创建数据表
def init_db():
    """Create the Fiction_ table used by saveDataDB (run once up front).

    :raises pymysql.err.ProgrammingError: if the table already exists.
    """
    sql = '''
        create table Fiction_
        (
        id int PRIMARY KEY AUTO_INCREMENT,
        book_name char(100) ,
        authors char(100),
        serial char(10)
        )ENGINE=InnoDB  DEFAULT CHARSET=utf8 AUTO_INCREMENT=1;             
    '''  # table DDL: auto-increment id plus the three scraped columns
    conn = pymysql.connect(host='127.0.0.1', port=3306, user='python', password='Mysql_123', db='python', charset='utf8')
    try:
        # `with` closes the cursor; the original leaked it.
        with conn.cursor() as cursor:
            cursor.execute(sql)
        conn.commit()
    finally:
        # Close the connection even when the DDL fails.
        conn.close()


# 主函数
# 主函数
def main():
    """Entry point: crawl the book site and store results in MySQL."""
    # Uncomment on the very first run to create the MySQL table.
    # init_db()
    getData("http://www.xbiquge.la/0/")



# Run the crawler only when executed as a script, not on import.
if __name__ == "__main__":
    main()
