# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup  
import re  
import urllib.request, urllib.error  
import tkinter as tk  

# Output filename stem; scraped text is written to "<name>.txt" by main().
name = "xxx"
# Exclusive upper bound for the page loop in getData(); with 2, only page 1 is fetched.
Number = 2

# Pre-compiled pattern extracting the chapter title from <h1 class="wap_none">…</h1>.
Title = re.compile(r'<h1 class="wap_none">(.*?)</h1>')
# NOTE(review): compiled but never referenced in this file — getData() strips tags
# with its own pattern instead; presumably a leftover. Confirm before removing.
lst = re.compile(r'<div id="chaptercontent" class="Readarea ReadAjax_content">(.*?)</br>')

def main(url):
    """Crawl chapters starting at *url* and save them to '<name>.txt'.

    Orchestrator only: fetching is delegated to getData(), persistence
    to saveData(). The output path is derived from the module-level `name`.
    """
    chapters = getData(url)
    saveData(chapters, name + ".txt")

# 爬取网页
# 爬取网页
def getData(base_url):
    """Fetch pages base_url + '<i>.html' for i in [1, Number) and parse each.

    Returns a list of [title, content] pairs — title from the <h1> pattern
    (or "No Title" when absent), content the tag-stripped text of the last
    'Readarea ReadAjax_content' div on the page (empty string when none).
    """
    datalist = []
    print("爬取中.......")

    # Hoisted out of the loop; raw string avoids the invalid '\w' escape warning.
    tag_re = re.compile(r'</?\w+[^>]*>')

    for i in range(1, Number):
        url = base_url + str(i) + '.html'
        html = askURL(url)

        data = []
        title = re.findall(Title, html)
        data.append(title[0] if title else "No Title")

        soup = BeautifulSoup(html, "html.parser")
        # Fix: the original referenced `link` after the loop, raising NameError
        # when no matching div existed (and reusing the previous page's value
        # otherwise). Initialize per page so each chapter stands alone.
        content = ""
        for item in soup.find_all('div', class_="Readarea ReadAjax_content"):
            content = tag_re.sub('', str(item))
        data.append(content)
        datalist.append(data)

    return datalist

# 得到指定一个URL的网页内容
# 得到指定一个URL的网页内容
def askURL(url):
    """GET *url* with a desktop-browser User-Agent and return the body as UTF-8 text.

    On any URLError (including HTTPError) the status code and/or reason are
    printed and an empty string is returned — callers get best-effort output
    rather than an exception.
    """
    head = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36"
    }

    request = urllib.request.Request(url, headers=head)
    html = ""
    try:
        # Fix: the original never closed the response; `with` releases the
        # underlying socket even if read()/decode() raises.
        with urllib.request.urlopen(request) as response:
            html = response.read().decode("utf-8")
    except urllib.error.URLError as e:
        if hasattr(e, "code"):
            print(e.code)
        if hasattr(e, "reason"):
            print(e.reason)
    return html

# 保存数据到文本文件
# 保存数据到文本文件
def saveData(datalist, savepath):
    """Write each [title, content] entry in *datalist* to *savepath* (UTF-8).

    Each chapter is emitted as a labelled title/content pair followed by a
    50-character '=' divider; the file is overwritten on every call.
    """
    divider = "=" * 50
    with open(savepath, 'w', encoding='utf-8') as out:
        for entry in datalist:
            out.write(f"章节:\n{entry[0]}\n\n内容:\n{entry[1]}\n\n{divider}\n\n")
    print(f"数据已保存到 {savepath}")

# 创建UI界面
# 创建UI界面
def create_ui():
    """Build a minimal Tk window: a URL entry field and a start button.

    Clicking the button runs main() on the entered URL. The Entry widget is
    published as a module-level global (matching the original behavior).
    Blocks in mainloop() until the window is closed.
    """
    global entry

    root = tk.Tk()
    root.title("Python爬虫")
    root.geometry("400x200")

    tk.Label(root, text="爬取链接:").pack()

    entry = tk.Entry(root, width=50)
    entry.pack()

    def on_start():
        target = entry.get()
        if target:
            main(target)
            print("爬取完毕！")

    tk.Button(root, text="开始爬取", command=on_start).pack()

    root.mainloop()

# Script entry point: launch the GUI only when run directly, not on import.
if __name__ == "__main__":
    create_ui()
