import csv
import requests
from bs4 import BeautifulSoup

# Douban Top 250 books chart; paginated 25 items per page via the "start" query param.
url = r"https://book.douban.com/top250"

# Browser-like User-Agent: Douban blocks the default python-requests UA.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36",
}
books = []  # accumulated rows: {"title": str, "score": str}
for page in range(0, 10):
    # Let requests build the query string instead of concatenating it by hand.
    response = requests.get(
        url,
        headers=headers,
        params={"start": page * 25},
        timeout=10,  # fail fast instead of hanging forever on a stalled connection
    )
    response.raise_for_status()  # surface HTTP errors (e.g. 403/429) instead of parsing an error page
    soup = BeautifulSoup(response.text, "html.parser")

    for item in soup.select("tr.item"):
        link = item.select_one(".pl2 a")
        rating = item.select_one(".rating_nums")
        # Skip malformed rows instead of crashing on a None lookup.
        if link is None or rating is None:
            continue
        books.append({"title": link["title"], "score": rating.text.strip()})

# Persist the scraped rows to CSV; newline="" avoids blank lines on Windows.
with open("douban_book.csv", 'w', newline='', encoding='utf-8') as out_file:
    fieldnames = ["title", "score"]
    writer = csv.DictWriter(out_file, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerows(books)
