import csv
import datetime
import os
import warnings

import requests
from bs4 import BeautifulSoup

class HouseScraper:
    """Scrapes house listings from fang.com-style search result pages."""

    def __init__(self, base_url):
        # Base site URL, e.g. "https://sh.esf.fang.com/"; the per-page
        # path is appended in fetch_page.
        self.base_url = base_url

    def fetch_page(self, page_number):
        """Fetch one listing page and return its parsed soup, or None on failure.

        Emits a warning (instead of raising) on a non-200 status or a
        request error, so one bad page does not abort a whole run.
        """
        url_next = f"{self.base_url}house/i{page_number}"
        print(f"looking for site:{url_next}")
        try:
            # Explicit timeout: without one, a stalled server hangs the
            # scraper forever.
            res = requests.get(url_next, timeout=10)
            if res.status_code == 200:
                return BeautifulSoup(res.text, "html.parser")
            warnings.warn(f"Failed to fetch page {page_number} with status code: {res.status_code}")
        except requests.RequestException as e:
            # Narrowed from bare Exception: only network/HTTP errors are
            # expected and recoverable here.
            warnings.warn(f"An error occurred while fetching page {page_number}: {e}")
        return None

    def parse_houses(self, soup):
        """Yield one tuple of fields per house listing found in *soup*.

        Yields (name, room, mianji, louceng, chaoxiang, year, master,
        address, price, average_price); entries that fail to parse are
        skipped with a warning rather than aborting the page.
        """
        if not soup:
            # Bare return: inside a generator a `return []` value is
            # discarded by iteration anyway, so returning [] was misleading.
            return

        total = soup.find("div", class_="shop_list shop_list_4")
        if not total:
            warnings.warn("No houses found on the page")
            return

        houses = total.find_all("dl", class_="clearfix")
        for house in houses:
            try:
                name = house.find("h4", class_="clearfix").find("span").text
                text1 = house.find("p", class_="tel_shop").text
                # Split on "|" and strip each piece; pad with "" so short
                # rows still unpack into exactly five fields.
                parts = [item.strip() for item in text1.split("|")]
                room, mianji, louceng, chaoxiang, year = parts[:5] + [''] * (5 - len(parts))

                master = house.find("p", class_="tel_shop").find("a").text
                address = house.find("p", class_="add_shop").find("a").text
                # Hoist the shared price container instead of finding it twice.
                price_block = house.find("dd", class_="price_right")
                price = price_block.find("b").text
                average_price = price_block.find_all("span")[1].text
                yield (name, room, mianji, louceng, chaoxiang, year, master, address, price, average_price)
            except Exception as e:
                # Any missing sub-element raises AttributeError/IndexError;
                # skip the malformed entry and keep parsing the rest.
                warnings.warn(f"Error parsing a house: {e}")

class DataSaver:
    """Writes scraped rows to a CSV file under the ./csv/ directory."""

    def __init__(self, filename):
        # Output file stem; rows are written to csv/<filename>.csv.
        self.filename = filename
        # exist_ok avoids the race between an existence check and creation.
        os.makedirs("csv", exist_ok=True)

    def save_data(self, data):
        """Write *data* (an iterable of field sequences) as proper CSV.

        Uses the csv module so fields containing commas, quotes or
        newlines are quoted correctly — the previous ','.join() approach
        produced corrupt rows for such values. newline='' is required by
        the csv module to prevent blank lines on Windows.
        """
        with open(f"csv/{self.filename}.csv", mode='w', encoding='utf-8', newline='') as fp:
            csv.writer(fp).writerows(data)

class MainController:
    """Wires a HouseScraper and a DataSaver together for one scraping run."""

    def __init__(self, base_url, pages_to_scrape, start_page=31):
        """Configure a run over *pages_to_scrape* pages starting at *start_page*.

        start_page defaults to 31 to preserve the previously hard-coded
        behaviour while allowing other page ranges to be scraped.
        """
        self.scraper = HouseScraper(base_url)
        # Timestamped file name keeps successive runs from overwriting
        # each other's output.
        self.data_saver = DataSaver(datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
        self.pages_to_scrape = pages_to_scrape
        self.start_page = start_page

    def run(self):
        """Scrape every configured page and save all collected rows as one CSV."""
        all_data = []
        for page in range(self.start_page, self.start_page + self.pages_to_scrape):
            soup = self.scraper.fetch_page(page)
            if soup:
                # extend() consumes the generator directly; no need to
                # materialize it with list() first.
                all_data.extend(self.scraper.parse_houses(soup))
        self.data_saver.save_data(all_data)

# Usage example
if __name__ == "__main__":
    page_count = int(input("输入爬取页数: "))
    MainController("https://sh.esf.fang.com/", page_count).run()