import csv
import os

import requests
from parsel import Selector


class HousingCrawler:
    """Crawl second-hand housing listings from Lianjia and append them to data.csv.

    Usage: ``HousingCrawler().run()``
    """

    # CSV column headers (kept in the site's language: title, total price,
    # unit price, community, district, layout, area, orientation, fitment,
    # floor, year built, building structure).
    FIELDNAMES = ['标题', '售价', '单价', '小区', '商圈', '户型', '面积', '朝向', '装修', '楼层', '年份', '建筑结构']

    # Placeholder emitted by the site (and by us) for missing data.
    MISSING = '暂无数据'

    def __init__(self):
        # First result page; Lianjia paginates via the /pgN/ path segment.
        self.url = 'https://cs.lianjia.com/ershoufang/pg1/'
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36",
        }
        # Session() (modern spelling of the legacy session()) reuses the
        # underlying TCP connection across requests.
        self.session = requests.Session()
        self.csv_file = None
        self.csv_writer = None

    def open_csv_file(self):
        """Open data.csv for appending and prepare the DictWriter.

        The header row is written only when the file is new or empty —
        the original wrote it on every run, producing duplicate headers
        when appending to an existing file.
        """
        # Decide about the header before opening, while the size is stable.
        need_header = not os.path.exists('data.csv') or os.path.getsize('data.csv') == 0
        self.csv_file = open('data.csv', 'a', newline='', encoding='utf-8-sig')
        self.csv_writer = csv.DictWriter(self.csv_file, fieldnames=self.FIELDNAMES)
        if need_header:
            self.csv_writer.writeheader()

    def close_csv_file(self):
        """Flush and close the file opened by open_csv_file() (fixes the leak)."""
        if self.csv_file is not None:
            self.csv_file.close()
            self.csv_file = None

    def get_html(self):
        """Fetch the listing page and hand its HTML to the parser.

        Raises requests.HTTPError on a non-2xx response instead of
        silently parsing an error page.
        """
        response = self.session.get(url=self.url, headers=self.headers)
        response.raise_for_status()
        self.parse_html(response.text)

    @staticmethod
    def _parse_house_info(text):
        """Split the '|'-separated houseInfo string into its seven fields.

        Returns (type, area, face, fitment, floor, year, structure) with
        each value whitespace-stripped.  The "year" field is optional on
        the site; any missing field degrades to MISSING instead of
        raising IndexError/AttributeError as the original did.
        """
        parts = [p.strip() for p in text.split('|')] if text else []
        # Pad so the five mandatory fields plus the structure always exist.
        while len(parts) < 6:
            parts.append(HousingCrawler.MISSING)
        house_type, house_area, house_face, house_fitment, house_floor = parts[:5]
        if len(parts) >= 7:
            house_year, house_structure = parts[5], parts[6]
        else:
            # Six fields: the year is absent and the 6th item is the structure.
            house_year, house_structure = HousingCrawler.MISSING, parts[5]
        return (house_type, house_area, house_face, house_fitment,
                house_floor, house_year, house_structure)

    def parse_html(self, html):
        """Extract every listing from one result page and save it to CSV."""
        selector = Selector(text=html)
        for li in selector.css('.sellListContent li'):
            title = li.css('.info .title a::text').get()
            total_price = li.css('.info .priceInfo .totalPrice span::text').get()
            unit_price = li.css('.info .priceInfo .unitPrice span::text').get()
            position = li.css('.info .flood .positionInfo a::text').getall()
            house_info = li.css('.info .address .houseInfo::text').get()
            if title is None and house_info is None:
                # Ad/placeholder <li> with no listing data — skip it.
                continue
            # positionInfo sometimes has fewer than two links — don't crash.
            community = position[0] if len(position) > 0 else self.MISSING
            district = position[1] if len(position) > 1 else self.MISSING
            fields = self._parse_house_info(house_info)
            print(title, total_price, unit_price, community, district, *fields)
            self.save_data(title, total_price, unit_price, community, district, *fields)

    def save_data(self, *args):
        """Write one listing row; *args must match FIELDNAMES order."""
        self.csv_writer.writerow(dict(zip(self.FIELDNAMES, args)))

    def run(self):
        """Entry point: open the CSV, crawl, and always close the file."""
        self.open_csv_file()
        try:
            self.get_html()
        finally:
            self.close_csv_file()


if __name__ == '__main__':
    # Run the crawler only when executed as a script, not on import.
    HousingCrawler().run()
