# -*-coding: Utf-8 -*-
# @File : main_spider.py
# author: Chimengmeng
# blog_url : https://www.cnblogs.com/dream-ze/
# Time：2023/10/10
import csv
import os

import requests
from lxml import etree
from fake_useragent import UserAgent

from settings import base_settings, db_settings


class Spider():
    """Scrapes second-hand house listings from anjuke.com and hands each row
    to a DbHandler for CSV persistence."""

    # Column order for CSV rows; must match the header DbHandler writes.
    FIELD_NAMES = [
        'house_name',        # listing name
        'house_detail_url',  # detail page link
        'house_detail',      # layout description
        'house_area',        # built area
        'house_create',      # construction year
        'house_cover',       # cover image URL
        'house_price_total', # total price (with '万' suffix appended)
        'house_pre_price',   # price per square meter
        'house_location',    # district/area/address joined with '-'
        'house_tag',         # listing tags joined with '-'
    ]

    def __init__(self):
        # Randomized User-Agent to reduce the chance of being blocked.
        self.headers = {
            'User-Agent': UserAgent().random
        }
        self.db_handler = DbHandler()

    def spider_house_data_list(self):
        """Fetch the listing page and parse every house card on it.

        :return: list of dicts, one per listing, keyed by FIELD_NAMES.
        """
        house_data_list = []
        tag_url = 'https://nb.anjuke.com/sale/jiangbeia/'
        response = requests.get(url=tag_url, headers=self.headers)
        page_text = response.text
        tree = etree.HTML(page_text)
        # Absolute XPath to the listing cards; brittle against site redesigns.
        div_lists = tree.xpath('/html/body/div[1]/div/div/section/section[3]/section[1]/section[2]/div')
        for div in div_lists:
            try:
                house_info = {}
                house_info['house_name'] = div.xpath('./a/div[2]/div[1]/section/div[2]/p[1]/text()')[0]
                house_info['house_detail_url'] = div.xpath('./a/@href')[0]
                house_info['house_detail'] = div.xpath('./a/div[2]/div[1]/div[1]/h3/@title')[0]
                house_info['house_area'] = div.xpath('./a/div[2]/div[1]/section/div[1]/p[2]/text()')[0].strip()
                house_info['house_create'] = div.xpath('./a/div[2]/div[1]/section/div[1]/p[5]/text()')[0].strip()
                house_info['house_cover'] = div.xpath('./a/div[1]/img/@src')[0]
                house_info['house_price_total'] = div.xpath('./a/div[2]/div[2]/p[1]/span[1]/text()')[0].strip() + '万'
                house_info['house_pre_price'] = div.xpath('./a/div[2]/div[2]/p[2]/text()')[0].strip()
                # Location and tags are multiple <span>s; join each with a
                # trailing '-' per element (matches the original output format).
                house_info['house_location'] = ''.join(
                    span.xpath('./text()')[0].strip() + '-'
                    for span in div.xpath('./a/div[2]/div[1]/section/div[2]/p[2]/span')
                )
                house_info['house_tag'] = ''.join(
                    span.xpath('./text()')[0].strip() + '-'
                    for span in div.xpath('./a/div[2]/div[1]/section/div[3]/span')
                )
                house_data_list.append(house_info)
            except Exception as e:
                # Best-effort scrape: a card with missing fields (IndexError on
                # the xpath [0]) is logged and skipped, not fatal.
                print(f'Error :>>> {e}')
        # Note: the original debug 'break' that stopped after the first card
        # has been removed so the full page is collected.
        return house_data_list

    def save_to_csv(self):
        """Scrape the listing page and append one CSV row per listing.

        :return: None
        """
        for house_data in self.spider_house_data_list():
            self.db_handler.save_row_data(
                [house_data[field] for field in self.FIELD_NAMES]
            )


class DbHandler():
    """Persists scraped house rows to a CSV file, writing the header row
    the first time the file is created."""

    def __init__(self):
        # Full path of the CSV output file under the configured data directory.
        self.csv_data_dir = os.path.join(db_settings.CSV_DATA_DIR, 'HouseInfoData.csv')
        # Create the file with a header row only if it does not exist yet,
        # so repeated runs append instead of overwriting.
        if not os.path.exists(self.csv_data_dir):
            header = [
                'house_name',
                'house_detail_url',
                'house_detail',
                'house_area',
                'house_create',
                'house_cover',
                'house_price_total',
                'house_pre_price',
                'house_location',
                'house_tag',
            ]
            with open(self.csv_data_dir, 'w', encoding='utf-8', newline='') as csvfile:
                csv.writer(csvfile).writerow(header)

    def save_row_data(self, row):
        '''
        Append one listing as a single CSV row.

        :param row: list of column values for one listing
        :return: None
        '''
        with open(self.csv_data_dir, 'a', encoding='utf-8', newline='') as csvfile:
            csv.writer(csvfile).writerow(row)


if __name__ == '__main__':
    # Run the scraper only when executed as a script, not on import.
    # (save_to_csv returns None, so its result is not bound.)
    s = Spider()
    s.save_to_csv()