import csv
import json
import os
import requests
from utils.random_request import random_ua
from utils.query import query


def init():
    """
    Create the data directory and the house-info CSV with its header row.

    Existing files are left untouched — re-running never overwrites data.
    :return: None
    """
    os.makedirs("./data", exist_ok=True)
    csv_path = "./data/house_info.csv"
    if os.path.exists(csv_path):
        return
    header = [
        "title",
        "cover",
        "city",
        "region",
        "address",
        "rooms_desc",
        "area_range",
        "all_ready",
        "prices",
        "house_decoration",
        "company",
        "house_type",
        "on_time",
        "open_date",
        "tags",
        "total_price_range",
        "sale_status",
        "detail_url",
    ]
    with open(csv_path, "w", newline="", encoding="utf-8") as out:
        csv.writer(out).writerow(header)


def write_to_csv(row):
    """
    Append one row of house data to the house-info CSV.

    :param row: sequence of cell values, in header order
    :return: None
    """
    with open("./data/house_info.csv", "a", newline="", encoding="utf-8") as out:
        csv.writer(out).writerow(row)


def read_city_url_from_csv():
    """
    Yield (city, url, house-count) rows from the city CSV, one at a time.

    The header row is skipped.
    :return: generator of raw csv rows (lists of strings)
    """
    with open("./data/city_url_houseNum.csv", "r", encoding="utf-8") as src:
        for index, row in enumerate(csv.reader(src)):
            if index == 0:
                continue  # header row
            yield row


def get_data(url):
    """
    Fetch one listing page and return its house list.

    :param url: listing API URL (expected to return JSON with data.list)
    :return: the ``data.list`` payload on success, otherwise None
    """
    headers = {"User-Agent": random_ua()}
    try:
        print(f"正在请求 {url} ...")
        response = requests.get(url, headers=headers, timeout=5)
        if response.status_code == 200:
            return response.json()["data"]["list"]
        # Non-200: report it instead of failing silently, then signal the
        # caller (main breaks out of the page loop on None).
        print(f"请求失败，状态码: {response.status_code}")
        return None
    except (requests.exceptions.RequestException, ValueError, KeyError) as e:
        # RequestException: network/timeout; ValueError: body was not JSON;
        # KeyError: payload missing data.list. All degrade to None, but the
        # cause is now visible instead of being swallowed.
        print(f"请求 {url} 出错: {e}")
        return None


# NOTE: a legacy XPath/HTML version of parse_data used to live here as a
# commented-out module-level string literal (a runtime no-op). It was removed;
# the JSON-based parse_data below is the sole implementation.


def parse_data(city_url, data):
    """
    Parse the JSON house list and append each record to the CSV file.

    Records with a missing or unexpected field are skipped (best-effort
    import), with the reason printed so data loss is visible.
    :param city_url: base URL of the city site, used to build detail_url
    :param data: list of house dicts from the listing API
    :return: None
    """
    for house_info in data:
        try:
            title = house_info["title"]
            cover = house_info["cover_pic"]
            city = house_info["city_name"].replace("市", "")
            region = house_info["district_name"]
            address = house_info["address"]
            # List-like fields are stored as JSON arrays so they round-trip
            # cleanly through the CSV.
            rooms_desc = json.dumps(
                house_info["frame_rooms_desc"].replace("居", "").split("/"),
                ensure_ascii=False,
            )
            area_range = json.dumps(
                house_info["resblock_frame_area_range"].replace("㎡", "").split("-"),
                ensure_ascii=False,
            )
            all_ready = house_info["permit_all_ready"]
            prices = house_info["average_price"]
            house_decoration = house_info["decoration"]
            company = house_info["developer_company"][0]
            house_type = house_info["house_type"]
            on_time = house_info["on_time"]
            open_date = house_info["open_date"]
            tags = json.dumps(house_info["tags"], ensure_ascii=False)
            total_price_range = json.dumps(
                house_info["reference_total_price"].split("-"), ensure_ascii=False
            )
            sale_status = house_info["sale_status"]
            detail_url = city_url + house_info["url"]
        except (KeyError, IndexError, TypeError, AttributeError) as e:
            # Skip malformed records instead of aborting the page, but do not
            # hide the reason (the old bare `except` swallowed everything,
            # including programming errors and write failures).
            print(f"跳过异常房源数据: {e}")
            continue

        # Outside the try: an I/O failure while writing should propagate,
        # not be silently treated as "bad record".
        write_to_csv(
            [
                title,
                cover,
                city,
                region,
                address,
                rooms_desc,
                area_range,
                all_ready,
                prices,
                house_decoration,
                company,
                house_type,
                on_time,
                open_date,
                tags,
                total_price_range,
                sale_status,
                detail_url,
            ]
        )


def drop_table():
    """
    Drop the house_info table entirely (if it exists).

    NOTE: this removes the table itself, not just its rows — use
    ``truncate`` (see save_to_mysql) to merely empty it.
    :return: None
    """
    query("drop table if exists house_info")


def save_to_mysql(clear=False):
    """
    Load every row from the house-info CSV and bulk-insert into MySQL.

    :param clear: truncate the house_info table before inserting
                  (default False)
    :return: None
    """
    with open("./data/house_info.csv", "r", encoding="utf-8") as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        rows = list(reader)

    if clear:
        query("truncate table house_info")

    if not rows:
        # Guard: the old trailing debug print (`tmp[0]`) raised IndexError on
        # an empty file, and an empty executemany batch is pointless.
        print("没有可写入的数据")
        return

    query(
        sql="""
			INSERT INTO house_info
		        (title, cover, city, region, address, rooms_desc, area_range, all_ready, prices, house_decoration,
		        company, house_type, on_time, open_date, tags, total_price_range, sale_status, detail_url)
	        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
	        """,
        params=rows,
        many=True,
    )
    print(f"共写入 {len(rows)} 条数据")


def main():
    """
    Crawl every city's paginated listing pages and persist houses to CSV.

    Reads (city_name, city_url, house_num) from the city CSV; each listing
    page holds 10 houses. A city's crawl stops early when a page request
    fails (get_data returns None).
    :return: None
    """
    init()
    for row in read_city_url_from_csv():
        city_name = row[0]
        city_url = row[1]
        house_num = int(row[2])
        # Ceil division: number of 10-house pages needed to cover house_num.
        # (Replaces the old `// 10 + 1` plus surplus bookkeeping — same
        # result for every house_num >= 0, without the off-by-one gymnastics.)
        page_count = (house_num + 9) // 10

        for page_num in range(1, page_count + 1):
            print(f"正在爬取{city_name}的房源信息...正在爬取第{page_num}页...")
            url = f"{city_url}/loupan/pg{page_num}/?_t=1"
            data = get_data(url)
            if data is None:
                # Request failed — give up on this city's remaining pages.
                break
            parse_data(city_url, data)
        print("----------------------------")


if __name__ == "__main__":
    # Step 1: crawl listings into ./data/house_info.csv (enable as needed).
    # main()
    # Manual cleanup to run in MySQL after import:
    # DELETE FROM house_info WHERE prices = '0' or city is null or address is NULL;
    # Step 2: import the CSV into MySQL, emptying the table first.
    save_to_mysql(clear=True)
