from time import sleep
import pandas as pd
import requests
from lxml import etree

# Browser-cloned HTTP request headers so the scraper looks like a real
# Edge/Chrome session and is less likely to be blocked by anti-bot checks.
headers = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
    'Cache-Control': 'max-age=0',
    'Connection': 'keep-alive',
    # NOTE(review): session cookie copied verbatim from a logged-in browser;
    # it expires, so a stale value will likely get requests redirected or
    # blocked — refresh it from the browser before running.
    'Cookie': 'lianjia_uuid=33368d3c-b889-48f9-a590-b5475cdb3dc9; lianjia_ssid=5f5f846d-9d09-4045-bb39-dfe34f1f9b45; sajssdk_2015_cross_new_user=1; sensorsdata2015jssdkcross=%7B%22distinct_id%22%3A%2218bf71c9b4b861-0f8d99d6257496-4c657b58-2073600-18bf71c9b4cd59%22%2C%22%24device_id%22%3A%2218bf71c9b4b861-0f8d99d6257496-4c657b58-2073600-18bf71c9b4cd59%22%2C%22props%22%3A%7B%22%24latest_traffic_source_type%22%3A%22%E4%BB%98%E8%B4%B9%E5%B9%BF%E5%91%8A%E6%B5%81%E9%87%8F%22%2C%22%24latest_referrer%22%3A%22https%3A%2F%2Fwww.baidu.com%2Fother.php%22%2C%22%24latest_referrer_host%22%3A%22www.baidu.com%22%2C%22%24latest_search_keyword%22%3A%22%E8%B4%9D%E5%A3%B3%E7%BD%91%22%2C%22%24latest_utm_source%22%3A%22baidu%22%2C%22%24latest_utm_medium%22%3A%22pinzhuan%22%2C%22%24latest_utm_campaign%22%3A%22wybeijing%22%2C%22%24latest_utm_content%22%3A%22biaotimiaoshu%22%2C%22%24latest_utm_term%22%3A%22biaoti%22%7D%7D; select_city=440100; Hm_lvt_9152f8221cb6243a53c83b956842be8a=1700658033; Hm_lpvt_9152f8221cb6243a53c83b956842be8a=1700658061; srcid=eyJ0Ijoie1wiZGF0YVwiOlwiNDcwMzYwYTAwZGY1NTBmYzZkOWNlYmQ4MjlhYmYwNjNhZTIxZGVlNGNlNTBhMGMyNGEzODFmZWQ5OGM2MmVkY2YwMzc3MGRhYzYxYWUyMjI1ZWFjMjEyMmJmYjhiNjBiYmFjYWEzYzI3MGNjMGZhZDIyOWE0MzU1OTllYjQ4YWUxOWVhMGRlYmI1MDNhYTI0ZWM4MmE3NjdhNDkzNzM3NDQ5NDRjN2M3MWEwM2Q5MzYzOTg0OGNiNmJjYTdmZWExZWRkODJkNThiYWJjM2ViNzA2YzhhNGM3NDc3Nzk1YTBmMTgyNmRmMGVlMzRkOGI1Y2ZhM2NhM2VjNjlhNzlhYVwiLFwia2V5X2lkXCI6XCIxXCIsXCJzaWduXCI6XCIyZjllZmQzNVwifSIsInIiOiJodHRwczovL2d6LmtlLmNvbS9lcnNob3VmYW5nLyIsIm9zIjoid2ViIiwidiI6IjAuMSJ9',
    'Host': 'gz.ke.com',
    'Referer': 'https://gz.ke.com/',
    'Sec-Ch-Ua': '"Microsoft Edge";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
    'Sec-Ch-Ua-Mobile': '?0',
    'Sec-Ch-Ua-Platform': "Windows",
    'Sec-Fetch-Dest': 'document',
    'Sec-Fetch-Mode': 'navigate',
    'Sec-Fetch-Site': 'same-origin',
    'Sec-Fetch-User': '?1',
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0'
}

# Listing-page URLs to scrape: page 1 seeded here, pages 2-100 are
# appended by join_url_list().
urls = ['https://gz.ke.com/ershoufang/']
# Accumulator: one [title, address, house_info, tag, total_price,
# unit_price] row per listing, filled by handle_data().
house_list = []


def do_main():
    """Run the full scrape: build the page URLs, fetch and parse every
    page, then dump the collected rows to CSV."""
    for step in (join_url_list, req_get, save_house_data):
        step()

def join_url_list():
    """Extend the module-level ``urls`` list with listing pages 2-100."""
    urls.extend('https://gz.ke.com/ershoufang/pg%s/' % page for page in range(2, 101))


def req_get():
    """Fetch every listing page in ``urls`` and pass the extracted listing
    nodes to ``handle_data``.

    Fixes over the original:
      * ``requests.get`` now has a ``timeout`` — without one, a single
        stalled connection would hang the whole scrape forever.
      * Non-200 responses (blocked / redirected pages) are skipped instead
        of being parsed as if they were valid listing pages.
    """
    for url in urls:
        # Timeout keeps an unresponsive server from blocking the run.
        response = requests.get(url, headers=headers, timeout=10)
        if response.status_code != 200:
            # Blocked or redirected page — skip it rather than parse garbage.
            sleep(1)
            continue
        html_doc = etree.HTML(response.content.decode())
        # Absolute XPath mirrors the site's current markup; brittle if the
        # page layout changes.
        info_list = html_doc.xpath(
            '/html/body/div[1]/div[4]/div[1]/div[4]/ul//li[@class="clear"]/div[@class="info clear"]')
        handle_data(info_list)
        sleep(1)  # polite throttling to avoid tripping rate limits


def handle_data(info_list):
    """Parse each listing element into a ``[title, address, house_info,
    tag, total_price, unit_price]`` row and append it to the module-level
    ``house_list``.

    Fix over the original: every single-valued field was indexed with
    ``[0]``, so any listing missing a node (ad items, layout drift) raised
    IndexError and aborted the entire scrape.  Missing fields now default
    to an empty string.
    """
    def _first(nodes):
        # First xpath hit, or '' when the node is absent.
        return nodes[0] if nodes else ''

    for info in info_list:
        title = _first(info.xpath('./div[@class="title"]/a/text()'))
        address = _first(info.xpath('./div[@class="address"]/div[@class="flood"]/div/a/text()'))
        house_info = ''.join(info.xpath('./div[@class="address"]/div[@class="houseInfo"]/text()'))
        tag = ','.join(info.xpath('./div[@class="address"]/div[@class="tag"]//span/text()'))
        total_price = _first(info.xpath('./div[@class="address"]/div[@class="priceInfo"]/div[1]/span/text()'))
        unit_price = _first(
            info.xpath('./div[@class="address"]/div[@class="priceInfo"]/div[@class="unitPrice"]/span/text()'))
        house_list.append([title, address, house_info, tag, total_price, unit_price])


def save_house_data():
    """Write the accumulated ``house_list`` rows to
    ``../static/data/house_info.csv``.

    Fix over the original: the output directory is created first —
    ``DataFrame.to_csv`` raised FileNotFoundError when ``../static/data``
    did not already exist.
    """
    from pathlib import Path

    out_path = Path('../static/data/house_info.csv')
    out_path.parent.mkdir(parents=True, exist_ok=True)
    df = pd.DataFrame(house_list, columns=['title', 'address', 'house_info', 'tag', 'total_price', 'unit_price'])
    df.to_csv(out_path, index=False)


# Run the scrape only when executed as a script, not when imported.
if __name__ == '__main__':
    do_main()
