# -*- coding:utf-8 -*-
"""
    for data in house_list:
        item = {}
        item["title"] = data.xpath('./div[1]/div[1]/a/text()')
        item["flood"] = "- ".join(data.xpath("./div[1]/div[2]/div//a/text()"))
        item["address"] = data.xpath("./div[1]/div[3]/div/text()")[0]
        item["tag"] = data.xpath("./div[1]/div[5]//span/text()")
        item["price"] = data.xpath("./div[1]/div[6]/div/span/text()")
        print(item)
"""

import requests
import pymysql
import pymongo
from lxml import etree


# class="sellListContent"
# @class="info clear"
# @class="title"


class HouseData(object):
    """Scrape second-hand house listings from km.lianjia.com.

    Crawls the paginated listing index, parses each entry with XPath, prints
    it and appends it to ``house.txt``. MySQL/MongoDB connections are opened
    in ``__init__`` but persistence is currently file-based only.
    """

    def __init__(self):
        # MySQL connection.
        # NOTE(review): credentials are hard-coded in source; move them to
        # environment variables or a config file before sharing this code.
        self.coon = pymysql.connect(
            host="127.0.0.1",
            port=3306,
            user="root",
            password="15096962415",
            db="13_class"
        )
        # Cursor for executing SQL statements.
        self.cursor = self.coon.cursor()

        # MongoDB connection.
        self.client = pymongo.MongoClient(host="localhost", port=27017)
        self.db = self.client["13_class"]

        # Listing index URL template; {} is the 1-based page number.
        self.url = "https://km.lianjia.com/ershoufang/pg{}/"
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36"
        }

    def get_url_list(self, pages=5):
        """Return the list of listing-page URLs to crawl.

        Args:
            pages: number of index pages to visit (default 5, matching the
                original hard-coded ``range(1, 6)``).
        """
        return [self.url.format(num) for num in range(1, pages + 1)]

    def get_data_index(self, url):
        """Fetch one listing page.

        Returns the decoded HTML text, or ``None`` when the server replies
        with a non-200 status (callers must handle the ``None`` case).
        """
        response = requests.get(url, headers=self.headers)
        response.encoding = "utf-8"
        if response.status_code == 200:
            return response.text
        return None

    def parse_data(self, response):
        """Yield one dict per house entry parsed from a listing page.

        Entries that do not match the expected layout (e.g. embedded ad
        ``<li>`` elements) are skipped instead of raising IndexError via
        bare ``[0]`` indexing on an empty XPath result.
        """
        html = etree.HTML(response)
        house_list = html.xpath('//ul[@class="sellListContent"]//li')
        for data in house_list:
            title = data.xpath('./div[1]/div[1]/a/text()')
            address = data.xpath("./div[1]/div[3]/div/text()")
            tag = data.xpath("./div[1]/div[5]//span/text()")
            price_one = data.xpath("./div[1]/div[6]/div[1]/span/text()")
            price_two = data.xpath("./div[1]/div[6]/div[2]/span/text()")
            # Skip list items missing any required field (ads, separators).
            if not (title and address and tag and price_one and price_two):
                continue
            flood = "- ".join(data.xpath("./div[1]/div[2]/div//a/text()"))
            yield {
                "标题": title[0],
                "位置": flood,
                "户型": address[0],
                "标签": tag[0],
                "总价": price_one[0],
                "单价": price_two[0]
            }

    def write_data(self, item):
        """Append one scraped item to house.txt (one dict repr per line)."""
        with open("house.txt", "a", encoding="utf-8") as f:
            f.write(str(item) + "\n")

    def close(self):
        """Release database resources opened in __init__ (opt-in cleanup)."""
        self.cursor.close()
        self.coon.close()
        self.client.close()

    def main(self):
        """Crawl every index page; print and persist each parsed item."""
        for url in self.get_url_list():
            response = self.get_data_index(url)
            if response is None:
                # Non-200 reply: skip this page instead of feeding None
                # into etree.HTML, which would raise.
                continue
            for item in self.parse_data(response):
                print(item)
                self.write_data(item)
                # self.db.house_data.insert_one(dict(item))


if __name__ == '__main__':
    # Script entry point: build the scraper and run the full crawl.
    HouseData().main()
