import asyncio
import functools

import aiohttp
from lxml import etree
from pymongo import MongoClient


class House(object):
    """Asynchronously scrape qfang.com Nanjing second-hand house listings.

    Pages 1-10 are fetched concurrently with aiohttp; each finished fetch is
    parsed with lxml and the extracted items are bulk-inserted into the
    ``test.house`` MongoDB collection.
    """

    def __init__(self):
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36"
        }
        # Session cookies captured from a browser; the site requires them to
        # serve listing pages.
        cookies = 'sid=9895e0ed-be6b-4a07-8412-fd84c75fdc55; qchatid=c77c8bb8-3e79-43ff-a27c-ae006d62167c; cookieId=99530d0b-366e-41e9-9982-032a6406d77f; JSESSIONID=aaaXNkhAvy7jAaM6013wx; CITY_NAME=NANJING; Hm_lvt_eaaf2b6886b56980102bdc75a8151d46=1605143100; WINDOW_DEVICE_PIXEL_RATIO=1.25; Hm_lvt_de678bd934b065f76f05705d4e7b662c=1605143100; _ga=GA1.3.1061449364.1605143100; _gid=GA1.3.935622573.1605143100; Hm_lpvt_eaaf2b6886b56980102bdc75a8151d46=1605145762; Hm_lpvt_de678bd934b065f76f05705d4e7b662c=1605145762'
        # Split "name=value; name=value" into a dict.  Each pair is split only
        # on the FIRST '=' because cookie values may themselves contain '='.
        self.cookies_dict = {
            pair[:pair.find("=")]: pair[pair.find("=") + 1:]
            for pair in cookies.split("; ")
        }

        # NOTE(review): the client is never closed; acceptable for a
        # short-lived script, but worth a `close()` if this grows.
        client = MongoClient("127.0.0.1", 27017)
        self.collection = client["test"]["house"]

    def insert(self, task_obj):
        """Done-callback for a page-fetch task: parse the HTML, store the items.

        :param task_obj: a finished asyncio.Task whose ``result()`` is the raw
            HTML text of one listing page.
        """
        items = []  # renamed from `list`, which shadowed the builtin
        html = etree.HTML(task_obj.result())
        info_list = html.xpath('//ul/li[@class="items clearfix"]')

        for info in info_list:
            item = {}
            # Hoist the repeated meta-paragraph query; it was previously
            # evaluated once per field (four times per listing).
            metas = info.xpath('.//div[@class="house-metas clearfix"]/p/text()')
            title_text = info.xpath('.//div[@class="list-main-header clearfix"]/a/text()')
            item['title'] = title_text[0]
            item['大小'] = metas[0]
            item['面积'] = metas[1]
            item['状态'] = metas[2]
            # Strip the page's layout whitespace from the floor/height field.
            item['高度'] = metas[3].replace('\r\n\t\t\r\n', '').replace('\t\t\t\t', '')
            item['方向'] = info.xpath('.//div[@class="house-metas clearfix"]/p[5]/text()')[0]

            # The 6th paragraph (listing date) is optional on the page.
            date = info.xpath('.//div[@class="house-metas clearfix"]/p[6]/text()')
            item['时间'] = date[0].strip() if date else None

            # Join location breadcrumbs with '-'; replaces the old
            # str(list).replace("['", ...) string-surgery hack.
            wei_str = info.xpath('.//div[@class="text fl"]//a/text()')
            item['位置'] = '-'.join(wei_str)

            price_str = info.xpath('.//div[@class="list-price"]/p[1]/span/text()')[0]
            item['总价格'] = str(price_str) + '万'
            item['每㎡价格'] = info.xpath('.//div[@class="list-price"]/p[2]/text()')[0]

            # BUG FIX: the old bare `except: return None` aborted the WHOLE
            # page whenever a single listing had no tag block, silently
            # discarding every item parsed so far.  Now only this item's
            # 标签 field is left as None and the rest of the page survives.
            try:
                flag_1 = info.xpath('.//div[@class="house-tags clearfix"]/a/span/text()')[0]
                flag_2 = info.xpath('.//div[@class="house-tags clearfix"]/p/text()')
                item['标签'] = ','.join([str(flag_1)] + [str(f) for f in flag_2])
            except IndexError:
                item['标签'] = None

            items.append(item)

        # insert_many() raises InvalidOperation on an empty list, so guard it
        # (a page can legitimately parse to zero items, e.g. on a ban page).
        if items:
            self.collection.insert_many(items)

    async def house_spider_1(self, i):
        """Fetch page *i* of the sale listings and return the raw HTML text."""
        async with aiohttp.ClientSession() as session:
            url = "https://nanjing.qfang.com/sale/f%s" % i
            # NOTE(review): the listing page is requested with POST here;
            # confirm the site does not expect a plain GET.
            async with session.post(url, headers=self.headers, cookies=self.cookies_dict) as r:
                return await r.text()

    async def main(self):
        """Fan out fetch tasks for pages 1-10 and wait for all to finish."""
        tasks = []
        for i in range(1, 11):
            task = asyncio.create_task(self.house_spider_1(i))
            # functools.partial(self.insert) was a no-op wrapper: the bound
            # method already matches the done-callback signature.
            task.add_done_callback(self.insert)
            print('爬取%s页数据' % i)
            tasks.append(task)

        # Wait for all downloads (and, via the callbacks, all inserts).
        await asyncio.wait(tasks)


if __name__ == '__main__':
    house = House()
    # asyncio.run() replaces the deprecated get_event_loop()/
    # run_until_complete() pattern and also closes the loop cleanly.
    # (The file already uses asyncio.create_task, so Python >= 3.7 is assumed.)
    asyncio.run(house.main())
