import time

import requests
from lxml import etree
from pymongo import MongoClient

# Request headers for nanjing.qfang.com: a desktop Chrome User-Agent plus a
# session cookie captured from a logged-in browser session.
# NOTE(review): the Cookie value is session-bound and will expire; once it
# does, the site will likely reject or redirect these requests — refresh it
# from the browser before running the crawler.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 Safari/537.36',
    'Cookie': 'sid=1130e471-e3b4-47a0-80db-e2d09486ca3c; qchatid=3067c6db-a210-4439-b704-293e23ecadcb; cookieId=b9beb8f3-f860-46d1-aa91-674eda0e3979; JSESSIONID=aaaTxz65dcXGa40ev43wx; Hm_lvt_de678bd934b065f76f05705d4e7b662c=1605143476; _ga=GA1.3.1464733272.1605143476; _gid=GA1.3.141663440.1605143476; WINDOW_DEVICE_PIXEL_RATIO=1.25; CITY_NAME=NANJING; Hm_lvt_eaaf2b6886b56980102bdc75a8151d46=1605143478; _jzqc=1; _jzqckmp=1; wzws_cid=bf19ecb4833324bfdb6bb556ba6057d36f37dfcd21f1650dafca52ae3d17e26348fafa768724f25ead932acf343a0884c89babde0d890877a8ca582a4464cf24b956ad532839805d245a80a9f4fb6227; Hm_lpvt_eaaf2b6886b56980102bdc75a8151d46=1605145455; Hm_lpvt_de678bd934b065f76f05705d4e7b662c=1605145486; _jzqa=1.1213954754549273900.1605143492.1605143492.1605145487.2; _qzja=1.1708129336.1605143491636.1605143491637.1605145487448.1605143525933.1605145487448.0.0.0.3.2; _qzjc=1; _qzjto=3.2.0; _jzqb=1.1.10.1605145487.1; _qzjb=1.1605145487448.1.0.0.0'
}
# MongoDB connection; pymongo connects lazily, on the first actual operation.
client = MongoClient('localhost', 27017)


def processing_data(resp, page):
    """Parse one qfang.com sale-listing page and store the records in MongoDB.

    Each listing becomes one document in the collection
    ``q_fang.q_fang_info_page_<page>``.

    Args:
        resp: Raw HTML text of a listing page.
        page: Page number as a string; used in the collection name and the
            completion log line.
    """
    collection = client['q_fang']['q_fang_info_page_' + page]
    html = etree.HTML(resp)

    # House title
    title_list = html.xpath('//a[@class="house-title fl"]/@title')
    # House type (room layout)
    type_list = html.xpath('//div[@class="house-metas clearfix"]/p[1]/text()')
    # House size
    size_list = html.xpath('//div[@class="house-metas clearfix"]/p[2]/text()')
    # Floor
    floor_list = html.xpath('//div[@class="house-metas clearfix"]/p[4]/text()')
    # Orientation
    orientation_list = html.xpath('//div[@class="house-metas clearfix"]/p[5]/text()')
    # Listing creation date (may be missing for some listings)
    create_time = html.xpath('//div[@class="house-metas clearfix"]/p[6]/text()')

    total_price = html.xpath('//div[@class="list-price"]/p[1]/span[1]/text()')
    unit_price = html.xpath('//div[@class="list-price"]/p[2]/text()')

    # Address parts: district / road / estate name
    qu = html.xpath('//div[@class="text fl"]/a[1]/text()')
    lu = html.xpath('//div[@class="text fl"]/a[2]/text()')
    hao = html.xpath('//div[@class="text fl"]/a[3]/text()')

    # Strip layout whitespace. Comprehensions replace the original manual
    # index-mutation loops; the create_time loop also used `time` as its loop
    # variable, shadowing the imported `time` module — fixed here.
    floor_list = [f.replace('\r', '').replace('\n', '').replace('\t', '')
                  for f in floor_list]
    create_time = [t.replace('\r', '').replace('\n', '').replace(' ', '')
                   for t in create_time]

    # "district-road-estate" address strings, aligned positionally.
    addr_list = ['-'.join(parts) for parts in zip(qu, lu, hao)]

    info_list = []
    for i, title in enumerate(title_list):
        try:
            created = create_time[i]
        except IndexError:
            # Only IndexError can occur here (narrowed from the original broad
            # `except Exception`); keep the original placeholder text.
            created = '该房源的创建时间不存在'
        info_list.append({
            'title': title,
            'type': type_list[i],
            'size': size_list[i],
            'floor': floor_list[i],
            'orientation': orientation_list[i],
            'create_time': created,
            'total_price': total_price[i] + '万',
            'unit_price': unit_price[i],
            'addr': addr_list[i],
        })

    # insert_many raises InvalidOperation on an empty list (e.g. a blocked or
    # empty page), so guard before inserting.
    if info_list:
        collection.insert_many(info_list)
    print('第%s页爬取完成' % page)


def send_request(pages=10, timeout=15):
    """Crawl the first *pages* sale-listing pages and process each one.

    Args:
        pages: Number of pages to fetch (default 10, matching the original
            hard-coded range — now parameterized, backward-compatibly).
        timeout: Per-request timeout in seconds. The original call had no
            timeout, so a stalled connection would hang the crawler forever.
    """
    for i in range(1, pages + 1):
        url = 'https://nanjing.qfang.com/sale/f' + str(i)
        resp = requests.get(url=url, headers=headers, timeout=timeout)
        # Fail loudly on HTTP errors instead of silently parsing an error page.
        resp.raise_for_status()
        processing_data(resp.text, str(i))


if __name__ == '__main__':
    # Guarded so that importing this module does not trigger a full crawl.
    start = time.time()
    send_request()
    # Print the total elapsed crawl time in seconds (same output as before).
    elapsed = time.time() - start
    print(elapsed)
