# -*- coding: utf-8 -*-

import requests
from bs4 import BeautifulSoup
import csv
import logging
import time


# URL pieces for Ke.com (Beike) Xi'an rental listings; page URLs are
# assembled as Url_head + <district>/pg<N> + Url_tail.
Url_head = "https://xa.zu.ke.com/zufang/"
Url_tail = "/#contentList"
# NOTE(review): Num appears unused anywhere in this file — confirm before removing.
Num = 0
# Output CSV path; opened in append mode by write_csv() and run().
Filename = "rent.csv"

# Configure logging output.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Proxy settings (empty strings = no proxy configured).
# NOTE(review): `proxies` is defined but never passed to requests.get()
# below — confirm whether proxying was intended.
proxies = {
    'http': '',
    'https': ''
}

# Mobile Safari User-Agent sent with every request (fetches the mobile markup).
headers = {
    "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1"}


def sleep(seconds):
    """Pause for *seconds* — used to throttle the crawl rate."""
    time.sleep(seconds)


def write_csv(msg_list, filename=None):
    """Append one row to a CSV file in Excel dialect.

    Args:
        msg_list: Sequence of field values forming one CSV row.
        filename: Optional target path; when omitted, falls back to the
            module-level ``Filename`` constant (backward compatible).
    """
    target = Filename if filename is None else filename
    # `with` guarantees the handle is closed even if writerow raises
    # (the original open()/close() pair leaked it on error).
    # newline='' prevents the csv module emitting blank lines on Windows.
    with open(target, 'a', newline='', encoding='utf-8') as out:
        csv.writer(out, dialect='excel').writerow(msg_list)


def acc_page_msg(page_url):
    """Fetch one listing page and append every parsed rental row to the CSV.

    Parses the Beike mobile markup: the description <p> contains
    "district-area-address / floor area / orientation / house type ...（floor）"
    separated by '/'.

    Args:
        page_url: Full URL of one paginated district listing page.
    """
    # NOTE(review): the module-level `proxies` dict is never passed here —
    # confirm whether proxying was intended.
    response = requests.get(page_url, timeout=20, headers=headers)
    soup = BeautifulSoup(response.text, 'html.parser')

    items = soup.select('.content__list .content__list--item')
    logging.info("租房信息数：" + str(len(items)))
    for item in items:
        # Parse the description line once instead of re-finding it per field.
        des_text = item.find('p', class_='content__list--item--des').text
        des_parts = [part.strip() for part in des_text.split('/')]
        # Need location + floor area + orientation + house type; the original
        # raised IndexError on cards with fewer '/'-separated fields.
        if len(des_parts) < 4:
            continue
        location = des_parts[0].split('-')
        if len(location) < 3:
            continue
        area = location[0].strip()
        located = location[1].strip()
        address = location[2].strip()
        title_text = item.find('p', class_='content__list--item--title').text
        rent_type = title_text.strip().split(' ')[0].split('·')[0].strip()
        rent_floor_area = des_parts[1][:-1]  # drop trailing unit character
        rent_room_orientation = des_parts[2]
        rent_house_type = des_parts[3]
        # Floor info sits inside full-width parentheses at the end of the line.
        rent_height = des_text.split('（')[-1].split('）')[0]
        tags = item.select('p.content__list--item--bottom i')
        labels = ','.join(tag.get_text().strip() for tag in tags)
        # A long "height" means the parentheses were absent — treat as basement
        # (preserves the original heuristic).
        if len(rent_height) >= 5:
            rent_height = "地下室"
        price = item.find('span', class_='content__list--item-price').text.strip()[:-3].strip()

        write_csv((area, located, address, rent_type, rent_floor_area,
                   rent_room_orientation, rent_house_type, rent_height,
                   price, labels))


def get_pages_urls(url_head="https://xa.zu.ke.com/zufang/", url_tail="/#contentList"):
    """Build the full list of district listing-page URLs to crawl.

    Each URL has the shape ``url_head + <district>/pg<N> + url_tail`` with
    N starting at 1, emitted in district order.

    Args:
        url_head: Base listing URL (defaults to the Xi'an rental root,
            matching the module-level Url_head).
        url_tail: Fragment appended to every page URL (matches Url_tail).

    Returns:
        list[str]: every page URL, 755 in total.
    """
    # (district path, accessible page count) — counts were observed manually.
    # NOTE(review): the original comments claimed 37 pages for 西咸新区 and 21
    # for 临潼区 but looped 35 and 20; the loop counts are preserved here.
    districts = [
        ("xinchengqu", 100),      # 新城区
        ("beilin", 100),          # 碑林区
        ("lianhu", 100),          # 莲湖区
        ("weiyang", 100),         # 未央区
        ("yanta", 100),           # 雁塔区
        ("baqiao", 100),          # 灞桥区
        ("changan7", 100),        # 长安区
        ("xixianxinquxian", 35),  # 西咸新区
        ("lintong", 20),          # 临潼区
    ]
    return [
        "{}{}/pg{}{}".format(url_head, district, page, url_tail)
        for district, pages in districts
        for page in range(1, pages + 1)
    ]


def run():
    """Crawl every listing page, appending all rows to the CSV, with timing logs."""
    logging.info("开始爬虫")
    start_time = time.time()

    # Write the header row. encoding='utf-8' matches write_csv so the file is
    # not mixed-encoding on platforms whose default codec is not UTF-8, and
    # `with` closes the handle even if writerow raises.
    # NOTE(review): mode 'a' appends a fresh header on every run — confirm
    # whether the file is expected to be truncated beforehand.
    title = ("area", "located", "address", "type", "floor_area",
             "room_orientation", "house_type", "height", "price", "labels")
    with open(Filename, 'a', newline='', encoding='utf-8') as out:
        csv.writer(out, dialect='excel').writerow(title)

    for url in get_pages_urls():
        acc_page_msg(url)
        logging.info("url:{}".format(url))

        # Throttle the crawl: pause 1 second between pages.
        time.sleep(1)

    logging.info("结束爬虫")
    elapsed_time_minutes = (time.time() - start_time) / 60
    logging.info("耗时：{}分钟".format(elapsed_time_minutes))


# Entry point: start the crawl when executed as a script.
if __name__ == '__main__':
    run()
