# Scraper for Beike (ke.com) second-hand-house detail pages: parses one detail
# page, upserts it into MongoDB and indexes it into Elasticsearch.
# NOTE(review): datetime/loguru are imported twice below — harmless but redundant.
import datetime
from lxml import etree
from loguru import logger

# Sample detail-page URL; unused at runtime (the worker loop pulls URLs from Redis).
url = 'https://nc.ke.com/ershoufang/103112880353.html'

import datetime
from pymongo import MongoClient
from loguru import logger
import redis
import time
# Redis db 3: apparently held rotating proxies (see commented-out code in
# get_house_info); currently unused — TODO confirm before removing.
pool = redis.ConnectionPool(host='192.168.2.117', db=3, port=6380, decode_responses=True)
r = redis.Redis(connection_pool=pool)
client = MongoClient(host='192.168.2.117', port=27017)  # connect to the MongoDB port
db = client['beike_house']
collection = db["beike_beike_second_hand_details"]
from elasticsearch import Elasticsearch
es = Elasticsearch(hosts="192.168.2.117")
import requests
import socket


def _first(results, default=''):
    """Return the first element of an XPath result list, or *default* if it is empty."""
    return results[0] if results else default


def _parse_attr_table(items, label_map):
    """Convert ``<li><span>label</span>value</li>`` rows into {english_key: value}.

    Args:
        items: list of ``li`` elements from one attribute table.
        label_map: Chinese label -> English field name.

    Returns:
        dict mapping English field names to stripped value strings.

    Rows whose label is missing from *label_map* are logged and skipped
    (instead of raising KeyError and aborting the whole page), and rows
    with no value text map to ''.
    """
    parsed = {}
    for item in items:
        labels = item.xpath("span/text()")
        if not labels:
            continue  # malformed row: no <span> label at all
        key = label_map.get(labels[0])
        if key is None:
            # New/unknown label on the page — record it so the map can be extended.
            logger.warning("unmapped attribute label: {}", labels[0])
            continue
        values = item.xpath("text()")
        parsed[key] = values[0].strip() if values else ''
    return parsed


def get_house_info(url, timeout=30):
    """Scrape one Beike second-hand-house detail page and persist the record.

    Downloads the page, extracts headline/price/layout/community/district
    fields plus the "basic info" and "transaction info" attribute tables,
    upserts the document into MongoDB (keyed by the house id taken from the
    URL) and indexes the same document into Elasticsearch.

    Args:
        url: detail-page URL, e.g. ``https://nc.ke.com/ershoufang/<id>.html``.
        timeout: seconds before the HTTP request is aborted (new optional
            parameter, default 30 — callers passing only ``url`` are unaffected).

    Raises:
        requests.RequestException: on network failure or timeout.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36',
        'authority': 'club.jd.com',
        'cache-control': 'max-age=0',
        'sec-ch-ua': '"Google Chrome";v="89", "Chromium";v="89", ";Not A Brand";v="99"',
        'sec-ch-ua-mobile': '?0',
        'upgrade-insecure-requests': '1',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'sec-fetch-site': 'none',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-user': '?1',
        'sec-fetch-dest': 'document',
        'accept-language': 'zh-CN,zh;q=0.9'
    }

    # Explicit timeout so a stalled connection cannot hang the worker loop forever.
    response = requests.get(url, headers=headers, timeout=timeout)
    html = etree.HTML(response.text)
    base = url.split(".com")[0]  # scheme+host prefix, used to absolutize relative hrefs

    # All fields fall back to '' when the node is absent from the page.
    title = _first(html.xpath("/html/body/div[1]/div[2]/div[2]/div/div/div[1]/h1/@title"))
    total = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[2]/div/span[1]/text()"))
    unit_parts = html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[2]/span[2]/text()")
    unit = unit_parts[-1] if unit_parts else ''  # last text node carries the unit price
    main_info = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[3]/div[1]/div[1]/text()"))
    type_main = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[3]/div[2]/div[1]/text()"))
    type_sub = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[3]/div[2]/div[2]/text()"))
    area = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[3]/div[3]/div[1]/text()"))
    year = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[3]/div[3]/div[2]/text()"))
    communityname = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[4]/div[1]/a[1]/text()"))
    community_hrefs = html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[4]/div[1]/a/@href")
    communityurl = base + community_hrefs[0] if community_hrefs else ''
    district = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[4]/div[2]/span[2]/a[1]/text()"))
    district_hrefs = html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[4]/div[2]/span[2]/a[1]/@href")
    district_url = base + district_hrefs[0] if district_hrefs else ''
    district_detail = _first(html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[4]/div[2]/span[2]/a[2]/text()"))
    district_detail_hrefs = html.xpath("/html/body/div[1]/div[4]/div[1]/div[2]/div[4]/div[2]/span[2]/a[2]/@href")
    district_detail_url = base + district_detail_hrefs[0] if district_detail_hrefs else ''

    # "Basic info" table: Chinese label -> English field name.
    fundamental_map = {
        '房屋户型': 'house_con',
        '所在楼层': 'house_floor',
        '建筑面积': 'house_area',
        '户型结构': 'house_struct',
        '建筑类型': 'house_contruct_type',
        '房屋朝向': 'house_direction',
        '建筑结构': 'house_construction',
        '装修情况': 'house_detraction',
        '梯户比例': 'elevtor_percent',
        '供暖方式': 'warn_type',
        '配备电梯': 'has_elevtor',
        '燃气价格': 'gas_price',
        '别墅类型': 'bighouse_type',
        '套内面积': 'userful_area',
        '用水类型': "water_type",
        '用电类型': "power_type",
        '计租面积': "rent_area"
    }
    fundamental_values = _parse_attr_table(
        html.xpath("/html/body/div[1]/div[5]/div[1]/div[1]/div/div/div[1]/div[2]/ul/li"),
        fundamental_map)

    # "Transaction info" table: Chinese label -> English field name.
    sell_map = {
        "挂牌时间": "up_time",
        "交易权属": "sell_type",
        "上次交易": "last_sell",
        "房屋用途": "house_type",
        "房屋年限": "house_age",
        "产权所属": "property_type",
        "抵押信息": "mostage_type",
        "房本备件": "house_paper",
        "房源核验码": 'house_validation_code',
        '房源核验统一编码': 'house_validation_code2',
        '房管局核验码': 'house_validation_code3',
        '房源编码': "house_code",
        "房源核验编码": 'code4',
        '房协编码': 'code5',
        '房协编号': 'code6'
    }
    sells_values = _parse_attr_table(
        html.xpath("/html/body/div[1]/div[5]/div[1]/div[1]/div/div/div[2]/div[2]/ul/li"),
        sell_map)

    dic = {
        "house_url": url,
        "host": socket.gethostname(),  # which worker machine scraped this record
        "total": total,
        "house_id": url.split("/ershoufang/")[-1].replace(".html", ''),
        "title": title.strip(),
        "unit": unit.strip(),
        "main_info": main_info.strip(),
        "type_main": type_main.strip(),
        "type_sub": type_sub.strip(),
        "district_detail": district_detail.strip(),
        # NOTE(review): "districr_url" typo is kept — it is the stored field name
        # and downstream consumers may already depend on it.
        "districr_url": district_url.strip(),
        "district_detail_url": district_detail_url.strip(),
        "communityname": communityname.strip(),
        "communityurl": communityurl.strip(),
        "district": district.strip(),
        "area": area.strip(),
        "year": year.strip(),
        "fetch_time": datetime.datetime.now(),
        "fundamental_values": fundamental_values,
        "sells_values": sells_values
    }
    logger.info(dic)
    # Upsert keyed by house id so re-scraping the same listing overwrites it.
    collection.update_one({"_id": dic['house_id']}, {"$set": dic}, upsert=True)
    # doc_type is deprecated in Elasticsearch 7+; kept for compatibility with the
    # existing index — TODO confirm cluster version before removing.
    es.index(index="beike_ershoufang_details_english", doc_type="doc", id=url.split("/")[-1].replace(".html", ''), body=dic)


if __name__ == '__main__':
    # Worker loop: pop detail-page URLs from the Redis queue (db 2) and scrape
    # each one; when the queue is empty, back off for ten minutes and retry.
    queue_pool = redis.ConnectionPool(host='192.168.2.117', port=6380, db=2, decode_responses=True)
    queue = redis.Redis(connection_pool=queue_pool)
    while True:
        detail_url = queue.spop("house_details_new")
        if not detail_url:
            logger.info("None")
            time.sleep(10 * 60)
        else:
            logger.info(detail_url)
            get_house_info(detail_url)