"""Beike rental-listing scraper.

Fetches listing pages with headless Selenium Chrome, parses them with lxml,
indexes each listing into Elasticsearch and mirrors it into MongoDB; the
work queue of start URLs lives in Redis (see the __main__ block below).
"""
import random
import time

import redis
from settings import logger
from selenium import webdriver
from selenium.webdriver import ChromeOptions
import datetime
from utils.kafka_utils import producer
import pymongo
client = pymongo.MongoClient("192.168.2.117")
db = client['beike_house']
collection = db['beike_house_zufang']
import lxml
from lxml import etree
from elasticsearch import Elasticsearch

# Elasticsearch client; port defaults to 9200 when only a host is given.
es = Elasticsearch(hosts="192.168.2.117")
option = ChromeOptions()
option.add_argument('--disable-infobars')  # suppress the "Chrome is being controlled by automated software" banner
# Anti-bot-detection: with 'enable-automation' excluded, evaluating
# window.navigator.webdriver in the DevTools console yields `undefined`
# instead of `true`, so naive bot checks pass.
option.add_experimental_option('excludeSwitches', ['enable-automation'])
option.add_argument('--no-sandbox')
option.add_argument('--disable-dev-shm-usage')
option.add_argument('--headless')
option.add_argument('blink-settings=imagesEnabled=false')  # skip image loading to speed up page fetches
# option.add_argument('--disable-gpu')
driver = webdriver.Chrome(options=option)
# self.driver = webdriver.PhantomJS()  # PhantomJS is unmaintained; headless Chrome is used instead
# end of anti-detection setup
# maximize the browser window
driver.maximize_window()
# implicit wait is applied per navigation inside fetch_details
# self.
def fetch_details(url):
    """Scrape Beike rental listing pages starting at *url*.

    For every listing on the page, builds a record dict, indexes it into
    Elasticsearch (index ``beike_zufang``) and inserts it into MongoDB,
    then follows the pagination link to the next page.

    Pagination is walked with a loop rather than the original tail
    recursion, which could exhaust the recursion limit on long listings.

    Raises:
        EOFError: when the last page is reached (original contract —
            the caller treats any exception as "this start URL is done").
        IndexError: if the expected listing/pagination nodes are absent.
    """
    while True:
        driver.get(url)
        driver.implicitly_wait(10)
        html = etree.HTML(driver.page_source)
        # Site root, e.g. "https://wh.zu.ke.com" — hoisted; the original
        # recomputed this split for every relative href.
        base = url.split(".com")[0] + ".com"

        houses = html.xpath("/html/body/div[3]/div[1]/div[5]/div[1]/div[1]/div")
        for house in houses:
            def opt(xpath, prefix=""):
                # First match of *xpath* under this listing (optionally
                # prefixed with the site root), or '' when absent.
                vals = house.xpath(xpath)
                return prefix + vals[0] if vals else ""

            # Mandatory fields — an IndexError here means the page layout
            # changed, which should surface rather than be swallowed.
            house_url = base + house.xpath("div/p[1]/a/@href")[0]
            title = house.xpath("div/p[1]/a/text()")[0]
            price = house.xpath("div/span/em/text()")[0]
            price_unit = house.xpath("div/span/text()")[0]

            # The <a> tags inside p[2] (district / sub-district / compound)
            # may each be missing, hence the '' fallbacks.
            district_url = opt("div/p[2]/a[1]/@href", base)
            district_name = opt("div/p[2]/a[1]/text()")
            district_url_discrete = opt("div/p[2]/a[2]/@href", base)
            district_name_discrete = opt("div/p[2]/a[2]/text()")
            xiaoqu_url = opt("div/p[2]/a[3]/@href", base)
            xiaoqu_name = opt("div/p[2]/a[3]/text()")

            # area / direction / room layout only parse cleanly when p[2]
            # holds exactly 8 text nodes (original heuristic).  NOTE: the
            # layout variable was originally named `houses`, shadowing the
            # listing collection being iterated — renamed to avoid that.
            p2_texts = house.xpath("div/p[2]/text()")
            if len(p2_texts) == 8:
                area, direction, layout = p2_texts[-4], p2_texts[-3], p2_texts[-2]
            else:
                area = direction = layout = ""

            house_id = house_url.split("/")[-1].replace(".html", "")
            dic = {
                "house_url": house_url.strip(),
                "title": title.strip(),
                "price": price.strip(),
                "price_unit": price_unit.strip(),
                "district_url": district_url,
                "district_name": district_name.strip(),
                "district_name_discrete": district_name_discrete.strip(),
                "district_url_discrete": district_url_discrete.strip(),
                "xiaoqu_url": xiaoqu_url.strip(),
                "xiaoqu_name": xiaoqu_name.strip(),
                "area": area.strip(),
                "direction": direction.strip(),
                "houses": layout.strip(),
                "fetch_time": datetime.datetime.now(),
                "house_id": house_id,
            }

            # ES first, then Mongo: insert_one mutates dic (adds _id),
            # so this order keeps the ES document free of Mongo's _id.
            result = es.index(index="beike_zufang", doc_type="doc", id=house_id, body=dic)
            collection.insert_one(dic)
            logger.warning(result)

        next_urls = html.xpath("/html/body/div[3]/div[1]/div[5]/div[1]/div[2]/a/@href")
        logger.info(next_urls)
        # Last-page heuristic: on the final page the first pagination href
        # coincides with the second-to-last one.
        if next_urls[0] == next_urls[-2]:
            raise EOFError
        time.sleep(random.randint(1, 5))  # jitter between pages to look less bot-like
        url = base + next_urls[-1]


# Redis connection used as the shared work queue (db 2, non-default port
# 6380); decode_responses=True so queue items come back as str, not bytes.
pool = redis.ConnectionPool(host='192.168.2.117',db=2, port=6380, decode_responses=True)
r = redis.Redis(connection_pool=pool)
if __name__ == "__main__":
    # literal_eval parses the str(dict) payloads safely; the original used
    # eval(), which would execute arbitrary code if the queue were poisoned.
    import ast

    while True:
        # rpoplpush back onto the same list rotates the queue: the item is
        # re-queued immediately, so a crash mid-scrape does not lose it.
        payload = r.rpoplpush("beike_zufang_queue", "beike_zufang_queue")
        zufang_url = ast.literal_eval(payload)
        logger.info(zufang_url)
        url = zufang_url['url']
        # NOTE(review): both of these read 'city_name'; if the payload has a
        # separate 'district_name' key this first line looks like a typo —
        # left unchanged to avoid a KeyError on existing queue entries.
        district_name = zufang_url['city_name']
        city_name = zufang_url['city_name']

        if not url:
            break
        logger.info(url)
        try:
            # Historical queue entries stored the url as a 1-tuple (trailing
            # comma upstream); the original `url[0]` handled that form but
            # passed only the first character when url is a plain string.
            # Accept both forms.
            fetch_details(url if isinstance(url, str) else url[0])
        except Exception as e:
            # Any failure (including fetch_details' EOFError at the last
            # page) just moves on to the next rotated queue item.
            logger.info(e)

