import redis
from elasticsearch import Elasticsearch

from selenium import webdriver
from selenium.webdriver import ChromeOptions
import datetime
import pymongo
import loguru

# Shared service clients and the single Selenium browser used by fetch_details().
es = Elasticsearch(hosts="127.0.0.1")
logger = loguru.logger
client = pymongo.MongoClient("localhost")
db = client['beike_house']
collection = db['beike_house_xiaoqu_info']

from lxml import etree
option = ChromeOptions()
option.add_argument('--disable-infobars')  # Suppress the "browser is controlled by automated software" infobar.
# Anti-bot evasion: with 'enable-automation' excluded, typing
# window.navigator.webdriver in the F12 console returns undefined instead of True.
option.add_experimental_option('excludeSwitches', ['enable-automation'])
option.add_argument('--no-sandbox')
# option.add_argument('--disable-dev-shm-usage')
#option.add_argument('--headless')
option.add_argument('blink-settings=imagesEnabled=false')  # Disable image loading to speed up page fetches.
driver = webdriver.Chrome(options=option)

driver.maximize_window()

def _first_text(html, xpath_expr):
    """Return the stripped text of the first node matched by *xpath_expr*, or '' if nothing matches."""
    nodes = html.xpath(xpath_expr)
    return nodes[0].strip() if nodes else ''


def fetch_details(url):
    """Fetch one beike community ("xiaoqu") detail page and persist it.

    Loads *url* in the shared Selenium ``driver``, extracts the fields below
    via absolute XPaths, then indexes the record into Elasticsearch
    (index ``beike_xiaoqu``) and inserts it into MongoDB (``collection``).

    :param url: absolute URL of the community detail page.
    """
    driver.get(url)
    driver.implicitly_wait(100)
    html = etree.HTML(driver.page_source)

    # The six "detail" rows share one layout, differing only in the row index.
    # NOTE(review): absolute XPaths degrade silently to '' if the site layout
    # changes — verify periodically against a live page.
    detail_row = "/html/body/div[1]/div[3]/div[1]/div[2]/div[3]/div[{}]/span[2]/text()"
    dic = {
        'fetch_time': datetime.datetime.now(),
        'url': url,
        'title': _first_text(html, "/html/body/div[1]/div[2]/div[2]/div/div/div[1]/h1/text()"),
        # Collapse newlines and spaces embedded in the address text.
        'position': _first_text(html, "/html/body/div[1]/div[2]/div[2]/div/div/div[1]/div/text()").replace("\n", '').replace(' ', ''),
        'house_average_price': _first_text(html, "/html/body/div[1]/div[3]/div[1]/div[2]/div[2]/div/span[1]/text()"),
        'house_content': _first_text(html, detail_row.format(1)),
        'wuyefei': _first_text(html, detail_row.format(2)),
        'wuye_commpany': _first_text(html, detail_row.format(3)),
        'producer': _first_text(html, detail_row.format(4)),
        'total_budings': _first_text(html, detail_row.format(5)),
        'total_houses': _first_text(html, detail_row.format(6)),
    }
    logger.info(dic)
    a = es.index(index="beike_xiaoqu", doc_type="doc", body=dic)
    logger.info(a)
    collection.insert_one(dic)
    logger.info("insert to mongo ok")



# Redis set "xiaoqu_urls" (db 7) holds the queue of community URLs to crawl.
pool = redis.ConnectionPool(host='localhost', port=6379, db=7, decode_responses=True)
r = redis.Redis(connection_pool=pool)

if __name__ == "__main__":
    # Drain the Redis set one URL at a time; stop when it is empty
    # (spop returns None once the set is exhausted).
    while True:
        url = r.spop("xiaoqu_urls")
        if not url:
            break
        # Rebuild a fetchable URL, e.g. target form:
        # 'https://nt.ke.com/xiaoqu/8745133390725815/'.
        # NOTE(review): this assumes the stored host lacks the '.com' suffix;
        # confirm against the producer that fills "xiaoqu_urls", otherwise
        # '.com' is duplicated.
        parts = url.split('/xiaoqu')
        url = parts[0] + ".com/xiaoqu" + parts[-1]
        print(url)
        try:
            fetch_details(url)
        except Exception:
            # One bad page must not abort the whole crawl: log with the
            # traceback and move on to the next URL.
            logger.exception("failed to fetch %s", url)


