import pymongo
import redis
from elasticsearch import Elasticsearch
from loguru import logger
# Resumable migration: stream every document from the MongoDB collection
# `beike_house.ershou_chengjiao` into the Elasticsearch index
# `beike_chengjiao2`, checkpointing progress in Redis so an interrupted
# run resumes where it left off instead of starting over.
client = pymongo.MongoClient("localhost")
db = client['beike_house']
collection = db['ershou_chengjiao']
# collection_house_total = db['ershoufang_total']

# Redis db 7 holds the resume checkpoint; decode_responses=True makes
# r.get() return str so int() works directly.
pool = redis.ConnectionPool(host='localhost', port=6379, db=7, decode_responses=True)
r = redis.Redis(connection_pool=pool)
redis_key = "chengjiao_cursor"
# Count of documents fully migrated by previous runs (0 on a fresh start).
cursor = int(r.get(redis_key)) if r.exists(redis_key) else 0

es = Elasticsearch(hosts="localhost")

for offset, doc in enumerate(collection.find().skip(cursor)):
    position = cursor + offset  # absolute index of this document in the scan
    del doc["_id"]  # ObjectId is not JSON-serializable; drop before indexing
    logger.info(position)
    # Use the numeric house id from the URL (".../<id>.html") as the ES doc
    # id so re-runs are idempotent: re-indexing overwrites, never duplicates.
    house_id = doc['house_url'].split("/")[-1].replace(".html", '')
    # NOTE(review): doc_type is deprecated/removed in ES 7+/8 — kept for
    # compatibility with the cluster this was written against; confirm version.
    res = es.index(index="beike_chengjiao2", doc_type="doc", id=house_id, body=doc)
    logger.info(res)
    # Queue the listing's chengjiao section page for re-crawling.
    # NOTE(review): str() of a dict stores a Python repr (single quotes),
    # not JSON — kept byte-compatible with whatever consumer reads this set;
    # verify it shouldn't be json.dumps() instead.
    r.sadd('beike_chengjiao_queue', str({"url": doc['house_url'].split("/chengjiao")[0] + "/chengjiao"}))
    # Advance the checkpoint only AFTER the document is indexed, and by +1 so
    # a resumed run starts at the next unprocessed document. (The original
    # wrote the current position before indexing, so the last completed
    # document was re-processed on every restart.)
    r.set(redis_key, position + 1)
