# -*- coding: utf-8 -*-


from utils import common_logger,get_mongodb

import datetime
from spider import rule_parser
import json
import pika
from pika.spec import PERSISTENT_DELIVERY_MODE
from config import MONGO_DB_SERVER, MQ_SERVER, MONGO_DB_PWD, MONGO_DB_USER

# Module-level logger for this worker; common_logger routes output to 'spider.log'.
logger = common_logger.get_logger(__name__, 'spider.log')


def _today_str():
    """Return today's date as 'YYYY-M-D' (no zero padding, legacy key format).

    All three components come from a single ``datetime.now()`` call so the
    string can never mix two different days when invoked around midnight
    (the original code called ``now()`` once per component).
    """
    now = datetime.datetime.now()
    return "%s-%s-%s" % (now.year, now.month, now.day)


def do_crawl(rule, collect_urls, obj):
    """Crawl every listing page reachable from ``collect_urls`` using ``rule``.

    Parameters:
        rule: dict (or JSON string) describing the XPath extraction tree;
            keys used here: 'xpath', 'nodes', 'next', 'subTask'.
        collect_urls: list of entry URLs (or JSON string encoding one).
        obj: browser driver exposing ``get``, ``current_url``,
            ``find_elements_by_xpath``, ``implicitly_wait`` — presumably a
            Selenium WebDriver (TODO confirm).

    Side effects:
        - writes parsed rows into MongoDB ``estateDb.estate``;
        - records per-day crawl status in ``crawl.url_crawled``
          (status 1 = in progress, 2 = page finished);
        - publishes sub-tasks to the durable 'to_crawl' RabbitMQ queue.

    Exceptions are caught at the top level, logged with traceback, and the
    MQ connection is always closed.
    """
    logger.info('begin crawl initial')
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=MQ_SERVER))
    channel = connection.channel()

    # Messages expire after 6,000,000 ms (~100 min) if nobody consumes them.
    mq_args = {"x-message-ttl": 6000000}
    channel.queue_declare(queue='to_crawl', durable=True, arguments=mq_args)

    # Arguments may arrive JSON-encoded (e.g. straight off the queue).
    if isinstance(rule, str):
        rule = json.loads(rule)
    if isinstance(collect_urls, str):
        collect_urls = json.loads(collect_urls)

    # Target table description: which columns form the dedup key ('isIndex')
    # and which are filled automatically ('setValue': 'NOW' -> today's date).
    table_info = {
        'db': 'estateDb',
        'table_key': 'estate',
        'table_name': '房源信息',
        'cols': [
            {
                'key': 'code',
                'isIndex': True,
                'name': '房源代码'
            },
            {
                'key': 'create_date',
                'isIndex': True,
                'name': '采集时间',
                'setValue': 'NOW'
            },
        ]
    }

    db = get_mongodb(table_info['db'])
    collect = db[table_info['table_key']]
    url_crawled = get_mongodb('crawl').url_crawled

    logger.info('task info is inited over')
    try:
        for url in collect_urls:
            obj.get(url)  # open the entry URL in the browser

            while True:  # paginate until no 'next' element is found
                logger.info('open url: %s', obj.current_url)
                today = _today_str()
                crawled_today = url_crawled.find_one(
                    {'url': obj.current_url, "time": today})

                if crawled_today is not None and crawled_today.get('status') in (1, 2):
                    # Already visited (or being visited) today — skip the page
                    # body but still fall through to pagination below.
                    logger.error("%s haved crawled today", obj.current_url)
                else:
                    # Mark the page as in-progress (status 1) before parsing.
                    # NOTE(review): Collection.save/update are deprecated in
                    # pymongo 3 and removed in 4; migrate to replace_one /
                    # update_one when the driver is upgraded.
                    url_crawled.save({'url': obj.current_url, "time": today,
                                      'status': 1})
                    elements = obj.find_elements_by_xpath(rule['xpath'])
                    logger.info('save current url in today crawled list')
                    if not elements:
                        logger.error('elements not found, break')
                        break
                    for ele in elements:
                        result = {}
                        rule_parser.rule_parse(ele, rule.get('nodes'), result)
                        logger.info('get result success')
                        # Build the dedup key from the index columns.
                        check_mark = {}
                        for col in table_info['cols']:
                            if not col['isIndex']:
                                continue
                            if col.get('setValue') is None:
                                check_mark[col['key']] = result.get(col['key'])
                            elif col['setValue'] == 'NOW':
                                result[col['key']] = check_mark[col['key']] = _today_str()
                        logger.info('crawl data check_mark: %s', check_mark)
                        if result.get('url') and rule.get('subTask'):
                            # Push the detail-page sub-task onto the queue.
                            logger.info('pull subTask to mq')
                            sub_task = {
                                'url': result.get('url'),
                                'task': rule.get('subTask')
                            }
                            channel.basic_publish(
                                exchange='',
                                routing_key='to_crawl',
                                body=json.dumps(sub_task),
                                properties=pika.BasicProperties(
                                    delivery_mode=PERSISTENT_DELIVERY_MODE))

                        # Manual upsert keyed on the index columns.
                        data = collect.find_one(check_mark)
                        logger.info('find by check_mark')
                        if data is None:
                            collect.save(result)
                        else:
                            logger.debug('existing doc: %s', data)
                            collect.update(check_mark, {'$set': result})

                    logger.info('save success')
                    # Page fully processed: promote status 1 -> 2.
                    url_crawled.update(
                        {'url': obj.current_url, "time": _today_str()},
                        {'$set': {'status': 2}})
                    logger.info('update today crawled list')

                if rule.get('next') is None:
                    logger.info('no more datas')
                    break

                # 'next' renamed: the original shadowed the builtin next().
                next_links = obj.find_elements_by_xpath(rule['next']['xpath'])
                if not next_links:
                    logger.info('no more datas')
                    break

                logger.info('go next')
                next_links[0].click()
                obj.implicitly_wait(2)

    except Exception:
        # Was print(exc): log with full traceback instead of discarding it.
        logger.exception('crawl failed')
    finally:
        connection.close()

if __name__ == '__main__':
    # Demo entry point: print a sample crawl-rule tree (for a Lianjia-style
    # second-hand-house listing page) so it can be copied into a task config.
    # Node semantics: 'list'/'element' nodes locate DOM elements by XPath
    # (relative to their parent); 'attr' nodes read one attribute into
    # result[col_key], optionally splitting the text into several columns
    # via 'split'.
    # NOTE(review): do_crawl() reads rule.get('subTask'), but this sample
    # uses the key 'sub_task' — with this rule no sub-task would ever be
    # published; confirm which spelling is intended.
    print(json.dumps({
        'title': '根节点',
        'type': 'list',
        'xpath': '//ul[@class="sellListContent"]//div[@class="info clear"]',
        # Pagination: element to click to reach the next results page.
        'next': {
            'action': 'click',
            'xpath': '//div[@class="page-box fr"]/div[@class="page-box house-lst-page-box"]/a[text()="下一页"]'
        },
        'sub_task': {
            'taskId': ''
        },
        'nodes': [
            {
                'title': '',
                'type': 'element',
                'xpath': './div[@class="title"]/a',
                'nodes': [
                    {
                        'title': '',
                        'type': 'attr',
                        'attr_key': 'data-housecode',
                        'col_key': 'code',
                    },
                    {
                        'title': '',
                        'type': 'attr',
                        'attr_key': 'href',
                        'col_key': 'url',
                    },
                ]
            },
            {
                'title': '',
                'type': 'element',
                'xpath': './div[@class="title"]/a',
                'nodes': [
                    {
                        'title': '',
                        'type': 'attr',
                        'attr_key': 'innerText',
                        'col_key': 'title',
                    },
                ]
            },
            {
                'title': '',
                'type': 'element',
                'xpath': './/div[@class="houseInfo"]',
                'nodes': [
                    {
                        'title': '',
                        'type': 'attr',
                        'attr_key': 'innerText',
                        # NOTE(review): col_key 'title' duplicates the node
                        # above; the split rules below are what populate the
                        # real columns — verify the 'title' overwrite is
                        # intended.
                        'col_key': 'title',
                        # Split "estate | type | size | orientation | ..." on
                        # '|' and map each position to its own column.
                        'split': {
                            'split_text': '|',
                            'split_rules': [
                                {
                                    'position': 0,
                                    'col_key': 'housing_estate',
                                },
                                {
                                    'position': 1,
                                    'col_key': 'house_type',
                                },
                                {
                                    'position': 2,
                                    'col_key': 'size',
                                },
                                {
                                    'position': 3,
                                    'col_key': 'orientation',
                                },
                                {
                                    'position': 4,
                                    'col_key': 'fitment',
                                    'default_value': ''
                                },
                                {
                                    'position': 5,
                                    'col_key': 'elevator',
                                    'default_value': ''
                                }
                            ]
                        }
                    },
                ]
            },
            {
                'title': '',
                'type': 'element',
                'xpath': './/div[@class="positionInfo"]',
                'nodes': [
                    {
                        'title': '',
                        'type': 'attr',
                        'attr_key': 'innerText',
                        'col_key': 'description',
                    },
                    {
                        'title': '',
                        'type': 'element',
                        'xpath': './a',
                        'nodes': [
                            {
                                'title': '',
                                'type': 'attr',
                                'attr_key': 'innerText',
                                'col_key': 'region',
                            }
                        ]
                    }
                ]
            },
            {
                'title': '',
                'type': 'element',
                'xpath': './/div[@class="followInfo"]',
                'nodes': [
                    {
                        'title': '',
                        'type': 'attr',
                        'attr_key': 'innerText',
                        # Only position 2 of the '/'-separated text is kept.
                        'split': {
                            'split_text': '/',
                            'split_rules': [
                                {
                                    'position': 2,
                                    'col_key': 'publish_date'
                                }
                            ]
                        }
                    },
                ]
            },
            {
                'title': '',
                'type': 'element',
                'xpath': './/div[@class="totalPrice"]/span',
                'nodes': [
                    {
                        'title': '',
                        'type': 'attr',
                        'attr_key': 'innerText',
                        'col_key': 'total_price'
                    }
                ]
            }
        ]
    })
    )
