import schedule
from pymongo import MongoClient
import pika
from config import MQ_SERVER, MONGO_DB_SERVER, MONGO_DB_USER, MONGO_DB_PWD
import json
from pika.spec import PERSISTENT_DELIVERY_MODE
import time
import threading
import datetime
from utils import common_logger

# Module-level logger; writes to 'schedule.log' via the project's common_logger helper.
logger = common_logger.get_logger(__name__, 'schedule.log')


def schedule_crawl():
    """Publish one crawl job per link for every active task.

    Reads all documents with ``run == 1`` from the ``crawl.crawl_task``
    collection, records a run-log entry with today's date, and enqueues a
    persistent message ``{'url': <link>, 'task': <task _id>}`` on the durable
    RabbitMQ queue ``to_crawl`` for each link of each task.

    Side effects only; returns None. Both the RabbitMQ and MongoDB
    connections are closed even if publishing fails.
    """
    logger.info('begin schedule')
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=MQ_SERVER))
    # NOTE: MongoClient does not connect eagerly, so constructing it here
    # will not normally raise before we enter the try block.
    conn = MongoClient('mongodb://%s:%s@%s:%s' % (MONGO_DB_USER, MONGO_DB_PWD, MONGO_DB_SERVER, 27017))
    try:
        channel = connection.channel()

        # Per-message TTL of 6,000,000 ms (~100 minutes) so stale crawl
        # requests expire instead of piling up.
        mq_args = {"x-message-ttl": 6000000}
        channel.queue_declare(queue='to_crawl', durable=True, arguments=mq_args)

        db = conn.crawl
        task_collect = db.crawl_task
        run_log = db.run_log

        logger.info('run log record')
        # Capture "now" once so the date parts cannot straddle midnight
        # between attribute reads; insert_one replaces the deprecated
        # (removed in pymongo 4.x) Collection.save.
        now = datetime.datetime.now()
        run_log.insert_one({
            "date": "%s-%s-%s" % (now.year, now.month, now.day)
        })

        tasks = task_collect.find({"run": 1})
        for task in tasks:
            # Original used '"task run :{}" % task', which raises TypeError;
            # use logging's lazy %s formatting instead.
            logger.info("task run: %s", task)
            # Tolerate tasks that have no 'links' field.
            for link in task.get('links') or []:
                sub_task = {
                    'url': link,
                    'task': str(task.get('_id'))
                }
                channel.basic_publish(exchange='',
                                      routing_key='to_crawl',
                                      body=json.dumps(sub_task),
                                      properties=pika.BasicProperties(delivery_mode=PERSISTENT_DELIVERY_MODE)
                                      )
    finally:
        # Always release both connections, even on error.
        conn.close()
        connection.close()


def run_tasks():
    """Drive the scheduler forever: fire any due jobs, then nap one second.

    Intended to run on a background thread (see do_run_tasks); never returns.
    """
    poll_interval_seconds = 1
    while True:
        schedule.run_pending()
        time.sleep(poll_interval_seconds)


def do_run_tasks():
    """Register the daily crawl at 00:10 and start the polling loop on a
    background thread.

    The thread is deliberately non-daemon (made explicit via the ``daemon=``
    kwarg, replacing the commented-out, deprecated ``setDaemon``) so the
    process stays alive to keep the schedule running.

    Returns:
        threading.Thread: the started worker, so callers can join or monitor
        it. Backward-compatible — previous callers ignored the None return.
    """
    schedule.every().day.at("00:10").do(schedule_crawl)
    worker = threading.Thread(target=run_tasks, name="schedule-runner", daemon=False)
    worker.start()
    return worker

if __name__ == "__main__":
    # Run a single crawl pass immediately when invoked directly; the daily
    # 00:10 schedule (do_run_tasks) is NOT started from here.
    schedule_crawl()
