# -*- coding: utf-8 -*-

from flask import Flask, request
import json
from pymongo import MongoClient
from bson import json_util, ObjectId
import datetime
import threading
# from spider.crawl import do_crawl
import pika
from pika.spec import PERSISTENT_DELIVERY_MODE
from config import MONGO_DB_SERVER, MQ_SERVER, MONGO_DB_PWD, MONGO_DB_USER
from utils import get_mongodb


# Module-level wiring: connect to MongoDB and RabbitMQ once at import time.
# NOTE(review): these connections are created as import side effects; if either
# server is unreachable the module fails to import — confirm this is intended.
conn = MongoClient(MONGO_DB_SERVER)
db = get_mongodb('crawl')

# Collection holding crawl task documents (see save_task / waitCrawl below).
task_collect = db.crawl_task
app = Flask(__name__)

# Single shared channel used by waitCrawl to publish sub-tasks.
# NOTE(review): pika BlockingConnection channels are not thread-safe; with a
# multi-threaded WSGI server concurrent requests share this channel — verify.
connection = pika.BlockingConnection(pika.ConnectionParameters(host=MQ_SERVER))
channel = connection.channel()

# Messages expire after 6,000,000 ms (~100 minutes) if not consumed.
mq_args = {"x-message-ttl": 6000000}
channel.queue_declare(queue='to_crawl', durable=True, arguments=mq_args)


@app.route('/')
def hello_world():
    """Health-check endpoint for the service root.

    The original body was ``pass``, so the view returned ``None`` and Flask
    raised an error (HTTP 500) on every request to ``/`` — a view function
    must return a response.  Return a minimal JSON status instead (the
    after_request hook sets the Content-Type to application/json).
    """
    return json.dumps({"status": "ok"})


@app.after_request
def add_header(response):
    """Stamp every outgoing response with CORS and content-type headers.

    Allows any origin (``*``) and declares the body as JSON, since all
    routes in this module return ``json.dumps`` strings.
    """
    extra_headers = {
        'Access-Control-Allow-Origin': '*',
        'Content-Type': 'application/json',
    }
    for header_name, header_value in extra_headers.items():
        response.headers[header_name] = header_value
    return response


@app.route('/task', methods=['POST', 'GET'])
def save_task():
    """Create/update a crawl task (POST) or list every stored task (GET).

    POST body: a JSON document (BSON-extended types restored via
    ``json_util.object_hook``).  If it carries an ``_id`` the matching task
    is updated in place and its original ``create_date`` is preserved;
    otherwise a new task is inserted with ``status`` 0.

    Returns the saved document (POST) or all tasks (GET) serialized with
    ``json_util.default``.  Raises Exception when an ``_id`` is supplied
    but no such task exists.
    """
    if request.method == 'POST':
        # object_hook restores BSON types (ObjectId, datetimes) embedded
        # in the request payload.
        data = json.loads(request.data, object_hook=json_util.object_hook)
        if data.get('_id'):
            oldData = task_collect.find_one({'_id': data['_id']})
            if oldData is None:
                raise Exception('object not found')
            data['update_date'] = datetime.datetime.now()
            # Keep the original creation timestamp across updates.
            data['create_date'] = oldData.get('create_date')
            # update_one replaces Collection.update, which was deprecated in
            # pymongo 3.x and removed in 4.x.
            task_collect.update_one({'_id': data['_id']}, {'$set': data})
        else:
            data['create_date'] = datetime.datetime.now()
            data['status'] = 0  # 0 = not yet crawled (see waitCrawl)
            # insert_one replaces the deprecated Collection.save; like save()
            # it sets data['_id'] in place, so the response echoes the new id.
            task_collect.insert_one(data)
        return json.dumps(data, default=json_util.default)
    # GET: dump every stored task.
    return json.dumps(list(task_collect.find({})), default=json_util.default)


# @app.route('/crawl/<task_id>/old', methods=['POST'])
# def doCrawl(task_id):
#     task = task_collect.find_one({'_id': ObjectId(task_id)})
#     task['status'] = 1
#     task_collect.save(task)
#     t = threading.Thread(target=do_crawl, args=(task['rule'], task['links'],))
#     t.start()
#     return json.dumps({"status": "success"})


@app.route('/crawl/<task_id>', methods=['POST'])
def waitCrawl(task_id):
    """Mark a task as running and enqueue one sub-task per link.

    Sets the task's ``status`` to 1, then publishes a persistent message
    ``{"url": <link>, "task": <task id>}`` to the durable ``to_crawl``
    queue for each entry in the task's ``links`` list.

    Returns a JSON status; responds 404 instead of crashing when the task
    id is unknown.  Raises bson.errors.InvalidId for a malformed task_id.
    """
    task = task_collect.find_one({'_id': ObjectId(task_id)})
    if task is None:
        # The original dereferenced None here and produced an HTTP 500;
        # report the missing task explicitly instead.
        return json.dumps({"status": "error", "reason": "task not found"}), 404

    # update_one replaces the deprecated Collection.save; only the status
    # field changes, so a targeted $set is equivalent to re-saving the doc.
    task_collect.update_one({'_id': task['_id']}, {'$set': {'status': 1}})

    # Push one sub-task per link onto the crawl queue.  "or []" guards
    # against tasks stored without a links field (was a TypeError before).
    for link in task.get('links') or []:
        subTask = {
            'url': link,
            'task': str(task.get('_id')),
        }
        # Persistent delivery so queued sub-tasks survive a broker restart
        # (the queue itself is declared durable at module load).
        channel.basic_publish(
            exchange='',
            routing_key='to_crawl',
            body=json.dumps(subTask),
            properties=pika.BasicProperties(delivery_mode=PERSISTENT_DELIVERY_MODE),
        )

    return json.dumps({"status": "success"})

if __name__ == '__main__':
    # Development entry point: bind on all interfaces, default Flask port 5000.
    app.run(host='0.0.0.0')
