# -*- coding:utf-8 -*-
import itertools
import json
import threading
from Queue import Queue
from datetime import datetime

import pymongo


class TaskCollector(threading.Thread):
    """Daemon worker thread that consumes crawl/extract result objects from a
    bounded in-memory queue and aggregates per-site daily/hourly counters
    into a MongoDB collection.

    `db_conf` must provide: host, port, db_name, collection_name, username,
    password (username may be '' to skip authentication). `log` is any object
    exposing an `error(msg)` method.
    """

    def __init__(self, db_conf, log):
        threading.Thread.__init__(self)
        # Bounded queue: producers block instead of growing memory unbounded.
        self._queue = Queue(10000)
        self._log = log
        self._collection = None
        self.db_conf = db_conf
        self._mongo_client = pymongo.MongoClient(self.db_conf['host'], self.db_conf['port'])

        if self.db_conf['username'] != '':
            self._mongo_client[self.db_conf['db_name']].authenticate(self.db_conf['username'], self.db_conf['password'])
        self._collection = self._mongo_client[self.db_conf['db_name']][self.db_conf['collection_name']]
        self._prepare_db()

        self.daemon = True   # don't block interpreter shutdown
        self.running = True  # checked by run(); set False to stop after the next record

    def _prepare_db(self):
        """Declare the accepted metric names and ensure the lookup indexes exist."""
        self._allowed_metric = ['extract_success', 'extract_fail', 'extract_skip', 'download_fail']
        self._collection.create_index(
            [
                ('metadata.site', pymongo.ASCENDING),
            ],
            background=True,
            unique=False
        )
        self._collection.create_index(
            [
                ('metadata.site', pymongo.ASCENDING),
                ('metadata.date', pymongo.ASCENDING)
            ],
            background=True,
            unique=False
        )

    def _stat_field_names(self):
        """Return every counter field name for the stats document:
        '<metric>.hourly.<0..23>' and '<metric>.daily' for each allowed metric.
        """
        buckets = ['hourly.%s' % i for i in range(24)] + ['daily']
        return [
            '%s.%s' % (metric, bucket)
            for metric, bucket in itertools.product(self._allowed_metric, buckets)
        ]

    def _init_stats_document(self, metadata):
        """Zero-fill every counter field that does not yet exist on the stats
        document identified by `metadata`, and reset the recheck bookkeeping
        fields.

        One unordered bulk write is used so a single failing update does not
        abort the remaining ones.
        """
        write_requests = [
            pymongo.operations.UpdateOne(
                filter={
                    'metadata': metadata,
                    field: {'$exists': False}
                },
                update={
                    '$set': {field: 0}
                }
            )
            for field in self._stat_field_names()
        ]
        write_requests += [
            pymongo.operations.UpdateOne(
                filter={'metadata': metadata},
                update={'$set': {'rechecked': False}}
            ),
            pymongo.operations.UpdateOne(
                filter={'metadata': metadata},
                update={'$set': {'recheck_start_time': 0}}
            )
        ]
        self._collection.bulk_write(write_requests, ordered=False)

    def _send_record(self, metadata, metric):
        """Increment the daily counter and the current-hour counter for
        `metric` on the stats document keyed by `metadata` (stamped with
        today's date). Upserts the document; on first sight the remaining
        counters are zero-filled via _init_stats_document.

        Note: mutates `metadata` in place by adding the 'date' key.
        """
        try:
            now = datetime.now()
            metadata['date'] = now.strftime("%Y-%m-%d")

            query = {
                'metadata': metadata
            }
            update = {
                '$inc': {
                    '%s.daily' % (metric,): 1,
                    '%s.hourly.%d' % (metric, now.hour): 1,
                }
            }
            write_result = self._collection.update_one(query, update, upsert=True)
            if write_result.matched_count == 0:  # nothing matched -> upsert inserted a new document
                self._init_stats_document(metadata)

        except Exception as e:
            # Collection must never kill the worker thread: log and carry on.
            # str(e) rather than e.message: .message is deprecated (removed in py3).
            self._log.error(str(e))

    def run(self):
        """Consume records until `running` is cleared.

        The flag is only checked between records and get() blocks, so a stop
        request takes effect after the next record arrives.
        """
        while self.running:
            record = self._queue.get()
            self._build_record_and_put(record)

    def _build_record_and_put(self, obj):
        """Map one crawl/extract result object onto a metric and record it.

        A download failure (crawl status_code == 1) wins over any extract
        status; otherwise ex_status 2/1/3 map to success/skip/fail.
        `obj` is a project result object exposing base_info.site,
        parse_extends (JSON string or empty), extract_info.ex_status and
        crawl_info.status_code.
        """
        metadata = {'site': obj.base_info.site}
        parse_extends = json.loads(obj.parse_extends) if obj.parse_extends else {}
        if parse_extends:
            metadata['parser_id'] = parse_extends.get('parser_id', -1)
        else:
            metadata['parser_id'] = -1

        ex_status = obj.extract_info.ex_status
        crawl_status = obj.crawl_info.status_code
        if crawl_status == 1:
            self._send_record(metadata, 'download_fail')
        elif ex_status == 2:
            self._send_record(metadata, 'extract_success')
        elif ex_status == 1:
            self._send_record(metadata, 'extract_skip')
        elif ex_status == 3:
            self._send_record(metadata, 'extract_fail')

    def save_record(self, data):
        """Producer entry point: enqueue one record.

        Blocks when the queue is full (10000 pending records).
        """
        self._queue.put(data)
