#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Jimin Huang <huangjimin@whu.edu.cn>
# Date: 15.02.2017
import logging

from datetime import timedelta
from scrapy import signals

# NOTE(review): DBSessionMaker and AquilaStats were used below but never
# imported — assumed to live in aquila.database alongside DBSession;
# confirm the actual module and adjust if needed.
from aquila.database import AquilaStats, DBSession, DBSessionMaker


logger = logging.getLogger(__name__)


class AquilaExtension(object):
    """Scrapy extension that records crawl statistics to the database.

    Maintains a ``database_error_count`` stat (incremented whenever an item
    is dropped) and, when the spider closes, persists the run's stats as an
    ``AquilaStats`` row through a ``DBSession``.
    """

    def __init__(self, stats):
        """Initialize the extension with the crawler's stats collector.

        Args:
            stats: the stats collector instance of the crawler.
        """
        self.stats = stats

    @classmethod
    def from_crawler(cls, crawler):
        """Build the extension and connect it to the crawler's signals.

        Args:
            crawler: an instance of the scrapy crawler.

        Returns:
            AquilaExtension: the extension, wired to ``spider_opened``,
            ``spider_closed`` and ``item_dropped`` signals.
        """
        extension = cls(crawler.stats)
        crawler.signals.connect(
            extension.spider_opened,
            signal=signals.spider_opened
        )
        crawler.signals.connect(
            extension.spider_closed,
            signal=signals.spider_closed
        )
        crawler.signals.connect(
            extension.item_dropped,
            signal=signals.item_dropped
        )
        return extension

    def item_dropped(self, item, response, exception, spider):
        """Count a dropped item as a database error.

        Args:
            item: the item that was dropped.
            response: a ``scrapy.Response`` instance of the item.
            exception: the instance of the exception raised.
            spider: an instance of the scrapy spider.
        """
        self.stats.inc_value("database_error_count")

    def spider_opened(self, spider):
        """Set up database access and reset counters when the spider opens.

        Args:
            spider: an instance of the scrapy spider.
        """
        logger.info("Start recording stats from crawler")

        # NOTE(review): DBSessionMaker is not imported in this module —
        # confirm it is available (presumably from aquila.database).
        spider.session_maker = DBSessionMaker()

        self.stats.set_value("database_error_count", 0)

    def spider_closed(self, spider):
        """Persist the finished crawl's stats to the database.

        Args:
            spider: an instance of the scrapy spider.
        """
        values = {
            # Crawler timestamps are shifted by +8h before being stored
            # (presumably UTC -> local time; confirm intended timezone).
            "start_time":
                self.stats.get_value("start_time") + timedelta(hours=8),
            "finish_time":
                self.stats.get_value("finish_time") + timedelta(hours=8),
            "finish_reason": self.stats.get_value("finish_reason"),
            # Default to 0: scrapy does not set item_scraped_count when no
            # item was scraped, so get_value would otherwise return None.
            "item_scraped_count": self.stats.get_value(
                "item_scraped_count", 0
            ),
            "database_error_count": self.stats.get_value(
                "database_error_count", 0
            )
        }

        # NOTE(review): AquilaStats is not imported in this module —
        # confirm where it is defined.
        with DBSession(spider.session_maker.session_maker) as session:
            AquilaStats.add(
                AquilaStats(**values), session
            )

        # Release the transaction/connection created in spider_opened.
        spider.session_maker.trans.commit()
        spider.session_maker.connection.close()
