# -*- coding: utf-8 -*-

import json
import zlib
import base64

from task import Task
import util


class Downloader(Task):
    """Task worker that consumes crawl-task messages from the conveyor,
    runs the matching crawler and sends the (zlib-compressed,
    base64-encoded) result back on the conveyor.
    """

    def _init(self):
        super(Downloader, self)._init()

        # Registry mapping a task's 'crawlerSource' name to the dotted
        # class path resolved by util.create_class_instance().
        self.__crawlers = {
            'toutiao_news': 'toutiao_news.TouTiaoNews',
        }

    def begin(self):
        """Main loop: receive message batches, parse each as JSON and run it.

        Never returns; every failure is logged and the loop continues
        (with a rest on receive failure / empty batch / task failure).
        """
        while 1:
            msgs = []

            try:
                msgs = self._components['conveyor'].receive()
            except Exception as e:
                self._logger.warn('[receiving]: %s' % e)

            if not msgs:
                self._rest()
                continue

            for x in msgs:
                self._logger.info('[accepted]: %s' % util.ustr(x))

                tk = None
                try:
                    tk = json.loads(x)
                except Exception as e:
                    self._logger.warn('[parsing]: %s' % e)

                if not tk:
                    continue

                try:
                    res = self.__perform_task(tk)

                    self._logger.info('[finished]: %s' % util.ustr(res))
                except Exception as e:
                    self._logger.warn('[performing]: %s' % e)
                    self._rest()

    def __perform_task(self, tk):
        """Run one crawl task dict and ship its compressed result.

        :param tk: parsed task dict; must carry 'crawlerSource' and
                   'searchKey', may carry 'status' (truthy widens the
                   fetch window from 1 to 7 days back).
        :raises ValueError: when a required task field is missing.
        :raises Exception: when the crawler cannot be loaded.
        """
        crawler_name = tk.get('crawlerSource')
        keyword = tk.get('searchKey')
        status = tk.get('status')
        if not crawler_name or not keyword:
            self._logger.error('The parameters(crawlerSource|searchKey) is not specified.')
            # BUG FIX: previously fell through after logging, which either
            # produced a misleading "Crawler[None] not exist!" error or ran
            # the crawler with keyword=None. Abort the task explicitly; the
            # caller's `except Exception` in begin() handles it.
            raise ValueError('Task is missing crawlerSource or searchKey.')

        crawler = self.__load_crawler(crawler_name)
        data = crawler.download({
            'logger': self._logger,
            'keyword': keyword,
            'time_limit': util.get_date(-7 if status else -1)
        })
        if not data:
            self._logger.info('No data was fetched.')

            return

        data = {
            'task': tk,
            'data': base64.b64encode(zlib.compress(util.ustr(data))),
        }
        self._logger.debug(data)
        self._components['conveyor'].send(json.dumps(data))

    def __load_crawler(self, name):
        """Instantiate the crawler registered under `name`.

        :param name: key into the self.__crawlers registry.
        :returns: a crawler instance exposing a download() method.
        :raises Exception: unknown name, instantiation failure, or the
                           instance lacks the required download() method.
        """
        if name not in self.__crawlers:
            raise Exception('Crawler[%s] not exist!' % name)

        crawler = None
        try:
            crawler = util.create_class_instance(self.__crawlers[name])
        except Exception as e:
            raise Exception('Crawler[%s] load failed, error: %s' % (self.__crawlers[name], e))

        # BUG FIX: __perform_task calls crawler.download(), but this guard
        # used to check for a 'crawl' method, so a crawler without
        # download() passed validation and crashed later with an opaque
        # AttributeError.
        if not hasattr(crawler, 'download'):
            raise Exception('Crawler[%s] error: method[download] not exist!' % self.__crawlers[name])

        return crawler
