# -*- coding: utf-8 -*-

import json
import zlib
import base64

from task import Task
import util


class Resolver(Task):
    """Task worker that consumes crawl requests from the conveyor, runs the
    matching crawler, and persists each fetched article to PostgreSQL and
    DynamoDB.

    Expected task message (JSON): {"crawlerSource": ..., "searchKey": ...,
    "status": ...}  -- NOTE(review): schema inferred from the keys read
    below; confirm against the producer.
    """

    def _init(self):
        super(Resolver, self)._init()

        # Registry: task 'crawlerSource' value -> dotted 'module.ClassName'
        # path resolved at runtime by util.create_class_instance().
        self.__crawlers = {
            'toutiao_news': 'toutiao_news.TouTiaoNews',
        }

    def begin(self):
        """Main loop: receive messages forever, parse each as JSON and
        perform the described crawl task. Never returns.

        All failures are logged and swallowed so one bad message cannot
        kill the worker; self._rest() backs off when idle or on error.
        """
        while True:
            msgs = []

            try:
                msgs = self._components['conveyor'].receive()
            except Exception as e:
                self._logger.warn('[receiving]: %s' % e)

            if not msgs:
                self._rest()
                continue

            for x in msgs:
                self._logger.info('[accepted]: %s' % util.ustr(x))

                try:
                    tk = json.loads(x)
                except Exception as e:
                    self._logger.warn('[parsing]: %s' % e)
                    continue

                # Empty/None payloads carry nothing to do.
                if not tk:
                    continue

                try:
                    res = self.__perform_task(tk)

                    self._logger.info('[finished]: %s' % util.ustr(res))
                except Exception as e:
                    self._logger.warn('[performing]: %s' % e)
                    self._rest()

    def __perform_task(self, tk):
        """Run one crawl task dict: load the crawler, fetch, and persist.

        Returns None; results are written as a side effect.
        """
        crawler_name = tk.get('crawlerSource')
        keyword = tk.get('searchKey')
        status = tk.get('status')
        if not crawler_name or not keyword:
            self._logger.error('The parameters(crawlerSource|searchKey) is not specified.')
            # BUG FIX: previously fell through here and failed again inside
            # __load_crawler with a misleading "Crawler[None] not exist!".
            return

        crawler = self.__load_crawler(crawler_name)
        data = crawler.crawl({
            'logger': self._logger,
            'keyword': keyword,
            # Look back 7 days for tasks with a truthy status, else 1 day.
            'time_limit': util.get_date(-7 if status else -1)
        })
        if not data:
            self._logger.info('No data was fetched.')

            return

        for x in data:
            # Skip items with no body text; x['content'] is a pair of
            # (content, label_content) -- see __save_to_dynamodb.
            if not x['content'][0]:
                continue

            self.__save_to_pqsql(x)
            self.__save_to_dynamodb(x)

    def __load_crawler(self, name):
        """Instantiate the crawler registered under *name*.

        Raises Exception if the name is unknown, the class fails to load,
        or the instance has no crawl() method.
        """
        if name not in self.__crawlers:
            raise Exception('Crawler[%s] not exist!' % name)

        try:
            crawler = util.create_class_instance(self.__crawlers[name])
        except Exception as e:
            # NOTE(review): re-wrapping loses the original traceback; kept
            # for message compatibility with existing log consumers.
            raise Exception('Crawler[%s] load failed, error: %s' % (self.__crawlers[name], e))

        # Duck-type check: anything with a crawl() method qualifies.
        if not hasattr(crawler, 'crawl'):
            raise Exception('Crawler[%s] error: method[crawl] not exist!' % self.__crawlers[name])

        return crawler

    def __save_to_pqsql(self, x):
        """Insert one crawled item's metadata row into PostgreSQL."""
        sqle = self._components['sqlexecutor']

        # Placeholders are bound by the executor -- parameterized, not
        # string-interpolated, so values are SQL-injection safe.
        sqle.sql = 'INSERT INTO %s %s' % (self._cfg['pgsql_table'], '''
            (search_key, url, published_time, title, source, crawler_source, source_type, create_ts, abstract, has_content)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)''')
        sqle.empty_params()
        sqle.params.append(x['searchKey'])
        sqle.params.append(x['url'])
        sqle.params.append(x['newsTime'])
        sqle.params.append(x['title'])
        sqle.params.append(x['source'])
        sqle.params.append(x['crawlerSource'])
        sqle.params.append(x['sourceClassify'])
        sqle.params.append(x['createdAt'])
        # Fall back to the first 50 chars of the body when no abstract.
        sqle.params.append(x['abstract'] or x['content'][0][0:50])
        sqle.params.append(bool(x['content'][0]))

        sqle.execute_non_query()

    def __save_to_dynamodb(self, x):
        """Store the full article body (and labeled body) in DynamoDB,
        keyed by URL."""
        table = self._components['dynamodb'].get_table(self._cfg['dynamodb_table'])
        table.put_item(Item={
            'url': x['url'],
            'content': x['content'][0],
            'label_content': x['content'][1]
        })
