# -*- coding: utf-8 -*-

from scpy2.conveyor_driven_worker import ConveyorDrivenWorker


class NewsStorer(ConveyorDrivenWorker):
    """Conveyor worker that persists crawled news records into PostgreSQL
    (and, optionally, DynamoDB)."""

    def _process_task_msg(self, msg, params=None):
        """Handle one task message.

        Each element of *msg* is a sequence whose first item is a news
        record dict; every record is stored to PostgreSQL.

        :param msg: iterable of wrapped news records
        :param params: unused; kept for base-class signature compatibility
        """
        self.logger.debug(type(msg))
        for item in msg:
            self.logger.debug(type(item))
            self.logger.debug(type(item[0]))
            self.__save_to_pqsql(item[0])

    def __save_to_pqsql(self, x):
        """Insert one news record dict into the configured PostgreSQL table.

        Assumes *x* carries the crawler's keys (searchKey, url, newsTime,
        title, source, crawlerSource, sourceClassify, createdAt, abstract,
        content) — TODO confirm against the upstream crawler schema.
        """
        sqle = self._components['sqlexecutor']

        # The table name comes from trusted config; all column values are
        # bound via %s placeholders, so no user data is spliced into SQL.
        sqle.sql = 'INSERT INTO %s %s' % (self._cfg['pgsql_table'], '''
            (search_key, url, published_time, title, source, crawler_source, source_type, create_ts, abstract, has_content)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)''')
        sqle.empty_params()
        content = x['content'][0]
        sqle.params.extend([
            x['searchKey'],
            x['url'],
            x['newsTime'],
            x['title'],
            x['source'],
            x['crawlerSource'],
            x['sourceClassify'],
            x['createdAt'],
            # Fall back to the first 50 chars of the body when no abstract.
            x['abstract'] or content[:50],
            bool(content),  # has_content flag
        ])

        sqle.execute_non_query()

    def __save_to_dynamodb(self, x):
        """Store the record's body and labeled body in DynamoDB, keyed by URL.

        Currently not invoked by _process_task_msg; kept as an optional
        alternative sink.
        """
        table = self._components['dynamodb'].get_table(self._cfg['dynamodb_table'])
        table.put_item(Item={
            'url': x['url'],
            'content': x['content'][0],
            'label_content': x['content'][1]
        })


if __name__ == '__main__':
    # Script entry point: construct the storer and start its conveyor loop.
    NewsStorer().run()
