# -*- coding: utf-8 -*-

import json
from task import Task
import util
from parse_util.recruitment_ana_util import job_info_trans
import datetime


class Parser(Task):
    """Consume crawled job postings from the conveyor component, normalize
    each item and persist it to PostgreSQL (DynamoDB path is disabled)."""

    def _init(self):
        super(Parser, self)._init()

        # Registry of loadable crawler classes, keyed by crawler name.
        # NOTE(review): only used by __load_crawler, which no visible code
        # calls — possibly legacy; kept for backward compatibility.
        self.__crawlers = {
            'toutiao': 'toutiao_news.TouTiaoNews',
            'sina_blog': 'sina_blog.SinaBlog',
            # 'newspaper': 'newspaper.Newspaper',
        }

    def begin(self):
        """Main loop: receive JSON message batches and process each job item.

        Never returns; rests and retries on any receive/decode failure.
        """
        while 1:
            # FIX: msgs was previously unbound when receive() raised on the
            # first iteration (NameError at the `if not msgs` check), and was
            # left as the raw, un-decoded message when json.loads raised.
            msgs = None
            try:
                raw = self._components['conveyor'].receive()
                msgs = json.loads(raw[0])
            except Exception as e:
                self._logger.warn('[receiving]: %s' % e)

            if not msgs:
                self._rest()
                continue

            # FIX: the old default of '' made json.loads raise ValueError
            # (uncaught, which killed the whole loop) whenever 'data' was
            # missing or empty; default to an empty JSON array instead.
            try:
                items = json.loads(msgs.get('data', '[]') or '[]')
            except Exception as e:
                self._logger.warn('[decoding data]: %s' % e)
                self._rest()
                continue

            for x in items:
                self._logger.info('[accepted]: %s' % util.ustr(x))
                if not x:
                    continue
                try:
                    res = self.__perform_task(x)
                    self._logger.info('[finished]: %s' % util.ustr(res))
                except Exception as e:
                    self._logger.warn('[performing]: %s' % e)
                    self._rest()

    def __perform_task(self, tx):
        # Normalize the raw item, then persist it. Returns None (the caller
        # only logs the result).
        tx = self.__parse_job_info(tx)
        self.__save_to_pqsql(tx)
        # self.__save_to_dynamodb(tx)

    def __parse_job_info(self, jobItem):
        # Delegate field normalization to the shared transform helper.
        return job_info_trans(jobItem)

    def __load_crawler(self, name):
        """Instantiate the crawler registered under *name*.

        Raises Exception when the name is unknown, the class fails to load,
        or the instance lacks a ``crawl`` method.
        """
        if name not in self.__crawlers:
            raise Exception('Crawler[%s] not exist!' % name)

        crawler = None
        try:
            crawler = util.create_class_instance(self.__crawlers[name])
        except Exception as e:
            raise Exception('Crawler[%s] load failed, error: %s' % (self.__crawlers[name], e))

        if not hasattr(crawler, 'crawl'):
            raise Exception('Crawler[%s] error: method[crawl] not exist!' % self.__crawlers[name])

        return crawler

    def __save_to_pqsql(self, x):
        """Insert a new job row keyed by source URL, or touch update_time
        when the URL already exists. All failures are logged, not raised."""
        sqle = self._components['sqlexecutor']
        _url = x.get('sourceUrl')
        try:
            if not self.__check_url_exist(_url):
                _keys = [
                    'companyName',
                    'sourceUrl',
                    'salary',
                    'city',
                    'category',
                    'category_type',
                    'degree',
                    'working_years',
                    'keywords',
                    'keywords_des',
                    'releaseTime'
                ]
                # A missing key raises KeyError and is logged below — an
                # incomplete item is deliberately not inserted.
                params = [x[key] for key in _keys]

                # Look up the company's industry for denormalized storage.
                sqle.reset()
                sqle.sql = 'SELECT industry FROM recruitment_monitor_company WHERE company_name=%s'
                sqle.params = [x.get('companyName', '')]
                res_industry = sqle.execute_dataset()
                # FIX: the empty-result fallback used to be a list, so the
                # .get() below raised AttributeError and every insert for an
                # unknown company was silently swallowed by the except.
                industry_row = res_industry[0] if res_industry else {}

                params.append(datetime.datetime.now())
                params.append(industry_row.get('industry', []))

                sqle.reset_sql_params()
                sqle.sql = """
                          INSERT INTO recruitment_analysis(
                          company_name,
                          url,
                          salary,
                          city,
                          category,
                          category_type,
                          degree,
                          working_years,
                          keywords,
                          keywords_des,
                          release_time,
                          update_time,
                          industry
                          )  VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);
                           """
                sqle.params = params
                sqle.execute_non_query()
            else:
                # Known URL: just refresh the update_time stamp.
                sqle.reset_sql_params()
                sqle.sql = """
                           UPDATE recruitment_analysis SET
                           update_time=%s WHERE url=%s;
                           """
                sqle.params = [datetime.datetime.now(), _url]
                sqle.execute_non_query()
        except Exception as e:
            # FIX: typo in the error message ('faile' -> 'failed').
            self._logger.error('[insert_job_info] failed for %s' % e)

    def __check_url_exist(self, url):
        """Return True when a row with this url is already stored."""
        sqle = self._components['sqlexecutor']
        sqle.reset_sql_params()
        sqle.sql = """
                   SELECT company_name FROM recruitment_analysis WHERE url=%s;
                   """
        sqle.params = [url]
        res = sqle.execute_dataset()
        # A non-empty result set means the URL was seen before.
        if res:
            self._logger.info('[check_url_exist]: %s already exist' % url)
            return True
        return False

    def __save_to_dynamodb(self, x):
        # Disabled alternative sink; kept for reference.
        table = self._components['dynamodb'].get_table(self._cfg['dynamodb_table'])
        table.put_item(Item={
            'url': x['url'],
            'content': x['content'][0],
            'label_content': x['content'][1]
        })
