# -*- coding:utf-8 -*-
import codecs
import pika
import time
import json
import threading
import requests
import os
import logging
import argparse
import emoji
import copy
from pprint import pprint
from datetime import datetime

from utils import configs
from utils import maixun_fid_info
from utils import maixun_domain_mapping
from utils import feeling_labels
from utils import quality_labels
from utils import idmi_crawler_modelname_mapping
from utils import sourceId2websites_Response
from utils import log_setup
from utils import purchase_year_analyze
from utils import idmi_crawled_complaints_map2fid
from utils import translate_sent
from utils import quality_preprocessing

# Route all logging output of this process to ./logs/conusmer.log.
# NOTE(review): "conusmer" looks like a typo for "consumer", but the file
# name is runtime behaviour (existing log files / log shippers may depend
# on it), so it is deliberately left unchanged.
log_setup('./logs/', 'conusmer.log')


class RabbitMQ_Consumer:
    """Consume crawled articles from a RabbitMQ queue, normalise their
    text fields and forward each article to the web backend.

    Two message families are handled by :meth:`publish_msg`:
    ``complaint*`` sourceIds (complaint sites) and ``forum*`` sourceIds
    (forum posts).  Any other sourceId is acked and dropped.
    """

    # Translation table for clean_sentence(): control whitespace, quote
    # characters and braces become single spaces; square brackets are
    # deleted outright.  One C-level pass instead of 11 chained replace().
    _CLEAN_TABLE = str.maketrans('\r\n\t"\'“”{}', ' ' * 9, '[]')

    def __init__(self):
        # Credentials/connection parameters come from the project-level
        # `configs` module.  heartbeat=0 disables the AMQP heartbeat so a
        # long preprocessing step cannot kill the connection.
        self.login = pika.PlainCredentials(
            username=configs.rabbitmq_user,
            password=configs.rabbitmq_pw
        )
        self.connection_parameter = pika.ConnectionParameters(
            host=configs.rabbitmq_ip,
            port=configs.rabbitmq_data_port,
            virtual_host=configs.rabbitmq_vhost,
            credentials=self.login,
            connection_attempts=int(configs.rabbitmq_connection_attempts),
            retry_delay=int(configs.rabbitmq_retry_delay),
            heartbeat=0
        )
        self.connection = None
        self.channel = None
        # Backend endpoints that preprocessed articles are POSTed to.
        self.publish_quality_url = os.path.join(
            configs.saved_data_host,
            'complaint/idmi_complaints'
        )
        self.publish_cuzu_url = os.path.join(
            configs.saved_data_host,
            'maixun_cuzu/maixun_cuzu_storage'
        )

    def connect(self):
        """Open the blocking connection if it is not already open, then
        declare the exchange and queue and bind them together."""
        if self.connection and not self.connection.is_closed:
            return
        self.connection = pika.BlockingConnection(self.connection_parameter)
        self.channel = self.connection.channel()
        self.channel.exchange_declare(
            exchange=configs.rabbitmq_exchange,
        )
        result = self.channel.queue_declare(
            queue=configs.rabbitmq_queue,
        )
        self.channel.queue_bind(
            exchange=configs.rabbitmq_exchange,
            queue=result.method.queue
        )

    def callback(self, channel, method_frame, header_frame, body):
        """pika delivery callback: decode the JSON message body and hand
        it to preprocess_body() for publishing + acking."""
        # json.loads() lost its `encoding` kwarg in Python 3.9 (it was a
        # deprecated no-op before that) -- decode the bytes explicitly.
        message = json.loads(body.decode('utf-8'))
        self.preprocess_body(
            message,
            channel,
            method_frame
        )
        logging.info(msg="INFO : preprocess the article, TIME is ：{TIME}".format(
            TIME=str(datetime.now()))
        )

    def connect_thread_func(self):
        """Background keep-alive loop: service pika I/O once per second.

        NOTE(review): pika's BlockingConnection is documented as not
        thread-safe; calling process_data_events() here while
        start_consuming() runs on the main thread is risky -- confirm
        against the pika docs before relying on this under load.
        """
        while True:
            time.sleep(1)
            self.connection.process_data_events()

    def clean_sentence(self, sentence):
        """Flatten `sentence` to a string and strip characters that would
        break the JSON payload (newlines, tabs, quotes, braces, square
        brackets); emoji are converted to their :name: form best-effort.

        Accepts str, list-of-str (joined) or anything else (-> '').
        """
        cleaned = self.sentence_check(sentence).translate(self._CLEAN_TABLE)
        try:
            cleaned = emoji.demojize(cleaned)
        except Exception:
            # best effort: keep the text as-is if demojizing fails
            pass
        return cleaned

    def consumer(self):
        """Connect, start the keep-alive thread and consume forever
        (blocking).  Messages are acked manually in preprocess_body()."""
        logging.info('=========================')
        logging.info('start consuming')
        logging.info('=========================')

        self.connect()
        keepalive_thread = threading.Thread(
            target=self.connect_thread_func
        )
        keepalive_thread.start()
        logging.info(msg="INFO : start collect articles .... ")
        self.channel.basic_consume(
            queue=configs.rabbitmq_queue,
            on_message_callback=self.callback,
            auto_ack=False,
        )
        self.channel.start_consuming()

    def preprocess_body(self, body, channel, method_frame):
        """POST the decoded article to the backend, then ack the delivery.

        The message is acked in every case (success, backend error and
        unhandled sourceId) so a poison message cannot be redelivered
        forever; failures are only logged.  Previously an unhandled
        sourceId made publish_msg() return None, which crashed on
        `response.status_code` and left the message unacked.
        """
        response = self.publish_msg(msg=body)
        channel.basic_ack(delivery_tag=method_frame.delivery_tag)

        if response is None:
            # publish_msg() ignored the message (unknown sourceId family)
            logging.warning("WARN : unhandled sourceId, message dropped")
        elif response.status_code != 200:
            # was logged at INFO level despite being an error
            logging.error("ERROR : Backend Error Code {ERROR_CODE}".format(
                ERROR_CODE=response.status_code)
            )
        else:
            logging.info("INFO : collect one article in {TIME}".format(
                TIME=str(datetime.now())
            ))

    def sentence_check(self, s):
        """Coerce `s` to a plain string: join lists of strings, map any
        other type (None, numbers, dicts...) to the empty string."""
        if isinstance(s, str):
            return s
        if isinstance(s, list):
            return ''.join(s)
        return ''

    def _lookup_fid(self, fid):
        """Resolve a maixun forum id to (OEM, ModelName); ('', '') when
        the fid is unknown.

        Fixes the original `fid in maixun_fid_info.keys` (missing call
        parentheses), which raised TypeError on every lookup.
        """
        if fid in maixun_fid_info:
            info = maixun_fid_info[fid]
            return info['OEM'], info['ModelName']
        return '', ''

    def _build_payload(self, sourceId, url, url_unique, fid,
                       OEM_src, ModelName_src, vehicle_line_src,
                       OEM, ModelName, vehicle_line,
                       title_zh, text_zh, purchase_year):
        """Assemble the backend payload shared by the complaint and forum
        branches (translation and complaint tagging happen here)."""
        complaint_tags = quality_preprocessing(title_zh=title_zh, text_zh=text_zh)
        return {
            'create_time': datetime.now().strftime("%Y-%m-%d"),
            'purchase_year': purchase_year,
            'info_category': 'idmi_crawled',
            'URL': url,
            'FLAG_function_group_problem': bool(complaint_tags),
            'OEM_src': OEM_src,
            'ModelName_src': ModelName_src,
            'vehicle_line_src': vehicle_line_src,
            'OEM': OEM,
            'ModelName': ModelName,
            'vehicle_line': vehicle_line,
            'url_unique': url_unique,
            'title_zh': title_zh,
            'text_zh': text_zh,
            'title_en': translate_sent(title_zh),
            'text_en': translate_sent(text_zh),
            'websites': sourceId2websites_Response[sourceId],
            'maixun_author_location': '',
            'maixun_domain': '',
            'maixun_fid': fid,
            'complaint_tags': complaint_tags,
        }

    def _post_quality(self, data):
        """POST one preprocessed article to the quality backend; returns
        the requests.Response."""
        return requests.post(
            url=self.publish_quality_url,
            data={'msg': json.dumps(data, ensure_ascii=False)}
        )

    def publish_msg(self, msg):
        """
        Extract values from msg and call the web backend to store the
        preprocessed article.

        Expected msg keys:
            sourceId, source_web_origin, save_source_image,
            info_category, url, title, text, brand_src, series_src,
            model_src, image_urls, _url_unique

        Returns the requests.Response of the backend call, or None when
        the sourceId is neither ``complaint*`` nor ``forum*``.
        """
        sourceId = msg['sourceId']
        logging.info('-------------')
        logging.info(sourceId)
        logging.info('-------------')

        if sourceId.startswith('complaint'):
            logging.info("------->>>>>>> {}".format(sourceId))

            url = self.sentence_check(msg['url'])
            url_unique = self.sentence_check(msg['_url_unique'])
            OEM_src = self.sentence_check(msg['brand_src'])
            ModelName_src = self.sentence_check(msg['series_src'])
            vehicle_line_src = self.sentence_check(msg['model_src'])

            # clean_sentence() coerces to str internally
            title_zh = self.clean_sentence(msg['title'])
            text_zh = self.clean_sentence(msg['text'])

            fid = idmi_crawled_complaints_map2fid(
                sourceId=sourceId,
                OEM_src=OEM_src,
                ModelName_src=ModelName_src
            )
            OEM, ModelName = self._lookup_fid(fid)

            purchase_year = purchase_year_analyze(
                customer_voice=OEM_src + ModelName_src + vehicle_line_src
                + title_zh + text_zh,
            )

            data = self._build_payload(
                sourceId=sourceId,
                url=url,
                url_unique=url_unique,
                fid=fid,
                OEM_src=OEM_src,
                ModelName_src=ModelName_src,
                vehicle_line_src=vehicle_line_src,
                OEM=OEM,
                ModelName=ModelName,
                # the vehicle line is only ever taken from the source field
                vehicle_line=vehicle_line_src,
                title_zh=title_zh,
                text_zh=text_zh,
                purchase_year=purchase_year,
            )
            return self._post_quality(data)

        if sourceId.startswith('forum'):
            logging.info("------->>>>>>> {}".format(sourceId))

            url = self.sentence_check(msg['url'])
            url_unique = self.sentence_check(msg['_url_unique'])

            # fid may arrive as an int or a numeric string; anything
            # unparseable falls back to 0 (no OEM/ModelName mapping).
            try:
                fid = int(msg['fid'])
            except (TypeError, ValueError):
                fid = 0
            OEM, ModelName = self._lookup_fid(fid)

            title_zh = self.clean_sentence(msg['title'])
            text_zh = self.clean_sentence(msg['text'])

            purchase_year = purchase_year_analyze(
                customer_voice=title_zh + text_zh,
            )

            data = self._build_payload(
                sourceId=sourceId,
                url=url,
                url_unique=url_unique,
                fid=fid,
                # forum messages carry no brand/series source fields;
                # reuse the fid-resolved values (as the original did)
                OEM_src=OEM,
                ModelName_src=ModelName,
                vehicle_line_src='',
                OEM=OEM,
                ModelName=ModelName,
                vehicle_line='',
                title_zh=title_zh,
                text_zh=text_zh,
                purchase_year=purchase_year,
            )
            return self._post_quality(data)

        # Unknown sourceId family: nothing to publish (caller acks & logs).
        return None


if __name__ == '__main__':
    # Run the consumer only when executed as a script: the unguarded
    # module-level call used to open a RabbitMQ connection and block
    # forever on a mere import of this module.
    rabbitmq_consumer = RabbitMQ_Consumer()
    rabbitmq_consumer.consumer()