# -*- coding: utf-8 -*-

import urllib2
import logging
from lxml import etree, html
from datetime import datetime

from django.conf import settings as config
from basic_info.models import Stock
from companies.models import RumorVerification

logger = logging.getLogger(__name__)

class XMLProcessorBase:
    """Base class for XML feed processors.

    Holds the XPath prefix shared by concrete processors (taken from
    Django settings) and a helper for fetching raw XML over HTTP.
    """

    # Common XPath prefix prepended to per-feed item node names.
    XPATH_PREFIX = config.XPATH_PREFIX

    def load_xml_from_url(self, url):
        """Fetch *url* and return the raw response body as a byte string.

        The HTTP handle is always closed, even if the read raises
        (the previous implementation never closed the connection).
        """
        conn = urllib2.urlopen(url)
        try:
            return conn.read()
        finally:
            conn.close()

class ChuanwenProcessor(XMLProcessorBase):
    """Process the daily "chuanwen" (rumor verification) XML feeds.

    Two feeds are consumed — one with positive verifications, one with
    negative — and each item is persisted as a ``RumorVerification`` row
    linked to its ``Stock``.  Feed URLs, encoding, date formats and XML
    element names all come from Django settings.
    """

    positive_base_url = config.CHUANWEN_CONFIG['positive_base_url']
    negative_base_url = config.CHUANWEN_CONFIG['negative_base_url']
    xml_encoding = config.CHUANWEN_CONFIG['encoding']
    input_date_format = config.CHUANWEN_CONFIG['input_date_format']
    output_date_format = config.CHUANWEN_CONFIG['output_date_format']

    # XML element names for a single feed item and its fields.
    item_node_attrib = config.CHUANWEN_DOC_ATTRIBUTE['Item']
    sec_name_attrib = config.CHUANWEN_DOC_ATTRIBUTE['SecName']
    sec_code_attrib = config.CHUANWEN_DOC_ATTRIBUTE['SecCode']
    question_attrib = config.CHUANWEN_DOC_ATTRIBUTE['Question']
    answer_attrib = config.CHUANWEN_DOC_ATTRIBUTE['Answer']
    pub_date_attrib = config.CHUANWEN_DOC_ATTRIBUTE['PubDate']
    orig_link_attrib = config.CHUANWEN_DOC_ATTRIBUTE['Link']

    def process(self, date_str=None):
        """Process both the positive and the negative feed.

        :param date_str: optional cutoff in ``output_date_format``;
            items dated strictly before it are not processed
            (incremental mode).  ``None`` processes everything.
        """
        pos_count = self.__process_internal(date_str, [self.positive_base_url], True)
        neg_count = self.__process_internal(date_str, [self.negative_base_url], False)
        logger.info("Done processing daily chuanwen. %d added", pos_count + neg_count)

    def __process_internal(self, date_str, base_url_list, is_positive):
        """Fetch each feed URL, parse its items and persist new rows.

        :param date_str: optional cutoff date string (see :meth:`process`).
        :param base_url_list: feed URLs to fetch.
        :param is_positive: stored on each created ``RumorVerification``.
        :returns: number of rows actually created.  Rows that already
            existed are no longer counted, so the "%d added" log line in
            :meth:`process` is accurate.
        """
        gbk_parser = etree.XMLParser(encoding=self.xml_encoding)

        created_count = 0
        for base_url in base_url_list:
            xml_content = self.load_xml_from_url(base_url)
            doc_tree = etree.XML(xml_content, parser=gbk_parser)
            for item in doc_tree.xpath(XMLProcessorBase.XPATH_PREFIX + self.item_node_attrib):
                try:
                    pub_date = item.find(self.pub_date_attrib).text
                    date = datetime.strptime(str(pub_date), self.input_date_format).strftime(self.output_date_format)
                    if date_str is not None and date < date_str:
                        # Stop at the first item older than the cutoff;
                        # assumes the feed is ordered newest-first — TODO confirm.
                        break
                    # NOTE(review): sec_name is unused, but the .text access
                    # raises (and skips the item) when SecName is absent —
                    # kept in case that validation is relied upon.
                    sec_name = item.find(self.sec_name_attrib).text
                    sec_code = item.find(self.sec_code_attrib).text
                    question = item.find(self.question_attrib).text
                    answer = item.find(self.answer_attrib).text
                    orig_link = item.find(self.orig_link_attrib).text

                    try:
                        stock = Stock.objects.get(code=sec_code)
                        _, created = RumorVerification.objects.get_or_create(
                            date=date,
                            url=orig_link,
                            defaults={
                                'stock': stock,
                                'question': question,
                                'answer': answer,
                                'is_positive': is_positive,
                            })
                        # Only count rows we actually inserted; get_or_create
                        # also returns pre-existing rows on re-runs.
                        if created:
                            created_count += 1
                    except Stock.DoesNotExist:
                        logger.warning("Stock code %s does not exist!", sec_code)

                except Exception:
                    # Best-effort: a malformed item is skipped, but the
                    # traceback is logged instead of just the message.
                    logger.exception("Failed to process chuanwen item; skipping")

        return created_count
