# -*- coding: utf-8 -*-

from __future__ import absolute_import

import datetime, os

from django.conf import settings
from celery.utils.log import get_task_logger
from celery import chain

from financial_daily.celery import app
from financial_daily.external_db_accessors import CNInfoDBAccessor
from financial_daily.utils import StringUtils
from financial_daily.stocks_category_info_crawler import StocksCategoryInFoCrawler

from basic_info.models import Stock, Institute
from basic_info.models import Industry,StockIndustryMap
from basic_info.models import Location,StockLocationMap
from basic_info.models import Concept,StockConceptMap

# Task-level logger bound to this module's name.
logger = get_task_logger(__name__)
# NOTE(review): evaluated once at import time, so in a long-running Celery
# worker this date goes stale after midnight — confirm nothing relies on it
# for per-run dates. (It is unused within this module's visible code.)
date_str = datetime.date.today().strftime('%Y-%m-%d')

@app.task
def sync_basic_info():
    """Sync stock and institute basic info from CNInfo into the local DB.

    Upserts ``Stock`` rows keyed by ``code`` and ``Institute`` rows keyed by
    ``short_name``: existing rows are updated in place, missing rows created.
    """
    # stocks
    stock_list = CNInfoDBAccessor.get_stock_basic_info()
    for full_name_raw, pinyin_name, sec_name_raw, sec_code in stock_list:
        sec_name = StringUtils.escape_stock_name_string(sec_name_raw)
        full_name = StringUtils.escape_stock_name_string(full_name_raw)
        # update_or_create replaces the get/modify/save-or-create dance and
        # closes the race between the existence check and the insert.
        Stock.objects.update_or_create(
                code=sec_code,
                defaults={
                    'short_name': sec_name,
                    'pinyin_name': pinyin_name,
                    'full_name': full_name,
                },
        )

    logger.info("Done syncing stock basic info. Total %d stocks in DB.", len(stock_list))

    # institutes
    institute_list = CNInfoDBAccessor.get_institute_basic_info()
    for sec_name_raw, full_name_raw in institute_list:
        sec_name = StringUtils.escape_stock_name_string(sec_name_raw)
        full_name = StringUtils.escape_stock_name_string(full_name_raw)
        Institute.objects.update_or_create(
                short_name=sec_name,
                defaults={'full_name': full_name},
        )

    logger.info("Done syncing institute basic info. Total %d institute in DB.", len(institute_list))
   
@app.task(queue='web_access')
def crawl_stocks_info():
    """Crawl stock category info (CSRC and Sina) into local files.

    The CSRC source is crawled via ``crawl_csrc``; the Sina source yields
    args for three categories (industry, concept, location), each written to
    its own file via ``crawl_data``.
    """
    crawler = StocksCategoryInFoCrawler()

    # CSRC (ZhengJianHui)
    config_file = settings.CRAWL_STOCKS_INFO_FILES['csrc']
    analytical_args = crawler.parse_config_file(config_file)
    if analytical_args:
        file_name = settings.CRAWL_STOCKS_INFO_FILE_NAME['csrc']
        crawler.crawl_csrc(analytical_args, file_name)

    # Sina: one config file yields the shared flag plus per-category args.
    config_file = settings.CRAWL_STOCKS_INFO_FILES['sina']
    flag, industry_args, location_args, concept_args = crawler.parse_config_file(config_file)

    # Deduplicated crawl loop; order preserved: industry, concept, location.
    for category, args in (('industry', industry_args),
                           ('concept', concept_args),
                           ('location', location_args)):
        if args:
            file_name = (settings.CRAWL_STOCKS_INFO_FILE_NAME['sina'] +
                         settings.CRAWL_STOCKS_INFO_CATEGORY[category])
            crawler.crawl_data(args, file_name, flag)

def _write_sina_category_to_db(crawler, category_key, label, category_model, map_model, map_field):
    """Read one crawled Sina category file and upsert (category, stock) links.

    Each line is '<category name><delimiter><stock code>'. Unknown stock
    codes are logged and skipped; an unreadable file aborts this category
    only.
    """
    file_name = settings.CRAWL_STOCKS_INFO_FILE_NAME['sina'] + \
                settings.CRAWL_STOCKS_INFO_CATEGORY[category_key]
    file_path = os.path.join(settings.CRAWL_STOCKS_INFO_FILE_PATH, file_name)
    try:
        lines = crawler.read_file(file_path)
    except Exception:
        logger.warning("Failed to read Sina %s from %s.", label, file_path)
        return
    for line in lines:
        entry = line.strip().split(settings.CRAWL_COLUMNS_DELIMITER)
        stock_category = entry[0]
        stock_code = entry[1]
        # Create the category row first (as the original did), then link it
        # to the stock; the mapping is idempotent via get_or_create.
        category_obj, _ = category_model.objects.get_or_create(name=stock_category)
        try:
            stock = Stock.objects.get(code=stock_code)
        except Stock.DoesNotExist:
            logger.warning("Stock code %s does not exist!", stock_code)
            continue
        map_model.objects.get_or_create(stock=stock, **{map_field: category_obj})


def _write_csrc_industry_to_db(crawler):
    """Read the CSRC (ZhengJianHui) file and set per-stock industry fields."""
    file_name = settings.CRAWL_STOCKS_INFO_FILE_NAME['csrc']
    file_path = os.path.join(settings.CRAWL_STOCKS_INFO_FILE_PATH, file_name)
    try:
        lines = crawler.read_file(file_path)
    except Exception:
        logger.warning("Failed to read ZhengJianhui from %s.", file_path)
        return
    for line in lines:
        entry = line.strip().split(settings.CRAWL_COLUMNS_DELIMITER)
        csrc_industry = entry[0]
        csrc_sub_industry = entry[1]
        code = entry[2]
        try:
            stock = Stock.objects.get(code=code)
        except Stock.DoesNotExist:
            logger.warning("Stock code %s does not exist!", code)
        else:
            stock.csrc_industry = csrc_industry
            stock.csrc_sub_industry = csrc_sub_industry
            stock.save()


@app.task(queue='web_access')
def write_stocks_info_to_db():
    """Read crawled stock-category files and persist them to the DB.

    Imports the three Sina categories (industry, concept, location) and the
    CSRC industry classification, in the original order.
    """
    crawler = StocksCategoryInFoCrawler()
    _write_sina_category_to_db(crawler, 'industry', 'Industry', Industry, StockIndustryMap, 'industry')
    _write_sina_category_to_db(crawler, 'concept', 'Concept', Concept, StockConceptMap, 'concept')
    _write_sina_category_to_db(crawler, 'location', 'Location', Location, StockLocationMap, 'location')
    _write_csrc_industry_to_db(crawler)

@app.task
def import_stocks_info():
    """Run the full stock-info import pipeline.

    Chains sync_basic_info -> crawl_stocks_info -> write_stocks_info_to_db
    as immutable signatures so each step ignores the previous step's result.
    """
    workflow = (sync_basic_info.si()
                | crawl_stocks_info.si()
                | write_stocks_info_to_db.si())
    workflow()

