#-*- coding:utf-8 -*-
import json

from django.shortcuts import render,HttpResponse,render_to_response
import datetime

# Create your views here.
from django.template import RequestContext
from django.views.decorators.http import require_POST
from scrapyd_api import ScrapydAPI
from finance.models import spider_statues
from finance.mongo_items import finance as finance_mongo,finance_words,key_words
import logging
log = logging.getLogger('proscenium')


def index(request):
    """Render the finance landing page with an empty template context."""
    context = {}
    return render_to_response(
        'finance/index.html',
        context,
        context_instance=RequestContext(request),
    )




@require_POST
def start_spider(request):
    """Schedule the 'finance' spider on scrapyd and report the new job.

    Returns a JSON ``HttpResponse`` with keys:
        is_tip (int): 1 when there is a message to surface to the user.
        is_success (int): 1 only when the spider was scheduled and its job
            id was found in scrapyd's job listing.
        msg (unicode): user-facing status message.
        data (dict): ``{'jod_id': <scrapyd job id>}`` on success.
    """
    items = {
        'is_tip': 0,
        'is_success': 0,
        'msg': u'',
        'data': {},
    }
    spider_projects = 'sina_finance'
    spider_name = 'finance'
    # NOTE(review): credentials are hard-coded in the target URL; consider
    # moving target/user/password into Django settings.
    scrapyd = ScrapydAPI(target='http://root:123456@192.168.1.46:6802')
    jod_id = scrapyd.schedule(spider_projects, spider_name)
    if jod_id == '':
        # scrapyd did not return a job id: scheduling failed.
        items['is_tip'] = 1
        items['msg'] = u'启动爬虫失败'
        data = json.dumps(items)
        log.info('return data %s' % data)
        return HttpResponse(data)
    # Map scrapyd's job-list buckets to the numeric state codes the
    # frontend expects; any unknown bucket maps to 3.
    state_codes = {'pending': 0, 'running': 1, 'finished': 2}
    jod_state = None
    jod_list = scrapyd.list_jobs(spider_projects)
    for bucket, jobs in jod_list.items():
        if jod_id in [job['id'] for job in jobs]:
            jod_state = state_codes.get(bucket, 3)
            break
    if jod_state is None:
        # The freshly scheduled job is missing from every bucket.
        items['is_tip'] = 1
        items['msg'] = u'启动爬虫失败,未知的jod_id'
        data = json.dumps(items)
        return HttpResponse(data)
    # Persist the job so later requests can poll its status.
    # ('peoject_name'/'staute' typos come from the model's field names.)
    spider_statues.objects.create(
        peoject_name=spider_projects,
        spider_name=spider_name,
        spider_job_id=jod_id,
        staute=jod_state,
    )
    items['is_tip'] = 1
    items['is_success'] = 1
    items['msg'] = u'启动爬虫成功'
    items['data']['jod_id'] = jod_id
    data = json.dumps(items)
    log.info('return data %s' % data)
    return HttpResponse(data)


@require_POST
def get_itmes(request):
    """Return the scraped articles of one spider job as JSON.

    POST params:
        jod_id: the scrapyd job id whose items should be returned.

    Returns a JSON ``HttpResponse`` with ``is_tip``/``is_success``/``msg``
    and, on success, ``data = {'jod_id': ..., 'str_list': [...]}`` where
    each entry carries title, href, job id, creation time and the joined
    keyword list of one article.
    """
    log.info('get_itmes post 参数 %s' % request.POST)
    items = {
        'is_tip': 0,
        'is_success': 0,
        'msg': u'',
        'data': {},
    }
    jod_id = request.POST.get('jod_id', '')
    if jod_id == '':
        items['is_tip'] = 1
        items['msg'] = u'jod_id为空'
        data = json.dumps(items)
        log.info('return data %s' % data)
        return HttpResponse(data)
    spider_projects = 'sina_finance'
    scrapyd = ScrapydAPI(target='http://root:123456@192.168.1.46:6802')
    jod_dict = scrapyd.list_jobs(spider_projects)
    # Map scrapyd's job-list buckets to the numeric state codes the
    # frontend expects; any unknown bucket maps to 3.
    state_codes = {'pending': 0, 'running': 1, 'finished': 2}
    jod_state = None
    for bucket, jobs in jod_dict.items():
        if jod_id in [job['id'] for job in jobs]:
            jod_state = state_codes.get(bucket, 3)
            break
    if jod_state is None:
        # Unknown job id: scrapyd has no record of it in any bucket.
        items['is_tip'] = 1
        items['msg'] = u'获取数据失败,未知的jod_id'
        data = json.dumps(items)
        log.info('return data %s' % data)
        return HttpResponse(data)
    data_list = finance_mongo.objects(spider_jod_id=jod_id)
    str_list = []
    for sub_data in data_list:
        # Resolve the article's keyword ids, then the keyword strings.
        mongo_id_list = [sub_key.key_words_id
                         for sub_key in finance_words.objects(finance_id=sub_data.id)]
        word_list = [word['word']
                     for word in key_words.objects(id__in=mongo_id_list)]
        str_list.append({
            'title': sub_data.title,
            'href': sub_data.content_href,
            'jod_id': sub_data.spider_jod_id,
            'time': sub_data.create_time.strftime('%Y-%m-%d %H:%M:%S'),
            'word_list': u'、'.join(word_list),
        })
    items['is_tip'] = 1
    items['is_success'] = 1
    items['msg'] = u'获取当前爬虫的数据'
    items['data'] = {'jod_id': jod_id, 'str_list': str_list}
    data = json.dumps(items)
    log.info('return data %s' % data)
    return HttpResponse(data)


@require_POST
def get_day_items(request):
    """Return all articles scraped on one calendar day, deduplicated by URL.

    POST params:
        date: the day to query, formatted ``YYYY-MM-DD``.

    Returns a JSON ``HttpResponse`` with ``is_tip``/``is_success``/``msg``
    and, on success, ``data = {'str_list': [...]}``; only the first article
    seen for each ``content_href`` is included.
    """
    log.info('get_day_items post 参数 %s' % request.POST)
    items = {
        'is_tip': 0,
        'is_success': 0,
        'msg': u'',
        'data': {},
    }
    odate = request.POST.get('date', '')
    if odate == '':
        items['is_tip'] = 1
        items['msg'] = u'date为空'
        data = json.dumps(items)
        log.info('return data %s' % data)
        return HttpResponse(data)
    try:
        odate = datetime.datetime.strptime(odate, '%Y-%m-%d')
    except ValueError:
        # strptime raises ValueError on a malformed date string.
        items['is_tip'] = 1
        items['msg'] = u'date格式化错误'
        data = json.dumps(items)
        log.info('return data %s' % data)
        return HttpResponse(data)
    # Half-open interval [odate, odate + 1 day) covers the whole day.
    last_date = odate + datetime.timedelta(days=1)
    data_list = finance_mongo.objects(create_time__gte=odate,
                                      create_time__lt=last_date)
    str_list = []
    seen_urls = set()  # dedupe on content_href; set gives O(1) membership
    for sub_data in data_list:
        if sub_data.content_href in seen_urls:
            continue
        seen_urls.add(sub_data.content_href)
        # Resolve the article's keyword ids, then the keyword strings.
        mongo_id_list = [sub_key.key_words_id
                         for sub_key in finance_words.objects(finance_id=sub_data.id)]
        word_list = [word['word']
                     for word in key_words.objects(id__in=mongo_id_list)]
        str_list.append({
            'title': sub_data.title,
            'href': sub_data.content_href,
            'jod_id': sub_data.spider_jod_id,
            'time': sub_data.create_time.strftime('%Y-%m-%d %H:%M:%S'),
            'word_list': u'、'.join(word_list),
        })
    items['is_tip'] = 1
    items['is_success'] = 1
    items['msg'] = u'获取当天的数据'
    items['data'] = {'str_list': str_list}
    data = json.dumps(items)
    log.info('return data %s' % data)
    return HttpResponse(data)