# -*- coding=utf-8 -*-
import markdown2

import json
import logging
import subprocess
import redis
import config

from django.dispatch import receiver
from django.db.models import Q
from django.http import HttpResponse
from django.shortcuts import render
from django.conf import settings
from django.views import View
from django.core.signals import request_finished
from autohome.models import Article

# Module-level logger named after this module (standard logging convention);
# the view functions below should log through this rather than the root logger.
logger = logging.getLogger(__name__)


# Create your views here.
# Create your views here.
class IndexView(View):
    """Render the article index page, filtered by publication status."""

    def get(self, request):
        # Default to status=1 when the query parameter is absent; Django's
        # ORM coerces the string value from GET for the integer field lookup.
        status = request.GET.get('status', 1)
        articles = Article.objects.filter(status = status)
        # Render each abstract from Markdown to HTML in place. Iterating here
        # evaluates the queryset, so the modified instances stay in its result
        # cache for the template. (The original enumerate() index was unused.)
        for article in articles:
            article.abstract = markdown2.markdown(text = article.abstract, extras = {
                'tables': True,
                'wiki-tables': True,
                'fenced-code-blocks': True,
            })

        context = {
            'articles': articles,
        }

        return render(request, 'index.html', context = context)


# 查看指南
# View a single article/guide.
class ArticleView(View):
    """Render one article looked up by its URL slug."""

    def get(self, request, url):
        article = Article.objects.filter(url = url).first()
        # filter().first() returns None for an unknown url; without this guard
        # the attribute access below raises AttributeError (an HTTP 500).
        if article is None:
            return HttpResponse('article not found', status = 404)

        # Render the stored Markdown body to HTML in place for the template.
        article.content = markdown2.markdown(text = article.content, extras = {
            'tables': True,
            'wiki-tables': True,
            'fenced-code-blocks': True,
        })

        context = {
            'article': article
        }

        return render(request, 'article.html', context = context)


# 运行爬虫
# Launch a crawler process.
def run_crawl(request):
    """Start the spider named by the `name` query parameter and return a JSON
    status payload ({'status': 'success'|'error', 'msg': ...}).

    The spider is launched as a detached `manage.py run_spider` subprocess;
    this view does not wait for it to finish.
    """
    res_data = {
        'status': '',
        'msg': '',
    }

    try:
        name = request.GET.get('name')
        logger.info('starting spider name=%s', name)

        # Argument-list form with shell=False (the default) prevents shell
        # injection via the user-supplied `name` parameter; the original built
        # a shell=True command string by interpolation. `cwd` replaces the
        # former `cd {dir};` prefix.
        cmd = ['python', 'manage.py', 'run_spider', '-a', 'name=%s' % name]
        subprocess.Popen(cmd, cwd = settings.BASE_DIR)
        # TODO: record the launched process in redis (redis.StrictRedis(
        # **config.redis_config)) so spider_status/close_spider can track it.

        res_data['status'] = 'success'
        res_data['msg'] = '启动成功'
    except Exception as e:
        res_data['status'] = 'error'
        # Log through the module logger rather than the root logger.
        logger.error('run spider exception:%s', e)
        res_data['msg'] = '出现错误，错误原因：%s' % e

    response = HttpResponse(json.dumps(res_data), content_type = "application/json")
    return response


# 查看爬虫状态
# Check crawler status.
def spider_status(request):
    """Report whether the spider named by the `name` query parameter is
    recorded as running in redis, returning a JSON payload.

    If a redis entry exists its JSON content is echoed back under 'info'.
    """
    res_data = {
        'status': '',
        'msg': '',
    }
    try:
        name = request.GET.get('name')
        redis_db = redis.StrictRedis(**config.redis_config)
        value = redis_db.get(name)
        if value is not None:
            res_data['msg'] = '%s 爬虫正在运行' % name
            res_data['info'] = json.loads(value)
        else:
            res_data['msg'] = '%s 爬虫没有运行' % name
        res_data['status'] = 'success'
    except Exception as e:
        res_data['status'] = 'error'
        # Module logger, and a message naming this endpoint (the original
        # copy-pasted "run spider" here).
        logger.error('spider status exception:%s', e)
        res_data['msg'] = '出现错误，错误原因：%s' % e

    response = HttpResponse(json.dumps(res_data), content_type = "application/json")
    return response


# 关闭爬虫  TODO... 存储监控进程状态
# Stop a crawler.  TODO... persist/monitor process state
def close_spider(request):
    """Attempt to stop the spider named by the `name` query parameter,
    returning a JSON status payload.

    NOTE(review): the 'popen' value read back from redis is JSON-decoded
    data, not a live subprocess.Popen, so the kill branch cannot currently
    work until a real process handle/PID is persisted (see TODO above).
    """
    res_data = {
        'status': '',
        'msg': '',
    }
    try:
        name = request.GET.get('name')
        redis_db = redis.StrictRedis(**config.redis_config)
        value = redis_db.get(name)
        if value is not None:
            res_data['msg'] = '%s 爬虫正在运行' % name
            data = json.loads(value)
            res_data['info'] = data

            popen = data.get('popen', None)
            if popen is not None:
                popen.kill()
        else:
            res_data['msg'] = '%s 爬虫没有运行' % name
        # Bug fix: 'success' was previously set only inside the else branch,
        # so the running/killed path returned status ''.
        res_data['status'] = 'success'
    except Exception as e:
        res_data['status'] = 'error'
        logger.error('close spider exception:%s', e)
        res_data['msg'] = '出现错误，错误原因：%s' % e

    response = HttpResponse(json.dumps(res_data), content_type = "application/json")
    return response
