# -*- coding: utf-8 -*-
import json
import logging

from django.http import JsonResponse, HttpResponse
from rest_framework import permissions, viewsets
from rest_framework.decorators import action

from apps.crawler.serializers import StartSpiderSerializers, BaseSpiderSerializers
from .forms import CrawlerStartFrom
from .spider_info import SpiderConf
from .spider_util import SpiderManager


class CrawlerViewSet(viewsets.GenericViewSet):
    """Crawler management endpoints backed by a scrapyd daemon.

    Wraps :class:`SpiderManager` (a scrapyd API client) and exposes
    actions to list projects/spiders/jobs and to start or cancel crawls.
    """

    # FIX: attribute was misspelled ``permissions_classes``; DRF only reads
    # ``permission_classes``, so authentication was silently not enforced.
    permission_classes = [permissions.IsAuthenticated]
    queryset = None

    # Class-level logger so every action records full tracebacks instead of
    # print()-ing exceptions to stdout.
    logger = logging.getLogger(__name__)

    def get_serializer_class(self):
        return StartSpiderSerializers

    @action(['get'], detail=False, url_name='crawl-index')
    def index(self, request):
        """Index page: list every scrapyd project and its configured spiders.

        The built-in 'default' project is excluded.  Response shape::

            {"projects": [...],
             "projects_spiders": {project: [spider_conf, ...]}}
        """
        spider_manager = SpiderManager()
        result = {'projects': [], 'projects_spiders': {}}
        try:
            projects = spider_manager.list_projects().get('projects', [])
            # FIX: the original called ``remove('default')`` on the very list
            # it was iterating (skipping elements); build a filtered copy.
            projects = [p for p in projects if p != 'default']
            # FIX: the original indexed ``projects[0]``, turning an empty
            # project list into a 500; an empty 200 result is returned now.
            result['projects'] = projects
            for project in projects:
                spiders = spider_manager.list_spiders(project).get('spiders', [])
                result['projects_spiders'][project] = []
                for spider_name in spiders:
                    try:
                        # FIX: copy the conf — the original annotated the
                        # shared SpiderConf entries in place on every request.
                        conf = dict(SpiderConf[spider_name])
                    except KeyError:
                        # Spiders without a UI configuration are skipped on
                        # purpose (best-effort listing).
                        continue
                    conf['spider_name'] = spider_name
                    conf['project_name'] = project
                    result['projects_spiders'][project].append(conf)
            return JsonResponse(result, status=200)
        except Exception as e:
            self.logger.exception(e)
            return JsonResponse({}, status=500)

    @action(['get'], detail=False, url_name='crawl-get-spider-info')
    def getspiderinfo(self, request):
        """Return the daemon status plus the static conf of one spider.

        Query params: ``spider`` (key into SpiderConf, required) and
        ``project`` (echoed back unchanged).
        """
        spider_name = request.GET.get('spider')
        # FIX: a missing/unknown spider is a client error (400), not a
        # KeyError bubbling up as a 500.
        if spider_name not in SpiderConf:
            return HttpResponse(u'缺少必要参数', status=400)
        spider_manager = SpiderManager()
        try:
            result = {
                'daemon_status': spider_manager.get_daemon_status(),
                'project': request.GET.get('project'),
                'spider': SpiderConf[spider_name],
            }
            return JsonResponse(result, status=200)
        except Exception as e:
            self.logger.exception(e)
            return HttpResponse(e, status=500)

    @action(['get'], detail=False, url_name='crawl-list-jobs')
    def listjobs(self, request):
        """List pending/running/finished jobs of one project.

        scrapyd's listjobs.json result shape::

            "pending":  [{"id": ..., "spider": ...}],
            "running":  [{"id": ..., "spider": ..., "start_time": ...}],
            "finished": [{"id": ..., "spider": ..., "start_time": ...,
                          "end_time": ...}]
        """
        project_name = request.GET.get('project')
        if not project_name:
            # FIX: a missing parameter is a 400 client error, not a 500.
            return JsonResponse({"message": "缺少必要的项目参数"}, status=400)
        spider_manager = SpiderManager()
        try:
            # FIX: get_daemon_status() was called outside any try block and
            # could raise an unhandled exception; it is guarded here.
            result = {
                'daemon_status': spider_manager.get_daemon_status(),
                'jobs': spider_manager.list_jobs(project_name),
            }
            return JsonResponse(result, status=200)
        except Exception as e:
            self.logger.exception(e)
            return HttpResponse(e, status=500)

    @action(['post'], detail=False, url_name='crawl-start-spider')
    def schedule(self, request):
        """Start a crawl via scrapyd's schedule endpoint.

        Expects the fields validated by CrawlerStartFrom plus the storage
        settings (dbType/dbUri/dbName/dbForm) and the crawl limits.
        """
        # DRF routing already restricts this action to POST, so no explicit
        # request.method check is needed.
        form = CrawlerStartFrom(request.data)
        if not form.is_valid():
            # FIX: was 301 (a redirect status); missing params are a 400.
            return HttpResponse(u'缺少必要参数', status=400)

        spider_manager = SpiderManager()
        store_conf = {
            'DBTYPE': request.data['dbType'],
            'MONGODB_URI': request.data['dbUri'],
            'MONGODB_DATABASE': request.data['dbName'],
            'MONGODB_COLLECTION': request.data['dbForm'],
        }
        # The conf travels on a scrapyd command line, so quotes and braces
        # must be backslash-escaped for the receiving side to parse it back.
        store_conf_json = json.dumps(store_conf, separators=(',', ':')) \
            .replace('"', r'\"').replace('{', r'\{').replace('}', r'\}')

        settings = [
            'DBTYPE=' + request.data['dbType'],
            'MONGODB_URI=' + request.data['dbUri'],
            'MONGODB_DATABASE=' + request.data['dbName'],
            'MONGODB_COLLECTION=' + request.data['dbForm'],
        ]

        trash_data = request.data['crawlerDelete'] == 'true'
        response = spider_manager.schedule(
            request.data['project'], request.data['spider'],
            setting=settings,
            limit_count=request.data['crawlerAmount'],
            trash_data=trash_data,
            storeConf=store_conf_json)
        json_result = json.loads(response.decode())
        return JsonResponse({
            'message': u'启动成功',
            'results': json_result,
        }, status=200)

    # FIX: the action was registered for GET while the body required POST,
    # so every routed request fell through and the view returned None.
    @action(['post'], detail=False, url_name='crawl-cancel-spider')
    def cancel(self, request):
        """Cancel a running job (``project`` + ``job`` in the POST body)."""
        project = request.POST.get('project')
        job = request.POST.get('job')
        if not (project and job):
            # FIX: was 301 (a redirect status); missing params are a 400.
            return HttpResponse(u'缺少必要参数', status=400)
        response = SpiderManager().cancel(project, job)
        return JsonResponse({
            'message': u'任务取消成功',
            'results': response,
        }, status=200)

    @action(['get'], detail=False, url_name='crawl-list-projects')
    def listprojects(self, request):
        """List every project known to the scrapyd daemon."""
        spider_manager = SpiderManager()
        try:
            projects = spider_manager.list_projects()
            return JsonResponse({
                'message': 'request success.',
                'results': {'projects': projects},
            }, status=200)
        except Exception as e:
            self.logger.exception(e)
            # FIX: was 301 with a "missing parameter" message; an upstream
            # failure is a 500 server error.
            return HttpResponse(e, status=500)

    @action(['get'], detail=False, url_name='crawl-project-info')
    def project(self, request):
        """Versions and jobs of the hard-coded 'Course' project."""
        # NOTE(review): the project name is hard-coded — presumably the only
        # deployed scrapy project; confirm before generalizing.
        project = 'Course'
        spider_manager = SpiderManager()
        try:
            versions = spider_manager.list_versions(project).get('versions', [])
            jobs = spider_manager.list_jobs(project)
            return JsonResponse({
                'message': 'request success.',
                'results': {'versions': versions, 'jobs': jobs,
                            'project': project},
            }, status=200)
        except Exception as e:
            self.logger.exception(e)
            # FIX: was 301 with a "missing parameter" message; an upstream
            # failure is a 500 server error.
            return HttpResponse(e, status=500)

    @action(['get'], detail=False, url_name='crawl-list-spider')
    def listspiders(self, request):
        """Spiders of the hard-coded 'Course' project (latest version)."""
        # NOTE(review): project name and empty version are hard-coded —
        # confirm 'Course' is the only deployed project before generalizing.
        project = 'Course'
        version = ''
        spider_manager = SpiderManager()
        try:
            spiders = spider_manager.list_spiders(project, version).get('spiders', [])
            return JsonResponse({
                'message': 'request success.',
                'results': {'version': version, 'spiders': spiders,
                            'project': project},
            }, status=200)
        except Exception as e:
            self.logger.exception(e)
            # FIX: was 301 with a "missing parameter" message; an upstream
            # failure is a 500 server error.
            return HttpResponse(e, status=500)
