from django.shortcuts import render
from rest_framework import viewsets, status
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from django.utils import timezone
from .models import CrawlerTask, CrawledData
from .serializers import (
    CrawlerTaskSerializer,
    CrawledDataSerializer,
    CrawlerTaskCreateSerializer,
    CrawlerTaskUpdateSerializer
)
from .crawlers.weibo import WeiboCrawler
from .crawlers.twitter import TwitterCrawler
from .crawlers.facebook import FacebookCrawler

class CrawlerTaskViewSet(viewsets.ModelViewSet):
    """Crawler task management viewset.

    Provides CRUD over the requesting user's ``CrawlerTask`` rows plus two
    extra POST actions: ``start`` (launch a pending task) and ``cancel``
    (abort a pending/running task).
    """
    permission_classes = [IsAuthenticated]

    # Maps a data-source type string to its crawler implementation.
    # Class-level so the dict is built once, not per request.
    CRAWLER_MAP = {
        'weibo': WeiboCrawler,
        'twitter': TwitterCrawler,
        'facebook': FacebookCrawler,
    }

    def get_serializer_class(self):
        """Choose the serializer for the current action.

        BUGFIX: ``partial_update`` (PATCH) previously fell through to the
        read serializer, bypassing the update serializer's field rules; it
        now shares ``CrawlerTaskUpdateSerializer`` with ``update`` (PUT).
        """
        if self.action == 'create':
            return CrawlerTaskCreateSerializer
        elif self.action in ('update', 'partial_update'):
            return CrawlerTaskUpdateSerializer
        return CrawlerTaskSerializer

    def get_queryset(self):
        """Restrict visibility to tasks owned by the requesting user."""
        return CrawlerTask.objects.filter(user=self.request.user)

    def perform_create(self, serializer):
        """Attach the requesting user as the owner of a newly created task."""
        serializer.save(user=self.request.user)

    @action(detail=True, methods=['post'])
    def start(self, request, pk=None):
        """Start a pending crawler task.

        Only tasks in PENDING state may be started. BUGFIX: the crawler
        class is now resolved *before* the status flips to RUNNING, so an
        unsupported data-source type no longer leaves the task stranded in
        RUNNING state with no crawler attached.
        """
        task = self.get_object()
        if task.status != CrawlerTask.TaskStatus.PENDING:
            return Response(
                {'status': 'error', 'message': '任务无法启动'},
                status=status.HTTP_400_BAD_REQUEST
            )

        crawler_class = self.CRAWLER_MAP.get(task.data_source.type)
        if crawler_class is None:
            # Task stays PENDING so it can be retried with a fixed source.
            return Response(
                {'status': 'error', 'message': '不支持的数据源类型'},
                status=status.HTTP_400_BAD_REQUEST
            )

        task.status = CrawlerTask.TaskStatus.RUNNING
        task.save()

        crawler = crawler_class(task)
        # TODO: dispatch to an async worker (e.g. Celery) — this call runs
        # the crawl inside the HTTP request and will block the response.
        crawler.start()
        return Response({'status': 'success', 'message': '任务已启动'})

    @action(detail=True, methods=['post'])
    def cancel(self, request, pk=None):
        """Cancel a pending or running crawler task.

        Records a cancellation note in ``error_message``. NOTE(review):
        this only updates the DB row — presumably a running crawler polls
        the status to stop itself; verify against the crawler classes.
        """
        task = self.get_object()
        if task.status in [CrawlerTask.TaskStatus.PENDING, CrawlerTask.TaskStatus.RUNNING]:
            task.status = CrawlerTask.TaskStatus.CANCELLED
            task.error_message = '任务被用户取消'
            task.save()
            return Response({'status': 'success', 'message': '任务已取消'})
        return Response(
            {'status': 'error', 'message': '任务无法取消'},
            status=status.HTTP_400_BAD_REQUEST
        )

class CrawledDataViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only endpoints exposing crawled records to their owner."""

    serializer_class = CrawledDataSerializer
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        """Return only data whose parent task belongs to the current user."""
        owner = self.request.user
        return CrawledData.objects.filter(task__user=owner)