# apps/zhihu/views.py

from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db.models import Prefetch
from ..system.models import CrawlerStatus, CrawlerConfig
from .models import ZhihuContent, ZhihuComment
from .serializers import ZhihuContentSerializer
from .tasks import build_comment_tree

class ZhihuTaskDataView(APIView):
    """
    Return Zhihu content and comment data for a crawler task.

    GET <path>/<task_id>?page=<n>&page_size=<n>

    Responses:
        200 -- task succeeded; paginated content, with the comment tree
               attached once the async build task has completed.
        202 -- task still running (no status row yet, or a non-terminal
               crawler status).
        400 -- invalid pagination parameters, or the crawler task failed.
        404 -- no CrawlerConfig exists for ``task_id``.
        500 -- unexpected server-side error.
    """

    # Cache lifetime for content pages, comment trees and build-status
    # flags (one day).
    CACHE_TIMEOUT = 86400

    def get(self, request, task_id, format=None):
        # Validate pagination parameters up front so a malformed query
        # string yields a 400 instead of being swallowed by the generic
        # exception handler below and surfacing as a 500.
        try:
            page = int(request.query_params.get('page', 1))
            page_size = int(request.query_params.get('page_size', 20))
        except (TypeError, ValueError):
            return Response(
                {"detail": "page 和 page_size 必须是整数。"},
                status=status.HTTP_400_BAD_REQUEST
            )

        # Cache keys are scoped per task AND per page window, because the
        # async comment-tree build is triggered per (task, page, page_size).
        content_cache_key = f"zhihu_content_{task_id}_{page}_{page_size}"
        comment_tree_cache_key = f"zhihu_comment_tree_{task_id}_{page}_{page_size}"
        task_status_key = f"zhihu_comment_tree_status_{task_id}_{page}_{page_size}"

        try:
            # Resolve the task configuration.
            try:
                crawler_config = CrawlerConfig.objects.get(task_id=task_id)
            except CrawlerConfig.DoesNotExist:
                return Response(
                    {"detail": "任务配置未找到。"},
                    status=status.HTTP_404_NOT_FOUND
                )

            # No status row yet means the crawler has not reported back.
            try:
                crawler_status = CrawlerStatus.objects.get(task_id=crawler_config)
            except CrawlerStatus.DoesNotExist:
                return Response(
                    {"detail": "任务正在执行中，请稍后查询。"},
                    status=status.HTTP_202_ACCEPTED
                )

            if crawler_status.crawler_status == "Failed":
                return Response(
                    {"detail": f"任务失败: {crawler_status.crawler_status_logs}"},
                    status=status.HTTP_400_BAD_REQUEST
                )

            if crawler_status.crawler_status != "Succeed":
                # BUG FIX: the original if/elif fell through for any
                # non-terminal status (e.g. "Running") and implicitly
                # returned None, which crashes DRF. Treat it as "still
                # in progress".
                return Response(
                    {"detail": "任务正在执行中，请稍后查询。"},
                    status=status.HTTP_202_ACCEPTED
                )

            # Fast path: both the content page and its comment tree are
            # cached -- merge and return without touching the database.
            cached_content = cache.get(content_cache_key)
            cached_comment_tree = cache.get(comment_tree_cache_key)
            if cached_content and cached_comment_tree:
                return Response(
                    self._merge_comment_tree(cached_content, cached_comment_tree),
                    status=status.HTTP_200_OK
                )

            # Paginate the queryset BEFORE serializing so only the
            # requested page is serialized. (The original serialized the
            # entire result set on every request and then paginated the
            # serialized list -- O(total) work per request.)
            zhihu_contents = ZhihuContent.objects.filter(task_id=task_id)
            paginator = Paginator(zhihu_contents, page_size)
            try:
                page_obj = paginator.page(page)
            except PageNotAnInteger:
                page_obj = paginator.page(1)
            except EmptyPage:
                # Out-of-range page: clamp to the last page, matching the
                # original behaviour.
                page_obj = paginator.page(paginator.num_pages)

            serializer = ZhihuContentSerializer(page_obj.object_list, many=True)
            response_data = {
                'total': paginator.count,
                'num_pages': paginator.num_pages,
                'current_page': page_obj.number,
                'results': serializer.data,
            }

            # Cache the serialized page so subsequent requests can take
            # the fast path above.
            cache.set(content_cache_key, response_data, timeout=self.CACHE_TIMEOUT)

            tree_status = cache.get(task_status_key)
            if not tree_status:
                # No build recorded for this page window yet: kick off
                # the async comment-tree build exactly once.
                build_comment_tree.delay(task_id, page, page_size)
                cache.set(task_status_key, 'pending', timeout=self.CACHE_TIMEOUT)
                response_data['comment_tree_status'] = 'pending'
            else:
                # A build was started earlier; attach the tree if it has
                # landed in the cache, otherwise report it as pending.
                cached_comment_tree = cache.get(comment_tree_cache_key)
                if cached_comment_tree:
                    response_data = self._merge_comment_tree(
                        response_data, cached_comment_tree
                    )
                    response_data['comment_tree_status'] = 'completed'
                else:
                    response_data['comment_tree_status'] = 'pending'

            return Response(response_data, status=status.HTTP_200_OK)
        except Exception as e:
            # Last-resort boundary handler: surface the error message to
            # the client rather than a bare 500 page.
            return Response(
                {"detail": f"处理请求时发生错误: {str(e)}"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )

    @staticmethod
    def _merge_comment_tree(content_data, comment_tree):
        """Return a shallow copy of *content_data* with *comment_tree*
        attached under the ``comments`` key of every item in ``results``."""
        merged = content_data.copy()
        merged['results'] = [
            {**item, 'comments': comment_tree}
            for item in merged['results']
        ]
        return merged

class TestView(APIView):
    """Minimal smoke-test endpoint: always responds 200 with a greeting."""

    def get(self, request, format=None):
        payload = {"message": "Hello, World!"}
        return Response(payload, status=status.HTTP_200_OK)
