
import json
import logging
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from django.http import StreamingHttpResponse
import time

# Logging configuration for this module.
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger = logging.getLogger('llm_views')
# Guard against re-registration: Django's dev-server autoreloader can import
# this module more than once, and a second addHandler() would make every
# message appear twice in the log output.
if not logger.handlers:
    logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Coze SDK imports (kept next to the client they configure).
import os

from cozepy import COZE_CN_BASE_URL
from cozepy import Coze, TokenAuth, Message, ChatEventType  # noqa

# SECURITY: a live personal-access token was hard-coded here. Prefer the
# COZE_API_TOKEN environment variable; the literal is kept only as a
# backward-compatible fallback so existing deployments keep working.
# Rotate this token and delete the fallback as soon as possible.
coze_api_token = os.environ.get(
    'COZE_API_TOKEN',
    'pat_2PXI7WZjrtf7CkosZZx6Zl4oSU9IcRAWFihgFuZtJ4JkvkYvGC20nZCb7zkv09Nu',
)
coze_api_base = COZE_CN_BASE_URL

# Shared Coze client used by all views in this module.
coze = Coze(auth=TokenAuth(token=coze_api_token), base_url=coze_api_base)

# Bot that all chat requests are routed to (overridable via COZE_BOT_ID).
bot_id = os.environ.get('COZE_BOT_ID', '7537615752749727782')

def _sse_error_response(payload, http_status):
    """Build a one-shot SSE response that emits *payload* and terminates.

    Args:
        payload: JSON-serializable object sent as a single ``data:`` event.
        http_status: HTTP status code to attach to the streaming response.

    Returns:
        StreamingHttpResponse emitting the payload followed by ``[DONE]``.
    """
    def _stream():
        yield f"data: {json.dumps(payload)}\n\n"
        yield "data: [DONE]\n\n"

    response = StreamingHttpResponse(_stream(), content_type='text/event-stream')
    response['Cache-Control'] = 'no-cache'
    response.status_code = http_status
    return response


@api_view(['POST'])
def chat_with_ai(request):
    """Handle a chat request against the Coze bot.

    Expects a JSON body with ``message`` (required) and ``user_id``
    (optional, defaults to ``'123456'``). Every outcome — success,
    validation failure, and server error — is delivered as an SSE stream
    (``text/event-stream``) so clients need only one consumption path.
    """
    try:
        data = request.data
        user_message = data.get('message', '')
        # Coerce to str: JSON clients may send a numeric user_id, while the
        # Coze API identifies users by string.
        user_id = str(data.get('user_id') or '123456')  # default user ID

        if not user_message:
            return _sse_error_response(
                {'error': 'Message is required'},
                status.HTTP_400_BAD_REQUEST,
            )

        # Lazy %-style args avoid formatting work when INFO is disabled.
        logger.info("Received chat request from user %s: %s", user_id, user_message)

        def stream_response():
            """Yield SSE events translated from the Coze chat stream."""
            try:
                logger.info("Sending request to Coze API, bot_id: %s, user_id: %s",
                            bot_id, user_id)

                for event in coze.chat.stream(
                    bot_id=bot_id,
                    user_id=user_id,
                    additional_messages=[
                        Message.build_user_question_text(user_message),
                    ],
                ):
                    if event.event == ChatEventType.CONVERSATION_MESSAGE_DELTA:
                        # Forward each incremental content chunk as JSON.
                        chunk = json.dumps({
                            'type': 'content',
                            'content': event.message.content,
                        })
                        yield f"data: {chunk}\n\n"
                        time.sleep(0.05)  # throttle stream pace for the client
                    elif event.event == ChatEventType.CONVERSATION_CHAT_COMPLETED:
                        # Completion: report token usage, then terminate.
                        usage = json.dumps({
                            'type': 'metadata',
                            'token_usage': event.chat.usage.token_count,
                        })
                        yield f"data: {usage}\n\n"
                        yield 'data: {"type": "done"}\n\n'
                        yield "data: [DONE]\n\n"
            except Exception as e:
                # Surface mid-stream failures to the client instead of
                # silently truncating the stream.
                logger.error("Error during Coze API streaming: %s", e)
                yield f"data: {json.dumps({'type': 'error', 'error': str(e)})}\n\n"
                yield "data: [DONE]\n\n"

        # Success path: hand the generator to Django for streaming.
        response = StreamingHttpResponse(stream_response(), content_type='text/event-stream')
        response['Cache-Control'] = 'no-cache'
        return response

    except Exception as e:
        # Errors raised before streaming starts (e.g. request-body parsing).
        logger.error("Error processing chat request: %s", e)
        return _sse_error_response(
            {'type': 'error', 'error': str(e)},
            status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

@api_view(['GET'])
def llm_status(request):
    """Report LLM service availability as a short SSE stream.

    NOTE: this does not actually call the Coze API — it only echoes static
    configuration, so "healthy" means "this view is reachable". Extend the
    generator with a real probe if deeper health checking is needed.
    """
    def _events():
        try:
            healthy = {'status': 'healthy', 'bot_id': bot_id}
            yield f"data: {json.dumps(healthy)}\n\n"
            yield "data: [DONE]\n\n"
        except Exception as exc:
            failure = {'status': 'unhealthy', 'error': str(exc)}
            yield f"data: {json.dumps(failure)}\n\n"
            yield "data: [DONE]\n\n"

    stream = StreamingHttpResponse(_events(), content_type='text/event-stream')
    stream['Cache-Control'] = 'no-cache'
    return stream
