from django.http import JsonResponse
from django.shortcuts import render
from django.conf import settings
from kafka import KafkaProducer
from confluent_kafka import Producer
from StorageBox.models import BigDataModel
import logging
import base64
import mysql.connector
import json

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def index(request):
    """Render the extensions list page with every BigDataModel row."""
    extensions = BigDataModel.objects.all()
    context = {'extensionsList': extensions}
    return render(request, 'extensions/extensionsList.html', context)
'''
Send the queried BigDataModel rows into Kafka using the confluent_kafka client.
'''
def extensions_to_kafka(request):
    """Publish every BigDataModel row to Kafka via confluent_kafka.

    Each row is serialized to JSON (the binary ``data`` field is Base64-encoded
    so the payload is valid JSON) and produced to the
    ``SSJOM_PROJ-LOGS_COLLECTION`` topic.

    Returns:
        JsonResponse with ``{"status": "success", ...}`` on success, or
        ``{"status": "error", ...}`` with HTTP 500 when anything fails.
    """
    try:
        # Query the rows of the BigData table.
        data_list = BigDataModel.objects.all()
        # Connect to Kafka.
        producer = Producer(get_kafka_settings())
        for data in data_list:
            # Custom serialization of the model instance: the binary field is
            # Base64-encoded so it survives json.dumps.
            value = json.dumps({
                'id': data.id,
                'name': data.name,
                'data': base64.b64encode(data.data).decode('utf-8'),
                'version': data.version,
            })
            # confluent_kafka requires the topic on every produce() call.
            producer.produce(value=value, topic='SSJOM_PROJ-LOGS_COLLECTION')
            # Serve delivery callbacks and drain the internal queue; without
            # this, a large table can overflow the producer buffer.
            producer.poll(0)
        producer.flush()  # block until all buffered messages are delivered
        return JsonResponse({"status": "success", "message": "Messages sent to Kafka"})
    except Exception as e:
        # logger.exception records the full traceback; %-style args are lazy.
        logger.exception("Failed to send messages to Kafka: %s", e)
        # Fix: signal failure to HTTP clients (was returning 200 with an error body).
        return JsonResponse({"status": "error", "message": str(e)}, status=500)

# Build the configuration used to connect to the Kafka cluster.
def get_kafka_settings():
    """Assemble the confluent_kafka Producer config from Django settings.

    Pulls the relevant keys out of ``settings.KAFKA_SETTINGS``; a missing key
    comes through as ``None`` (``dict.get`` semantics).
    """
    wanted_keys = (
        'bootstrap.servers',
        'group.id',
        'auto.offset.reset',
        # 'topic' is deliberately excluded -- the topic is passed per produce().
        'request.timeout.ms',
    )
    return {key: settings.KAFKA_SETTINGS.get(key) for key in wanted_keys}

# NOTE(review): dead code -- kafka-python variant of extensions_to_kafka, kept
# for reference only. The producer.send line below carries a stray
# ",kafka_settings['request.timeout.ms']" that builds a throwaway tuple;
# fix that before reviving this function.
# def extensions_to_kafka2(request):
#     try:
#         # Query the rows of the BigData table
#         data_list = BigDataModel.objects.all()
#         # Fetch the Kafka configuration
#         kafka_settings = get_kafka_settings()
#         # Connect to Kafka
#         producer = KafkaProducer(
#             bootstrap_servers=kafka_settings['bootstrap.servers'],
#             value_serializer=lambda v: json.dumps(v).encode('utf-8')  # how values are serialized
#         )
#         for data in data_list:
#             # Custom-serialize this model instance into a dict
#             value = {
#                 'id': data.id,
#                 'name': data.name,
#                 'data': base64.b64encode(data.data).decode('utf-8'),  # encode the binary data as a Base64 string
#                 'version': data.version,
#             }
#             # Send the data to the configured topic
#             producer.send(kafka_settings['topic'], value=value),kafka_settings['request.timeout.ms']
#         producer.flush()  # make sure every message is sent
#         return JsonResponse({"status": "success", "message": "Messages sent to Kafka"})
#     except Exception as e:
#         logger.error(f"Failed to send messages to Kafka: {e}")
#         return JsonResponse({"status": "error", "message": str(e)})