package com.spt.msk;

import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.spt.msk.util.ConfigLoader;

/**
 * AWS Lambda handler that enriches an incoming IoT event map and forwards it
 * as a JSON message to a configured Amazon MSK (Kafka) topic.
 *
 * <p>The originating IoT topic (top-level {@code "topic"} key, or the first
 * record's {@code "topic"} inside a {@code "records"} map) is used as the
 * Kafka record key so related messages land on the same partition.
 */
public class MskKafkaLambda implements RequestHandler<Map<String, Object>, String> {

    // Singleton configuration loader for Kafka/AWS settings.
    private static final ConfigLoader config = ConfigLoader.getInstance();
    // ObjectMapper is thread-safe and expensive to build; cache one instance.
    private static final ObjectMapper objectMapper = new ObjectMapper();

    // Destination Kafka topic (could also be passed in dynamically per event).
    private static final String TOPIC_NAME = config.get("kafka.topic");

    // Kafka producer configuration, loaded once at class initialization.
    private static final Properties kafkaProps = loadKafkaProperties();

    public static void main(String[] args) {
        // Reserved for local testing.
    }

    /**
     * Lambda entry point: stamps the event with a unique {@code "sign"} and a
     * {@code "lambdaCurrentTime"}, serializes it to JSON, and publishes it to
     * the configured MSK topic, keyed by the originating IoT topic.
     *
     * @param event   the raw event map; mutated in place ("sign" and
     *                "lambdaCurrentTime" entries are added)
     * @param context Lambda runtime context, used for logging
     * @return a success message containing the JSON payload, or {@code "ERROR"}
     *         if the Kafka send fails
     */
    @Override
    public String handleRequest(Map<String, Object> event, Context context) {
        context.getLogger().log("Received eventMap: " + event);
        event.put("sign", System.currentTimeMillis() + ":" + UUID.randomUUID().toString());
        event.put("lambdaCurrentTime", System.currentTimeMillis());

        String messageJson;
        try {
            messageJson = objectMapper.writeValueAsString(event);
        } catch (Exception e) {
            // Log the full exception (getMessage() alone can be null) and fall
            // back to the map's toString so the message is still delivered.
            context.getLogger().log("JSON serialization failed: " + e);
            messageJson = event.toString();
        }

        context.getLogger().log("Received event JSON: " + messageJson);

        String iotTopic = extractIotTopic(event);
        context.getLogger().log("Received topic: " + iotTopic);

        // SECURITY: do NOT log kafkaProps here — it contains sasl.jaas.config
        // and AWS credentials, which must never reach CloudWatch logs.

        // NOTE(review): creating a producer per invocation is correct but
        // expensive; for high-throughput functions consider hoisting it to a
        // static field and flushing per invocation instead.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(kafkaProps)) {
            context.getLogger().log("Sending message to Kafka topic...\n");

            ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_NAME, iotTopic, messageJson);
            // Block until the broker acknowledges so failures surface before
            // the Lambda invocation completes.
            RecordMetadata metadata = producer.send(record).get();

            context.getLogger().log("Message sent successfully:\n");
            context.getLogger().log("Topic: " + metadata.topic() + "\n");
            context.getLogger().log("Partition: " + metadata.partition() + "\n");
            context.getLogger().log("Offset: " + metadata.offset() + "\n");
            context.getLogger().log("Timestamp: " + metadata.timestamp() + "\n");

        } catch (InterruptedException e) {
            // Restore the interrupt status so the runtime can observe it.
            Thread.currentThread().interrupt();
            context.getLogger().log("Kafka send interrupted: " + e + "\n");
            return "ERROR";
        } catch (Exception e) {
            context.getLogger().log("Kafka send failed: " + e + "\n");
            return "ERROR";
        }

        return "Message sent to Kafka: " + messageJson;
    }

    /**
     * Extracts the originating IoT topic from the event: first from the
     * top-level {@code "topic"} key, otherwise from the first record of the
     * {@code "records"} map; {@code null} if neither is present.
     */
    @SuppressWarnings("unchecked")
    private static String extractIotTopic(Map<String, Object> event) {
        String iotTopic = (String) event.get("topic");
        if (iotTopic == null && event.containsKey("records")) {
            // Unchecked cast: the IoT rule-engine payload shape is assumed
            // here — TODO confirm against the actual event schema.
            Map<String, List<Map<String, Object>>> records =
                    (Map<String, List<Map<String, Object>>>) event.get("records");
            if (!records.isEmpty()) {
                List<Map<String, Object>> firstRecordList = records.values().iterator().next();
                if (!firstRecordList.isEmpty()) {
                    iotTopic = (String) firstRecordList.get(0).get("topic");
                }
            }
        }
        return iotTopic;
    }

    /**
     * Builds the Kafka producer {@link Properties} from the configuration
     * loader. Also exports the AWS credentials as JVM system properties so the
     * SASL callback handler can pick them up.
     */
    private static Properties loadKafkaProperties() {
        Properties props = new Properties();

        props.put("bootstrap.servers", config.get("kafka.bootstrap.servers"));
        props.put("security.protocol", config.get("kafka.security.protocol"));
        props.put("sasl.mechanism", config.get("kafka.sasl.mechanism"));
        props.put("sasl.jaas.config", config.get("kafka.sasl.jaas.config"));
        props.put("sasl.client.callback.handler.class", config.get("kafka.sasl.client.callback.handler.class"));
        props.put("key.serializer", config.get("kafka.key.serializer"));
        props.put("value.serializer", config.get("kafka.value.serializer"));
        // NOTE(review): "aws.accessKeyId"/"aws.secretKey" are not Kafka producer
        // configs (the client will warn about unknown properties); kept for
        // backward compatibility — the system properties below are what the
        // MSK IAM callback handler actually reads.
        props.put("aws.accessKeyId", config.get("aws.access.key"));
        props.put("aws.secretKey", config.get("aws.secret.key"));

        // Export AWS credentials for the SASL client callback handler.
        System.setProperty("aws.accessKeyId", config.get("aws.access.key"));
        System.setProperty("aws.secretKey", config.get("aws.secret.key"));

        return props;
    }
}