package com.flink.hbase.elasticsearch2kafka;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.flink.hbase.kafka2elasticsearch.UserDocument;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

/**
 * 文档到 Kafka 消息转换器
 * 将 UserDocument 对象转换为 Kafka 消息格式（Key-Value 对）
 */
/**
 * Document-to-Kafka message converter.
 *
 * <p>Maps a {@link UserDocument} to a Kafka message as a (key, value) pair:
 * the key is the user id (so one user's messages land on one partition), and
 * the value is the document rendered in the configured format — {@code "json"}
 * (optionally wrapped with metadata), {@code "avro"} (simplified: a JSON
 * envelope with a schema name, not real Avro binary), or {@code "custom"}
 * (a pipe-delimited key=value string). Unknown formats fall back to JSON.
 *
 * <p>NOTE(review): {@link #map} returns {@code null} for invalid documents.
 * Flink's {@code MapFunction} contract does not permit {@code null} results
 * (downstream serialization will NPE); a {@code FlatMapFunction} with a
 * {@code Collector} would be the correct shape. Kept as-is to preserve the
 * public interface — confirm how callers filter these out.
 */
public class DocumentToKafkaConverter implements MapFunction<UserDocument, Tuple2<String, String>>, Serializable {
    private static final Logger LOG = LoggerFactory.getLogger(DocumentToKafkaConverter.class);
    private static final long serialVersionUID = 1L;

    /** Requested output format; compared case-insensitively ("json" default). */
    private final String messageFormat;
    /** Whether JSON output is wrapped in a {data, metadata} envelope. */
    private final boolean includeMetadata;
    /** Name of the timestamp field; currently stored but not used by any code path. */
    private final String timestampField;

    // Jackson mapper and formatter are not Serializable, so they are marked
    // transient and rebuilt lazily on each Flink task after deserialization.
    private transient ObjectMapper objectMapper;
    private transient DateTimeFormatter dateTimeFormatter;

    /**
     * @param messageFormat   "json", "avro" or "custom"; null defaults to "json"
     * @param includeMetadata wrap JSON payloads with a metadata envelope
     * @param timestampField  timestamp field name; null defaults to "update_time"
     */
    public DocumentToKafkaConverter(String messageFormat, boolean includeMetadata, String timestampField) {
        this.messageFormat = messageFormat != null ? messageFormat : "json";
        this.includeMetadata = includeMetadata;
        this.timestampField = timestampField != null ? timestampField : "update_time";
    }

    /**
     * Converts one document to a Kafka (key, value) pair.
     *
     * @return the message tuple, or {@code null} when the document is null or
     *         invalid (see class-level note about the MapFunction contract)
     * @throws Exception if serialization fails; the document is logged first
     */
    @Override
    public Tuple2<String, String> map(UserDocument document) throws Exception {
        if (objectMapper == null) {
            initializeMapper();
        }

        if (document == null || !document.isValid()) {
            LOG.warn("Invalid document, skipping: {}", document);
            return null;
        }

        try {
            String key = generateMessageKey(document);
            String value = generateMessageValue(document);

            return Tuple2.of(key, value);

        } catch (Exception e) {
            LOG.error("Error converting document to Kafka message: {}", document, e);
            throw e;
        }
    }

    /**
     * Lazily builds the (transient) ObjectMapper and date formatter.
     */
    private void initializeMapper() {
        objectMapper = new ObjectMapper();
        // Without this module, java.time fields would fail to serialize.
        // NOTE(review): dates still serialize as numeric arrays unless
        // WRITE_DATES_AS_TIMESTAMPS is disabled — confirm desired wire format.
        objectMapper.registerModule(new JavaTimeModule());
        dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    }

    /**
     * Message key: the user id, so all messages for one user hit one partition.
     */
    private String generateMessageKey(UserDocument document) {
        return document.getUserId();
    }

    /**
     * Dispatches to the serializer for the configured format.
     * Locale.ROOT avoids locale-dependent case mapping (e.g. Turkish dotless i).
     */
    private String generateMessageValue(UserDocument document) throws Exception {
        switch (messageFormat.toLowerCase(Locale.ROOT)) {
            case "json":
                return generateJsonMessage(document);
            case "avro":
                return generateAvroMessage(document);
            case "custom":
                return generateCustomMessage(document);
            default:
                return generateJsonMessage(document);
        }
    }

    /**
     * JSON message: the bare document, or a {data, metadata} envelope when
     * {@code includeMetadata} is set.
     */
    private String generateJsonMessage(UserDocument document) throws Exception {
        if (includeMetadata) {
            Map<String, Object> messageWithMetadata = new HashMap<>();
            messageWithMetadata.put("data", document);
            messageWithMetadata.put("metadata", createMetadata(document));
            return objectMapper.writeValueAsString(messageWithMetadata);
        } else {
            return objectMapper.writeValueAsString(document);
        }
    }

    /**
     * "Avro" message — simplified placeholder. Emits a JSON envelope naming a
     * schema; real Avro serialization with a registered schema would replace this.
     */
    private String generateAvroMessage(UserDocument document) throws Exception {
        Map<String, Object> avroData = new HashMap<>();
        avroData.put("schema", "user_document_schema_v1");
        avroData.put("payload", document);
        return objectMapper.writeValueAsString(avroData);
    }

    /**
     * Custom message: pipe-delimited key=value pairs. The trailing "|" after
     * "status" is only followed by update_time when it is present.
     */
    private String generateCustomMessage(UserDocument document) throws Exception {
        StringBuilder sb = new StringBuilder();
        sb.append("user_id=").append(document.getUserId()).append("|");
        sb.append("name=").append(document.getName()).append("|");
        sb.append("age=").append(document.getAge()).append("|");
        sb.append("email=").append(document.getEmail()).append("|");
        sb.append("status=").append(document.getStatus()).append("|");
        if (document.getUpdateTime() != null) {
            sb.append("update_time=").append(document.getUpdateTime().format(dateTimeFormatter));
        }
        return sb.toString();
    }

    /**
     * Builds the metadata envelope: source system, wall-clock emit time,
     * converter version, configured format, and the document's own update time.
     */
    private Map<String, Object> createMetadata(UserDocument document) {
        Map<String, Object> metadata = new HashMap<>();
        metadata.put("source", "elasticsearch");
        metadata.put("timestamp", System.currentTimeMillis());
        metadata.put("version", "1.0");
        metadata.put("format", messageFormat);

        if (document.getUpdateTime() != null) {
            metadata.put("source_timestamp", document.getUpdateTime().format(dateTimeFormatter));
        }

        return metadata;
    }

    /**
     * Converter that normalizes gender/status and fills default score before
     * the standard conversion.
     */
    public static class EnhancedConverter extends DocumentToKafkaConverter {

        public EnhancedConverter(String messageFormat, boolean includeMetadata, String timestampField) {
            super(messageFormat, includeMetadata, timestampField);
        }

        @Override
        public Tuple2<String, String> map(UserDocument document) throws Exception {
            if (document == null || !document.isValid()) {
                return null;
            }

            // Enrich/normalize before the base conversion runs.
            UserDocument enhancedDocument = enhanceDocument(document);

            return super.map(enhancedDocument);
        }

        /**
         * Returns a normalized copy of the document.
         *
         * <p>BUG FIX: the substring checks must test the longer/negative token
         * first — "female".contains("male") and "inactive".contains("active")
         * are both true, so the previous ordering misclassified every female
         * user as "male" and every inactive user as "active".
         */
        private UserDocument enhanceDocument(UserDocument document) {
            UserDocument.Builder builder = UserDocument.builder()
                .userId(document.getUserId())
                .name(document.getName())
                .age(document.getAge())
                .gender(document.getGender())
                .email(document.getEmail())
                .phone(document.getPhone())
                .address(document.getAddress())
                .city(document.getCity())
                .country(document.getCountry())
                .registrationDate(document.getRegistrationDate())
                .lastLogin(document.getLastLogin())
                .status(document.getStatus())
                .tags(document.getTags())
                .score(document.getScore())
                .createTime(document.getCreateTime())
                .updateTime(document.getUpdateTime());

            UserDocument enhanced = builder.build();

            // Normalize gender: check "female" before "male" (substring overlap).
            if (enhanced.getGender() != null) {
                String gender = enhanced.getGender().toLowerCase(Locale.ROOT);
                if (gender.contains("female") || gender.contains("女")) {
                    enhanced.setGender("female");
                } else if (gender.contains("male") || gender.contains("男")) {
                    enhanced.setGender("male");
                } else {
                    enhanced.setGender("unknown");
                }
            }

            // Default score when absent.
            if (enhanced.getScore() == null) {
                enhanced.setScore(50.0);
            }

            // Normalize status: check "inactive" before "active" (substring overlap).
            if (enhanced.getStatus() != null) {
                String status = enhanced.getStatus().toLowerCase(Locale.ROOT);
                if (status.contains("inactive") || status.contains("未激活")) {
                    enhanced.setStatus("inactive");
                } else if (status.contains("active") || status.contains("激活")) {
                    enhanced.setStatus("active");
                } else {
                    enhanced.setStatus("unknown");
                }
            }

            return enhanced;
        }
    }

    /**
     * Factory: standard converter with explicit settings.
     */
    public static DocumentToKafkaConverter create(String messageFormat, boolean includeMetadata, String timestampField) {
        return new DocumentToKafkaConverter(messageFormat, includeMetadata, timestampField);
    }

    /**
     * Factory: normalizing/enriching converter.
     */
    public static EnhancedConverter createEnhanced(String messageFormat, boolean includeMetadata, String timestampField) {
        return new EnhancedConverter(messageFormat, includeMetadata, timestampField);
    }

    /**
     * Factory: JSON output with metadata envelope, "update_time" timestamp field.
     */
    public static DocumentToKafkaConverter createDefault() {
        return new DocumentToKafkaConverter("json", true, "update_time");
    }
}