package com.meiya.flink.util;

import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.meiya.flink.config.AppConfig;
import com.meiya.flink.mock.AccountDataGenerator;
import com.meiya.flink.mock.TransactionDataGenerator;

/**
 * Kafka test-data utility: pushes mock account / transaction / fund-flow
 * records into Kafka topics and reads them back with a plain consumer to
 * verify connectivity and topic contents.
 *
 * <p>All log messages are intentionally kept in their original wording so
 * that existing log-scraping / operator workflows keep working.
 */
public class KafkaTestUtil {
    private static final Logger logger = LoggerFactory.getLogger(KafkaTestUtil.class);

    /** Number of mock accounts generated by {@link #sendTestDataToKafka()}. */
    private static final int NUM_ACCOUNTS = 10;
    /** Number of mock transaction records generated by {@link #sendTestDataToKafka()}. */
    private static final int NUM_TRANSACTIONS = 100;
    /** Number of mock fund-flow records generated by {@link #sendTestDataToKafka()}. */
    private static final int NUM_FUND_FLOWS = 50;
    /** How many sample account entries / consumed records to print for eyeballing. */
    private static final int SAMPLE_ACCOUNTS_TO_LOG = 3;
    private static final int SAMPLE_RECORDS_TO_LOG = 5;

    /** Utility class — not instantiable. */
    private KafkaTestUtil() {
    }

    /**
     * Smoke test for the Flink runtime itself: runs a trivial job that
     * uppercases three literal strings and prints them.
     *
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: three literal elements.
        DataStream<String> source = env.fromData("Hello", "Flink", "World");

        // Transformation: map each element to upper case.
        DataStream<String> transformed = source.map(String::toUpperCase);

        // Sink: print every element to stdout.
        transformed.addSink(new PrintSinkFunction<>());

        // Submit and run the job.
        env.execute("Simple Flink Job");
    }

    /**
     * Sends a batch of mock test data to Kafka: accounts, transactions and
     * fund-flow records. The same account map is reused for the transaction
     * and fund-flow generators so downstream jobs can join records by account.
     * Failures are logged and swallowed — this is best-effort test tooling.
     */
    public static void sendTestDataToKafka() {
        logger.info("开始向Kafka发送测试数据...");

        try {
            // Generate and send account master data; keep the map for reuse below.
            Map<String, AccountDataGenerator.AccountInfo> accountMap =
                AccountDataGenerator.sendAccountTestData(AppConfig.getKafkaBrokers(), "account-data", NUM_ACCOUNTS);

            // Transaction records referencing the accounts above.
            TransactionDataGenerator.sendTransactionTestData(AppConfig.getKafkaBrokers(), "transaction-data", NUM_TRANSACTIONS, accountMap);

            // Fund-flow records — same accounts, so joins line up.
            logger.info("开始生成并发送资金流水数据...");
            TransactionDataGenerator.sendTransactionTestData(AppConfig.getKafkaBrokers(), "fund-flow-data", NUM_FUND_FLOWS, accountMap);
            logger.info("资金流水数据发送完成");

            logger.info("测试数据发送完成，共发送 {} 个账户, {} 条交易记录, {} 条资金流水记录",
                    NUM_ACCOUNTS, NUM_TRANSACTIONS, NUM_FUND_FLOWS);

            // Print a few accounts so the join keys can be verified by eye.
            logger.info("账户信息示例（用于验证关联关系）:");
            int count = 0;
            for (Map.Entry<String, AccountDataGenerator.AccountInfo> entry : accountMap.entrySet()) {
                if (count++ >= SAMPLE_ACCOUNTS_TO_LOG) {
                    break; // stop iterating once enough samples were printed
                }
                AccountDataGenerator.AccountInfo info = entry.getValue();
                logger.info("账户: {}, 名称: {}, 证件号: {}, 银行: {}, 线索ID: {}, 插入时间: {}",
                        info.bankCard, info.accountName, info.idNumber, info.bankName, info.clueId, info.crrq);
            }
        } catch (Exception e) {
            logger.error("发送测试数据失败: {}", e.getMessage(), e);
        }
    }

    /**
     * Inspects the configured transaction topic with a throwaway consumer
     * group: lists all topics, then — if the topic exists — polls it for up
     * to 10 seconds and logs the first few records. Diagnostic only; all
     * failures are logged, never thrown.
     */
    public static void checkKafkaTopicData() {
        logger.info("开始直接检查Kafka主题中的数据...");
        Properties props = AppConfig.getKafkaConsumerProperties();
        // Fresh group id per run so earlier committed offsets never hide data.
        props.put("group.id", "kafka-checker-" + System.currentTimeMillis());

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {

            // Enumerate every topic visible to this consumer.
            Set<String> topics = consumer.listTopics().keySet();
            logger.info("Kafka中的所有主题: {}", topics);

            if (topics.contains(AppConfig.getKafkaTransactionTopic())) {
                logger.info("找到{}主题，开始检查数据...", AppConfig.getKafkaTransactionTopic());
                consumer.subscribe(Collections.singletonList(AppConfig.getKafkaTransactionTopic()));

                // Single poll with a generous timeout; partition assignment
                // happens inside poll, so this may legitimately return empty
                // on a slow cluster even when data exists.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(10));

                if (records.isEmpty()) {
                    logger.warn("{}主题中没有数据", AppConfig.getKafkaTransactionTopic());
                } else {
                    logger.info("成功从{}主题中获取到 {} 条记录", AppConfig.getKafkaTransactionTopic(), records.count());
                    int count = 0;
                    for (ConsumerRecord<String, String> record : records) {
                        if (count++ >= SAMPLE_RECORDS_TO_LOG) {
                            break; // only log a sample, not the whole batch
                        }
                        logger.info("记录: key={}, value={}", record.key(), record.value());
                    }
                }
            } else {
                logger.warn("Kafka中不存在{}主题", AppConfig.getKafkaTransactionTopic());
            }
        } catch (Exception e) {
            logger.error("检查Kafka主题数据时出错: {}", e.getMessage(), e);
        }
    }

    /**
     * Reads a handful of records straight from the transaction topic with the
     * raw Kafka consumer API (bypassing Flink entirely) and logs them.
     * Diagnostic only; all failures are logged, never thrown.
     */
    public static void directReadFromKafka() {
        logger.info("尝试直接从Kafka读取数据...");
        try {
            Properties props = AppConfig.getKafkaConsumerProperties();
            // Fresh group id per run so earlier committed offsets never hide data.
            props.put("group.id", "direct-reader-" + System.currentTimeMillis());
            props.put("max.poll.records", "5"); // cap the batch at 5 records

            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {

                consumer.subscribe(Collections.singletonList(AppConfig.getKafkaTransactionTopic()));

                // Single poll, 5-second timeout (see checkKafkaTopicData for caveats).
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));

                if (records.isEmpty()) {
                    logger.warn("直接读取：{}主题中没有数据", AppConfig.getKafkaTransactionTopic());
                } else {
                    logger.info("直接读取：成功从{}主题中获取到 {} 条记录", AppConfig.getKafkaTransactionTopic(), records.count());
                    for (ConsumerRecord<String, String> record : records) {
                        logger.info("直接读取记录: key={}, value={}", record.key(), record.value());
                    }
                }
            }
        } catch (Exception e) {
            logger.error("直接从Kafka读取数据失败: {}", e.getMessage(), e);
        }
    }
}