package com.datareport.task;

import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import java.sql.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * 数据抽取Flink任务
 */
@Slf4j
public class ExtractTask {

    /**
     * Job entry point. Builds and runs the extract pipeline:
     * MySQL source → transform map → Kafka sink.
     *
     * <p>Expected arguments: {@code --config <json>} (required job config) and
     * {@code --isFullExtract <bool>} (defaults to {@code true}).
     *
     * @param args command-line arguments parsed via {@link ParameterTool}
     * @throws Exception if argument validation fails or job execution aborts
     */
    public static void main(String[] args) throws Exception {
        // Read and validate the command-line parameters.
        ParameterTool params = ParameterTool.fromArgs(args);
        String configJson = params.get("config");
        boolean fullExtract = params.getBoolean("isFullExtract", true);

        if (configJson == null || configJson.isEmpty()) {
            throw new IllegalArgumentException("配置参数不能为空");
        }

        JSONObject config = JSON.parseObject(configJson);

        // Set up the streaming environment with a 60s checkpoint interval.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(60000);

        // Assemble the pipeline: source → transform → sink.
        DataStream<String> processed =
                env.addSource(new MySQLSourceFunction(config, fullExtract))
                        .name("MySQL-Source")
                        .map(new DataTransformMapper())
                        .name("Data-Transform");

        processed.addSink(new KafkaSinkFunction()).name("Kafka-Sink");

        // Launch the job, tagging it with the config id for traceability.
        env.execute("Data-Extract-Task-" + config.getString("configId"));
    }
    
    /**
     * MySQL数据源函数
     */
    /**
     * MySQL source function: runs one query against the configured table and
     * emits each row as a JSON string.
     *
     * <p>Supports full extraction, or incremental extraction driven by a
     * monotonically increasing column ({@code incrementField}) whose last-seen
     * value is persisted between runs (stubbed here).
     */
    public static class MySQLSourceFunction implements SourceFunction<String> {
        private final JSONObject config;
        private final boolean isFullExtract;
        // Cooperative cancellation flag, checked between rows.
        private volatile boolean isRunning = true;

        public MySQLSourceFunction(JSONObject config, boolean isFullExtract) {
            this.config = config;
            this.isFullExtract = isFullExtract;
        }

        @Override
        public void run(SourceContext<String> ctx) throws Exception {
            // Connection and query settings come from the job configuration.
            JSONObject sourceConfig = JSON.parseObject(config.getString("sourceConfig"));
            String url = sourceConfig.getString("url");
            String username = sourceConfig.getString("username");
            String password = sourceConfig.getString("password");
            String table = config.getString("extractTable");
            String fields = config.getString("extractFields");
            String condition = config.getString("extractCondition");
            String incrementField = config.getString("incrementField");

            boolean incremental =
                    !isFullExtract && incrementField != null && !incrementField.isEmpty();

            // Build the SQL. Table/field/condition identifiers come from trusted
            // job config; the incremental watermark VALUE is bound as a '?'
            // parameter instead of being concatenated and hand-quoted into the
            // string (the original was an SQL-injection/quoting hazard).
            StringBuilder sql = new StringBuilder();
            sql.append("SELECT ").append(fields).append(" FROM ").append(table);
            if (incremental) {
                sql.append(" WHERE ");
                if (condition != null && !condition.isEmpty()) {
                    sql.append(condition).append(" AND ");
                }
                sql.append(incrementField).append(" > ?");
            } else if (condition != null && !condition.isEmpty()) {
                sql.append(" WHERE ").append(condition);
            }
            // Order by the increment column so the watermark advances monotonically.
            if (incrementField != null && !incrementField.isEmpty()) {
                sql.append(" ORDER BY ").append(incrementField);
            }

            log.info("执行SQL: {}", sql);

            // try-with-resources closes rs/stmt/conn in reverse order even when
            // an earlier close() throws — the original manual finally chain could
            // leak stmt/conn if rs.close() failed.
            try (Connection conn = DriverManager.getConnection(url, username, password);
                 PreparedStatement stmt = conn.prepareStatement(sql.toString())) {
                if (incremental) {
                    stmt.setString(1, getLastExtractValue(incrementField, table));
                }
                try (ResultSet rs = stmt.executeQuery()) {
                    ResultSetMetaData metaData = rs.getMetaData();
                    int columnCount = metaData.getColumnCount();

                    while (isRunning && rs.next()) {
                        Map<String, Object> row = new HashMap<>(columnCount);
                        for (int i = 1; i <= columnCount; i++) {
                            row.put(metaData.getColumnName(i), rs.getObject(i));
                        }

                        // Emit under the checkpoint lock so record emission and
                        // the watermark-state update are atomic with respect to
                        // checkpoints (checkpointing is enabled in main()).
                        synchronized (ctx.getCheckpointLock()) {
                            ctx.collect(JSON.toJSONString(row));
                            if (incremental) {
                                updateLastExtractValue(
                                        incrementField, table, rs.getString(incrementField));
                            }
                        }
                    }
                }
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }

        /**
         * Returns the last persisted watermark value for incremental extraction.
         * Stub: a real implementation would read it from Redis or a database.
         */
        private String getLastExtractValue(String incrementField, String table) {
            return "1970-01-01 00:00:00";
        }

        /**
         * Persists the latest watermark value for incremental extraction.
         * Stub: a real implementation would write it to Redis or a database.
         */
        private void updateLastExtractValue(String incrementField, String table, String value) {
            // Intentional no-op placeholder.
        }
    }
    
    /**
     * 数据转换Mapper
     */
    /**
     * Identity transform placeholder. A real deployment would perform record
     * cleaning and conversion here; currently every record passes through as-is.
     */
    public static class DataTransformMapper implements MapFunction<String, String> {
        @Override
        public String map(String record) throws Exception {
            // No transformation applied — emit the record unchanged.
            return record;
        }
    }
    
    /**
     * Kafka输出Sink
     */
    /**
     * Placeholder sink that logs each record. A real deployment would publish
     * the records to Kafka instead of logging them.
     */
    public static class KafkaSinkFunction implements SinkFunction<String> {
        @Override
        public void invoke(String record, Context context) throws Exception {
            log.info("输出数据: {}", record);
        }
    }
}