package com.datareport.task;

import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * 统计计算任务
 */
@Slf4j
public class ComputeTask {
    
    /**
     * Entry point. Expects the rule ID as the first CLI argument, then builds and
     * runs the streaming statistics pipeline for that rule.
     *
     * @param args {@code args[0]} is the mandatory rule ID
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // The rule ID is mandatory; bail out early when it is missing.
        String ruleId = args.length > 0 ? args[0] : null;
        if (ruleId == null) {
            log.error("规则ID不能为空");
            return;
        }

        // Streaming execution environment with periodic checkpointing:
        // one checkpoint per minute, each allowed at most 30 seconds.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(60000);
        env.getCheckpointConfig().setCheckpointTimeout(30000);

        // Pipeline: source -> parse -> key by rule -> 10s tumbling window -> aggregate.
        DataStream<StatisticResult> resultStream = env
                .addSource(new KafkaSourceFunction(ruleId))
                .map(new StatisticMapper())
                .keyBy(new RuleKeySelector())
                .window(TumblingProcessingTimeWindows.of(Time.seconds(10)))
                .aggregate(new StatisticAggregateFunction());

        // Emit the per-window statistics.
        resultStream.addSink(new StatisticSink());

        env.execute("Statistics Computation Job - Rule: " + ruleId);
    }
    
    /**
     * Kafka数据源
     */
    /**
     * Simulated Kafka source: emits one JSON record per second for the given rule.
     *
     * <p>In a real deployment this would consume from Kafka; here the payload is
     * mocked ({@code ruleId}, current timestamp, random value) so the job can run
     * standalone.
     */
    public static class KafkaSourceFunction implements SourceFunction<String> {
        private final String ruleId;
        // volatile is required: cancel() is called from a different thread than
        // run(), and without it the updated flag may never become visible to the
        // emit loop, leaving the source spinning after cancellation.
        private volatile boolean isRunning = true;

        public KafkaSourceFunction(String ruleId) {
            this.ruleId = ruleId;
        }

        @Override
        public void run(SourceContext<String> ctx) throws Exception {
            while (isRunning) {
                // Mock payload: rule ID, event time, random metric in [0, 100).
                Map<String, Object> data = new HashMap<>();
                data.put("ruleId", ruleId);
                data.put("timestamp", System.currentTimeMillis());
                data.put("value", Math.random() * 100);

                ctx.collect(JSON.toJSONString(data));

                // Throttle generation to roughly one record per second.
                TimeUnit.SECONDS.sleep(1);
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    }
    
    /**
     * 统计数据映射
     */
    /**
     * Deserializes a JSON payload into a {@link StatisticRecord}.
     *
     * <p>Expects the fields {@code ruleId}, {@code timestamp} and {@code value};
     * absent fields yield {@code null} on the record (fastjson2 semantics).
     */
    public static class StatisticMapper implements MapFunction<String, StatisticRecord> {
        @Override
        public StatisticRecord map(String value) throws Exception {
            JSONObject payload = JSON.parseObject(value);
            StatisticRecord record = new StatisticRecord();
            record.setRuleId(payload.getString("ruleId"));
            record.setTimestamp(payload.getLong("timestamp"));
            record.setValue(payload.getDouble("value"));
            return record;
        }
    }
    
    /**
     * 规则键选择器
     */
    /**
     * Partitions records by rule ID so each rule's statistics are aggregated
     * independently.
     */
    public static class RuleKeySelector implements KeySelector<StatisticRecord, String> {
        @Override
        public String getKey(StatisticRecord record) throws Exception {
            // The key is simply the rule identifier carried on the record.
            return record.getRuleId();
        }
    }
    
    /**
     * 统计聚合函数
     */
    /**
     * Window aggregate function: folds {@link StatisticRecord}s into a
     * {@link StatisticAccumulator} and produces a {@link StatisticResult}
     * per window. All numeric logic lives in the accumulator itself.
     */
    public static class StatisticAggregateFunction implements org.apache.flink.api.common.functions.AggregateFunction<StatisticRecord, StatisticAccumulator, StatisticResult> {
        @Override
        public StatisticAccumulator createAccumulator() {
            // Fresh accumulator per window/key.
            return new StatisticAccumulator();
        }
        
        @Override
        public StatisticAccumulator add(StatisticRecord value, StatisticAccumulator acc) {
            // Fold one record into the running sum/min/max/count.
            acc.update(value);
            return acc;
        }
        
        @Override
        public StatisticResult getResult(StatisticAccumulator acc) {
            // Snapshot the accumulated values at window close.
            return acc.toResult();
        }
        
        @Override
        public StatisticAccumulator merge(StatisticAccumulator a, StatisticAccumulator b) {
            // Combine partial accumulators (e.g. for merging windows).
            return a.merge(b);
        }
    }
    
    /**
     * 统计记录
     */
    /**
     * One parsed input event: the rule it belongs to, its event timestamp
     * (epoch millis), and the metric value to aggregate. Mutable POJO with a
     * no-arg constructor so Flink can serialize it as a POJO type.
     */
    public static class StatisticRecord {
        // Identifier of the rule this event belongs to (also the stream key).
        private String ruleId;
        // Event timestamp in epoch milliseconds; boxed — may be null if absent in the JSON.
        private Long timestamp;
        // Metric value; boxed — may be null if absent in the JSON.
        private Double value;
        
        public String getRuleId() {
            return ruleId;
        }
        
        public void setRuleId(String ruleId) {
            this.ruleId = ruleId;
        }
        
        public Long getTimestamp() {
            return timestamp;
        }
        
        public void setTimestamp(Long timestamp) {
            this.timestamp = timestamp;
        }
        
        public Double getValue() {
            return value;
        }
        
        public void setValue(Double value) {
            this.value = value;
        }
    }
    
    /**
     * 统计累加器
     */
    /**
     * Mutable accumulator holding running sum/min/max/count for one rule's window.
     *
     * <p>Fix: the previous {@code max} sentinel was {@code Double.MIN_VALUE},
     * which is the smallest <em>positive</em> double — a window containing only
     * negative values would report a wrong maximum. Infinity sentinels make the
     * first comparison always win for both bounds.
     */
    public static class StatisticAccumulator {
        private String ruleId;
        private double sum = 0;
        // Sentinels: any real value replaces them on the first update().
        private double min = Double.POSITIVE_INFINITY;
        private double max = Double.NEGATIVE_INFINITY;
        private long count = 0;
        
        /** Folds one record into the running statistics. */
        public void update(StatisticRecord record) {
            this.ruleId = record.getRuleId();
            this.sum += record.getValue();
            this.min = Math.min(this.min, record.getValue());
            this.max = Math.max(this.max, record.getValue());
            this.count++;
        }
        
        /** Merges another partial accumulator into this one and returns this. */
        public StatisticAccumulator merge(StatisticAccumulator other) {
            // Adopt the other side's rule ID when this side has seen no records
            // yet; otherwise the merged result would carry a null ruleId.
            if (this.ruleId == null) {
                this.ruleId = other.ruleId;
            }
            this.sum += other.sum;
            this.min = Math.min(this.min, other.min);
            this.max = Math.max(this.max, other.max);
            this.count += other.count;
            return this;
        }
        
        /** Snapshots the accumulated values into a result at window close. */
        public StatisticResult toResult() {
            StatisticResult result = new StatisticResult();
            result.setRuleId(this.ruleId);
            result.setSum(this.sum);
            // Report 0 for an empty window instead of leaking the sentinels.
            result.setMin(this.count > 0 ? this.min : 0);
            result.setMax(this.count > 0 ? this.max : 0);
            result.setAvg(this.count > 0 ? this.sum / this.count : 0);
            result.setCount(this.count);
            result.setTimestamp(System.currentTimeMillis());
            return result;
        }
    }
    
    /**
     * 统计结果
     */
    /**
     * Aggregated statistics for one rule over one window: sum, min, max, average,
     * record count, and the wall-clock time the result was produced. Mutable POJO
     * with a no-arg constructor so Flink can serialize it as a POJO type.
     */
    public static class StatisticResult {
        // Rule this result belongs to.
        private String ruleId;
        // Sum of all values in the window.
        private double sum;
        // Smallest value observed in the window.
        private double min;
        // Largest value observed in the window.
        private double max;
        // Arithmetic mean (sum / count) for the window.
        private double avg;
        // Number of records aggregated.
        private long count;
        // Wall-clock time (epoch millis) at which the result was materialized.
        private long timestamp;
        
        public String getRuleId() {
            return ruleId;
        }
        
        public void setRuleId(String ruleId) {
            this.ruleId = ruleId;
        }
        
        public double getSum() {
            return sum;
        }
        
        public void setSum(double sum) {
            this.sum = sum;
        }
        
        public double getMin() {
            return min;
        }
        
        public void setMin(double min) {
            this.min = min;
        }
        
        public double getMax() {
            return max;
        }
        
        public void setMax(double max) {
            this.max = max;
        }
        
        public double getAvg() {
            return avg;
        }
        
        public void setAvg(double avg) {
            this.avg = avg;
        }
        
        public long getCount() {
            return count;
        }
        
        public void setCount(long count) {
            this.count = count;
        }
        
        public long getTimestamp() {
            return timestamp;
        }
        
        public void setTimestamp(long timestamp) {
            this.timestamp = timestamp;
        }
    }
    
    /**
     * 统计结果输出
     */
    /**
     * Sink for aggregated statistics. Currently logs each result and delegates to
     * a persistence stub; in a real deployment the stub would write to a database.
     *
     * <p>Failures are logged rather than rethrown so a single bad record does not
     * fail the whole job.
     */
    public static class StatisticSink implements SinkFunction<StatisticResult> {
        @Override
        public void invoke(StatisticResult result, Context context) {
            try {
                // Surface the window result in the logs for observability.
                log.info("统计结果: ruleId={}, sum={}, avg={}, min={}, max={}, count={}",
                        result.getRuleId(), result.getSum(), result.getAvg(),
                        result.getMin(), result.getMax(), result.getCount());
                saveResultToDatabase(result);
            } catch (Exception e) {
                // Swallow deliberately (best-effort sink) — record the failure only.
                log.error("保存统计结果失败", e);
            }
        }
        
        /** Placeholder for the real persistence logic (e.g. a JDBC upsert). */
        private void saveResultToDatabase(StatisticResult result) {
            // Intentionally empty in this simplified version.
        }
    }
}