package com.yl;

import cn.hutool.core.util.StrUtil;
import com.yl.constant.CdcType;
import com.yl.constant.Const;
import com.yl.entity.MultiDataEntity;
import com.yl.entity.PrimaryWarn;
import com.yl.entity.cdc.BaseTargetType;
import com.yl.entity.cdc.BatchIntegrity;
import com.yl.entity.cdc.MysqlCdcEntity;
import com.yl.flink.filter.*;
import com.yl.flink.processor.*;
import com.yl.flink.sink.CustomInfluxSink;
import com.yl.flink.sink.SinkTool;
import com.yl.flink.source.SourceTool;
import com.yl.util.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.streaming.connectors.influxdb.InfluxDBConfig;
import org.apache.flink.streaming.connectors.influxdb.InfluxDBPoint;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.influxdb.InfluxDB;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;

import java.util.List;
import java.util.Optional;

@Slf4j
public class Application {

    // Command-line / config parameter accessor (populated in init()).
    private ParameterTool params = null;
    // Flink stream execution environment (populated in init()).
    private StreamExecutionEnvironment env = null;
    // Broadcast stream carrying MySQL-CDC configuration entries (populated in init()).
    private BroadcastStream<MysqlCdcEntity> configStream = null;
    // Side-output tags used to split the calculation stream (populated in init()).
    private List<OutputTag<MultiDataEntity>> calTags = null;

    public static void main(String[] args) {
        Application app = new Application();
        // Initialize configuration; fails fast on error so exec() never runs
        // with half-initialized (null) fields.
        app.init(args);
        // Build and run the streaming pipeline.
        app.exec();
    }

    /**
     * Manual debugging hook: executes the current (possibly empty) environment.
     * Kept only for local experiments; not referenced by the normal startup path.
     */
    private void test() {
        try {
            this.env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Initializes the configuration parameters, the stream execution
     * environment, the broadcast configuration stream, and the calculation
     * side-output tags.
     * NOTE: the initialization order must not be changed.
     *
     * @param args command-line arguments forwarded to {@code ParamUtil}
     * @throws IllegalStateException if any initialization step fails. The
     *         previous version swallowed the exception and continued, which
     *         only surfaced later as an obscure NullPointerException in
     *         {@link #exec()}; failing fast points at the real cause.
     */
    private void init(String[] args) {
        try {
            // Parse configuration parameters.
            this.params = ParamUtil.getParameterTool(args);
            // Create the stream execution environment.
            this.env = FlinkUtil.getStreamEnv(this.params);
            // Obtain the MySQL-CDC configuration stream.
            SingleOutputStreamOperator<MysqlCdcEntity> confStream = MysqlCdcUtil
                    .getConfigStream(this.env, this.params);
            // Create the per-type continuous queries that count rows per hour.
            confStream
                    // keep only target-type configuration elements
                    .filter(new TypeFilter())
                    // create the continuous query for each type
                    .process(new InitCQFunc(this.params));
            // Broadcast the configuration stream to downstream operators.
            this.configStream = confStream
                    .broadcast(FlinkUtil.getConfigDescriptor(Const.FLINK_CONFIG_CDC));
            // Load the calculation side-output tags.
            this.calTags = MysqlUtil.getCalTags(this.params);
        } catch (Exception e) {
            log.error("配置初始化失败！", e);
            // Fail fast instead of continuing with null fields; the duplicate
            // printStackTrace() was removed (log.error already records the trace).
            throw new IllegalStateException("Application initialization failed", e);
        }
    }

    /**
     * Builds the streaming topology (raw data, calculations, alerts) and
     * executes the job. Exits the JVM with status 1 if execution fails.
     */
    private void exec() {
        // Configure the data source.
        DataStreamSource<String> streamSource = SourceTool.sourceStream(this.env, this.params);

        // [Part 1] Raw data stream.
        SingleOutputStreamOperator<MultiDataEntity> rawStream = streamSource
                // log each raw payload
                .map(new PrintPayloadFunc())
                // group by project
                .keyBy(new ProjectSelector())
                // attach the broadcast configuration stream
                .connect(this.configStream)
                // parse payloads according to the transport protocol
                .process(new DataParseFunc());

        // Persist the raw data.
        SinkTool.sinkRawStream(rawStream, this.params);

        // Cache per-target online status in Redis.
        rawStream
                // only near-real-time data: less than one hour late
                .filter(new DataFilter(false, 1))
                // group by target (measuring point)
                .keyBy(new TargetSelector())
                // build the Redis key/value pairs
                .process(new OnlineStatusMapFunc(this.params))
                // write via the redis connector
                .addSink(SinkTool.customRedisSink(this.params));

        // Batch-integrity check (protocol 3 only).
        rawStream
                // drop elements whose packet info is null: non-protocol-3 data
                // and follow-up elements of an already-seen packet
                .filter(new NullPacketFilter())
                // group by target
                .keyBy(new TargetSelector())
                // judge batch completeness
                .process(new IntegrityJudgeFunc())
                // persist batch-integrity results to MySQL
                .addSink(SinkTool.batchIntegrityMysqlSink(this.params));

        // When historical data is back-filled, correct the hourly row counts
        // for the affected time ranges.
        SingleOutputStreamOperator<Tuple4<String, String, Long, Integer>> countStream = rawStream
                // only historical data: more than one hour late
                .filter(new DataFilter(true, 1))
                // group by target
                .keyBy(new TargetSelector())
                // timer-triggered recount of the affected hourly windows
                .process(new HourCountFunc(this.params));

        countStream
                .map(new CountPointMapFunc())
                .addSink(new CustomInfluxSink(this.params));

        // [Part 2] Calculation streams.
        SingleOutputStreamOperator<MultiDataEntity> tagStream = rawStream
                // tag elements for side-output splitting
                .process(new TagStreamFunc());

        // Union of the raw stream and every calculated side stream.
        DataStream<MultiDataEntity> calStream = rawStream;

        // Split by calculation tag.
        for (OutputTag<MultiDataEntity> calTag : calTags) {
            log.info("cal-tag: {}", calTag.getId());
            // Compute the quota metrics for this tag's side output.
            SingleOutputStreamOperator<MultiDataEntity> sideCalStream = FlinkUtil.calTypeQuotas(tagStream, calTag);
            // Persist the calculated data.
            SinkTool.sinkCalStream(sideCalStream, this.params);
            // Merge into the combined stream used for alerting.
            calStream = calStream.union(sideCalStream);
        }

        // [Part 3] Alert stream.
        SingleOutputStreamOperator<PrimaryWarn> warnStream = calStream
                // evaluate each metric for alert conditions
                .flatMap(new WarnJudgeFunc())
                // log alert details
                .map(new PrintWarnFunc());

        // Cache alert state in Redis.
        warnStream
                // build the Redis key/value pairs
                .map(new WarnStatusMapFunc(this.params.get(Const.APP_NAME)))
                // write via the redis connector
                .addSink(SinkTool.customRedisSink(this.params));

        // Optionally persist alerts to MySQL.
        if (params.getBoolean(Const.WARN_SEND_MYSQL)) {
            warnStream
                    // write via the mysql connector
                    .addSink(SinkTool.warnMysqlSink(this.params));
        }

        // Dispatch alerts to the configured channels.
        warnStream
                .process(new WarnSendFunc());

        // Launch the job.
        try {
            this.env.execute(this.params.get(Const.APP_NAME));
        } catch (Exception e) {
            log.error("流任务执行失败！", e);
            // Abort the process. The e.printStackTrace() that previously
            // followed System.exit(1) was unreachable dead code and has been
            // removed (log.error already records the stack trace).
            System.exit(1);
        }
    }

}
