package com.bujunjie.study.realtime.dwd.db.split.app;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.alibaba.fastjson.parser.Feature;
import com.bujunjie.study.realtime.common.base.BaseApp;
import com.bujunjie.study.realtime.common.constant.FlinkConstant;
import com.bujunjie.study.realtime.common.model.sink.PageInfoModel;
import com.bujunjie.study.realtime.common.model.vo.CommonInfoVO;
import com.bujunjie.study.realtime.common.model.vo.PageInfoVO;
import com.bujunjie.study.realtime.common.util.FlinkSinkUtil;
import com.bujunjie.study.realtime.dwd.db.split.model.sink.*;
import com.bujunjie.study.realtime.dwd.db.split.model.vo.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * <p>Log splitting job: reads raw log events from Kafka, drops records that fail
 * JSON parsing (routing them to a dirty-data topic), repairs the new/old-visitor
 * flag per device, splits the stream into err/start/display/action/page
 * sub-streams and writes each to its own Kafka topic.</p>
 *
 * @author bu.junjie
 * @version 1.0.0
 * @createTime 2025/9/15 13:46
 */
@Slf4j
public class DwdBaseLog extends BaseApp {


    /**
     * Stream-map key: app start events.
     */
    private static final String START = "start";
    /**
     * Stream-map key: error events.
     */
    private static final String ERR = "err";
    /**
     * Stream-map key: display (exposure) events.
     */
    private static final String DISPLAY = "display";
    /**
     * Stream-map key: action events.
     */
    private static final String ACTION = "action";
    /**
     * Stream-map key: page events.
     */
    private static final String PAGE = "page";
    /**
     * Kafka topic receiving records that failed JSON parsing in {@link #etl}.
     */
    private static final String DIRTY_TOPIC = "dirty_data";
    /**
     * Milliseconds in one day; used to back-date the first-visit state for
     * users already known to be old. Declared as long to make the arithmetic
     * intent explicit (the value itself fits in an int).
     */
    private static final long ONE_DAY_MS = 24L * 60 * 60 * 1000;

    public static void main(String[] args) throws Exception {
        new DwdBaseLog().start(10011, 4, "dwd_base_log", FlinkConstant.TOPIC_LOG);
    }


    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> kafkaStrDS) {
        // 1. ETL: parse JSON, divert unparseable records to the dirty-data topic
        SingleOutputStreamOperator<BaseLogVO> filterDS = this.etl(kafkaStrDS);
        // 2. Repair the is_new flag using per-device keyed state
        SingleOutputStreamOperator<BaseLogVO> fixedDS = this.fixedNewAndOld(filterDS);
        // 3. Split into err/start/display/action/page streams via side outputs
        Map<String, DataStream<String>> streamMap = this.splitStream(fixedDS);
        // 4. Sink each split stream to its dedicated Kafka topic
        this.writeToKafka(streamMap);
    }


    /**
     * <p>Writes each split stream to its dedicated Kafka topic.</p>
     *
     * @param streamMap split streams keyed by {@link #PAGE}, {@link #ERR},
     *                  {@link #START}, {@link #ACTION}, {@link #DISPLAY}
     * @throws IllegalStateException if an expected key is missing from the map
     * @author bu.junjie
     * @date 2025/9/16 11:10
     */
    private void writeToKafka(Map<String, DataStream<String>> streamMap) {
        sinkTo(streamMap, PAGE, FlinkConstant.TOPIC_DWD_TRAFFIC_PAGE);
        sinkTo(streamMap, ERR, FlinkConstant.TOPIC_DWD_TRAFFIC_ERR);
        sinkTo(streamMap, START, FlinkConstant.TOPIC_DWD_TRAFFIC_START);
        sinkTo(streamMap, ACTION, FlinkConstant.TOPIC_DWD_TRAFFIC_ACTION);
        sinkTo(streamMap, DISPLAY, FlinkConstant.TOPIC_DWD_TRAFFIC_DISPLAY);
    }

    /**
     * <p>Looks up one split stream and attaches a Kafka sink for the given topic,
     * failing fast with a clear message instead of an anonymous NPE if the key
     * is absent.</p>
     *
     * @param streamMap split streams produced by {@link #splitStream}
     * @param key       stream-map key to sink
     * @param topic     target Kafka topic
     */
    private static void sinkTo(Map<String, DataStream<String>> streamMap, String key, String topic) {
        DataStream<String> stream = streamMap.get(key);
        if (ObjectUtil.isNull(stream)) {
            throw new IllegalStateException("split stream missing for key: " + key);
        }
        stream.sinkTo(FlinkSinkUtil.getKafkaSink(topic));
    }

    /**
     * <p>Splits the repaired stream into five sub-streams via side outputs:
     * error, start, display, action and page. Start logs are mutually exclusive
     * with page/display/action logs; error info may accompany either.</p>
     *
     * @param fixedDS stream with the repaired is_new flag
     * @return the split streams, keyed by the class constants
     * @author bu.junjie
     * @date 2025/9/16 10:27
     */
    private Map<String, DataStream<String>> splitStream(SingleOutputStreamOperator<BaseLogVO> fixedDS) {
        // Anonymous subclasses so Flink can capture the generic type at runtime
        OutputTag<String> errTag = new OutputTag<String>("errTag") {
        };
        OutputTag<String> startTag = new OutputTag<String>("startTag") {
        };
        OutputTag<String> displayTag = new OutputTag<String>("displayTag") {
        };
        OutputTag<String> actionTag = new OutputTag<String>("actionTag") {
        };
        OutputTag<String> pageTag = new OutputTag<String>("pageTag") {
        };

        SingleOutputStreamOperator<String> splitDS = fixedDS.process(new ProcessFunction<BaseLogVO, String>() {
            @Override
            public void processElement(BaseLogVO baseLog, ProcessFunction<BaseLogVO, String>.Context ctx, Collector<String> out) throws Exception {
                ErrorInfoModel err = baseLog.getErr();
                // Error log: may coexist with any other log type
                // NOTE(review): only the err payload is emitted here — common/page/ts
                // context is dropped; confirm downstream consumers do not need it
                if (ObjectUtil.isNotNull(err)) {
                    ctx.output(errTag, JSON.toJSONString(err));
                }
                // Start log: mutually exclusive with page/display/action logs
                StartInfoModel start = baseLog.getStart();
                if (ObjectUtil.isNotNull(start)) {
                    ctx.output(startTag, JSON.toJSONString(start));
                } else {
                    CommonInfoVO common = baseLog.getCommon();
                    PageInfoVO page = baseLog.getPage();
                    Long ts = baseLog.getTs();
                    // Display (exposure) log: one output record per display item
                    List<DisPlayItemVO> displays = baseLog.getDisplays();
                    if (CollectionUtil.isNotEmpty(displays)) {
                        displays.forEach(disPlayItem -> {
                            DisPlayInfoModel displayInfo = DisPlayInfoModel.builder()
                                    .common(common)
                                    .page(page)
                                    .display(disPlayItem)
                                    .ts(ts)
                                    .build();
                            ctx.output(displayTag, JSON.toJSONString(displayInfo));
                        });
                    }
                    // Action log: one output record per action item
                    List<ActionItemVO> actions = baseLog.getActions();
                    if (CollectionUtil.isNotEmpty(actions)) {
                        actions.forEach(actionItem -> {
                            ActionInfoModel actionInfo = ActionInfoModel.builder()
                                    .common(common)
                                    .page(page)
                                    .action(actionItem)
                                    .ts(ts)
                                    .build();
                            ctx.output(actionTag, JSON.toJSONString(actionInfo));
                        });
                    }

                    // Page log: every non-start record yields a page record
                    PageInfoModel pageInfoModel = PageInfoModel.builder()
                            .page(page)
                            .common(common)
                            .ts(ts)
                            .build();
                    ctx.output(pageTag, JSON.toJSONString(pageInfoModel));
                }
            }
        });

        // All output goes through side outputs; the operator's main output is unused
        SideOutputDataStream<String> errDS = splitDS.getSideOutput(errTag);
        SideOutputDataStream<String> startDS = splitDS.getSideOutput(startTag);
        SideOutputDataStream<String> displayDS = splitDS.getSideOutput(displayTag);
        SideOutputDataStream<String> actionDS = splitDS.getSideOutput(actionTag);
        SideOutputDataStream<String> pageDS = splitDS.getSideOutput(pageTag);

        // TODO(review): debug prints — remove or guard behind a flag before production
        pageDS.print("页面:");
        errDS.print("错误:");
        startDS.print("启动:");
        displayDS.print("曝光:");
        actionDS.print("动作:");
        Map<String, DataStream<String>> streamMap = new HashMap<>();
        streamMap.put(ERR, errDS);
        streamMap.put(START, startDS);
        streamMap.put(DISPLAY, displayDS);
        streamMap.put(ACTION, actionDS);
        streamMap.put(PAGE, pageDS);
        return streamMap;
    }

    /**
     * <p>Repairs the is_new flag per device (mid) using keyed state holding the
     * timestamp of the first visit seen by this job: a record claiming is_new=1
     * whose device already has an earlier first-visit timestamp is downgraded to
     * an old user; a record claiming is_new=0 with no state seen yet just
     * back-dates the state by one day so later records stay "old".</p>
     *
     * @param filterDS ETL-cleaned stream
     * @return stream with the repaired is_new flag
     * @author bu.junjie
     * @date 2025/9/15 14:58
     */
    public SingleOutputStreamOperator<BaseLogVO> fixedNewAndOld(SingleOutputStreamOperator<BaseLogVO> filterDS) {
        // Key by device id so each device gets its own first-visit state
        KeyedStream<BaseLogVO, String> midKeyDS = filterDS.keyBy(baseLog -> baseLog.getCommon().getMid());
        return midKeyDS.map(new RichMapFunction<BaseLogVO, BaseLogVO>() {
            /**
             * Timestamp (ms) of the first visit seen for the current device.
             */
            private ValueState<Long> lastVisitDateState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<Long> valueStateDescriptor = new ValueStateDescriptor<>("lastVisitDateState", Long.class);
                // NOTE(review): a 10-second TTL looks like a dev/test value —
                // first-visit state normally needs to survive much longer; confirm
                valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(10))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build());
                lastVisitDateState = getRuntimeContext().getState(valueStateDescriptor);
            }

            @Override
            public BaseLogVO map(BaseLogVO baseLog) throws Exception {
                // Flag claimed by the client: "1" = new user, "0" = old user
                String isNew = baseLog.getCommon().getIsNew();
                // First-visit timestamp recorded in state (ms), null if unseen
                Long ts = lastVisitDateState.value();
                Long logTs = baseLog.getTs();
                if (FlinkConstant.ONE.equals(isNew)) {
                    if (ObjectUtil.isNull(ts)) {
                        // First record for this device: remember its timestamp
                        lastVisitDateState.update(logTs);
                    } else {
                        // NOTE(review): millisecond-precision comparison — any later
                        // event flips the flag; confirm day-granularity isn't intended
                        if (!logTs.equals(ts)) {
                            baseLog.getCommon().setIsNew(FlinkConstant.ZERO);
                        }
                    }
                } else {
                    // Claimed old user with no state yet: back-date the state by a
                    // day so this and future records keep reading as old
                    if (ObjectUtil.isNull(ts)) {
                        baseLog.getCommon().setIsNew(FlinkConstant.ONE);
                        lastVisitDateState.update(logTs - ONE_DAY_MS);
                    }
                }
                return baseLog;
            }
        });
    }


    /**
     * <p>Parses the raw Kafka records into {@link BaseLogVO}; records that fail
     * to parse are logged (with stack trace) and routed to the dirty-data topic
     * instead of failing the job.</p>
     *
     * @param kafkaStrDS raw Kafka string stream
     * @return stream of successfully parsed records
     * @author bu.junjie
     * @date 2025/9/15 13:56
     */
    private SingleOutputStreamOperator<BaseLogVO> etl(DataStreamSource<String> kafkaStrDS) {
        OutputTag<String> dirtyTag = new OutputTag<String>("dirty") {
        };
        SingleOutputStreamOperator<BaseLogVO> maxwellDS = kafkaStrDS.process(new ProcessFunction<String, BaseLogVO>() {
            @Override
            public void processElement(String value, ProcessFunction<String, BaseLogVO>.Context ctx, Collector<BaseLogVO> out) throws Exception {
                try {
                    BaseLogVO baseLog = JSON.parseObject(value, new TypeReference<BaseLogVO>() {
                    }, Feature.SupportNonPublicField, Feature.AllowUnQuotedFieldNames);
                    out.collect(baseLog);
                } catch (Exception e) {
                    // Pass the exception as the last SLF4J arg so the stack trace
                    // is logged instead of being silently dropped
                    log.error("value=【{}】 转换异常", value, e);
                    ctx.output(dirtyTag, value);
                }
            }
        });
        // Divert unparseable records to a dedicated topic for later inspection
        SideOutputDataStream<String> dirtyDS = maxwellDS.getSideOutput(dirtyTag);
        KafkaSink<String> kafkaSink = FlinkSinkUtil.getKafkaSink(DIRTY_TOPIC);
        dirtyDS.sinkTo(kafkaSink);
        return maxwellDS;
    }
}
