package net.bwie.dt.job.clear;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import utils.DorisUtil;
import utils.KafkaUtil;
import utils.MysqlCdcUtil;

/**
 * Flink streaming job that cleans product-change records captured from MySQL via CDC.
 *
 * <p>Pipeline: MySQL CDC source ({@code dt_realtime_ranking.dwd_product_table_info})
 * → extract/normalize the {@code after} image into a fixed column order
 * → drop records without a {@code produce_id}
 * → sink to Kafka topic {@code dwd_new_product_table}.
 * Records that fail parsing (or have no {@code after} image) are routed to a side
 * output and written to Kafka topic {@code dirty_log}.
 */
public class ProductDataCleaning {
    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Source: MySQL CDC stream of raw change-event JSON strings
        DataStream<String> mysqlStream = MysqlCdcUtil.cdcMysqlDeser(env, "dt_realtime_ranking", "dwd_product_table_info");

        // 3. Transformation: clean then filter
        DataStream<String> processMysqlStream = processMysql(mysqlStream);
        processMysqlStream.print("processMysql");

        // 4. Sink: cleaned records to Kafka
        KafkaUtil.producerKafka(processMysqlStream, "dwd_new_product_table");

        // 5. Trigger execution
        env.execute("ProductDataCleaning");
    }

    /**
     * Applies the two transformation stages in order: cleaning (extract the
     * {@code after} image, side-output dirty records) then filtering (drop
     * records lacking a {@code produce_id}).
     *
     * @param stream raw CDC change-event JSON strings
     * @return cleaned and filtered record stream
     */
    private static DataStream<String> processMysql(DataStream<String> stream) {
        DataStream<String> cleanStream = dataClean(stream);
        return dataFilter(cleanStream);
    }

    /**
     * Keeps only records whose {@code produce_id} field is present and non-null.
     *
     * <p>Input records are produced by {@link #dataClean}, so they are always
     * well-formed JSON objects.
     *
     * @param stream cleaned JSON record stream
     * @return stream with id-less records removed
     */
    private static DataStream<String> dataFilter(DataStream<String> stream) {
        return stream.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String value) throws Exception {
                // A record is kept iff it carries a usable produce_id.
                Long produceId = JSON.parseObject(value).getLong("produce_id");
                return produceId != null;
            }
        });
    }

    /**
     * Extracts the {@code after} image of each CDC event and re-emits it as a
     * JSON object with a fixed key order (produce_id, produce_name, number,
     * action_type, ts). Records that cannot be parsed, or that carry no
     * {@code after} image (e.g. deletes), are routed to the {@code dirty} side
     * output and forwarded to Kafka topic {@code dirty_log}.
     *
     * @param stream raw CDC change-event JSON strings
     * @return main output stream of normalized JSON records
     */
    private static DataStream<String> dataClean(DataStream<String> stream) {
        // Side output for records we cannot clean (dirty/malformed data).
        OutputTag<String> dirtyLog = new OutputTag<String>("dirty") {};
        SingleOutputStreamOperator<String> streamOperator = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                try {
                    JSONObject after = JSON.parseObject(value).getJSONObject("after");
                    if (after == null) {
                        // No after-image (e.g. delete event): treat as dirty.
                        ctx.output(dirtyLog, value);
                        return;
                    }
                    // Rebuild the record with keys in table-column order
                    // (JSONObject(true) preserves insertion order).
                    JSONObject orderedJson = new JSONObject(true);
                    orderedJson.put("produce_id", after.get("produce_id"));
                    orderedJson.put("produce_name", after.get("produce_name"));
                    orderedJson.put("number", after.get("number"));
                    orderedJson.put("action_type", after.get("action_type"));
                    orderedJson.put("ts", after.get("ts"));
                    out.collect(orderedJson.toJSONString());
                } catch (Exception e) {
                    // Unparseable payload: deliberately route to the side
                    // output instead of failing the job.
                    ctx.output(dirtyLog, value);
                }
            }
        });
        // Persist dirty records for later inspection.
        SideOutputDataStream<String> dirty = streamOperator.getSideOutput(dirtyLog);
        KafkaUtil.producerKafka(dirty, "dirty_log");
        return streamOperator;
    }
}