package net.bwie.realtime.jtp.dwd.log.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bwie.realtime.jtp.utils.DorisUtil;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

/**
 * ODS -> DWD cleaning job for live-room event logs.
 *
 * <p>Pipeline: consume raw JSON events from Kafka topic {@code ods_live_event_log},
 * drop malformed/incomplete records, re-derive {@code amount} inside
 * {@code event_properties} as {@code product_price * order_qty}, and publish the
 * cleaned records to Kafka topic {@code dwd_live_room_event_log}.
 */
public class DouYinOdsJob {

    // Fields every valid record must carry at the top level; records missing any
    // of them (or carrying null) are discarded during cleaning.
    private static final String[] REQUIRED_FIELDS = {
            "live_room_id", "anchor_id", "platform", "start_time", "end_time", "live_duration_sec",
            "user_id", "region", "source_id", "source_type",
            "sku_id", "product_name", "product_price", "img_url",
            "event_type", "event_properties", "event_time"
    };

    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpoint every 3s so Kafka offsets survive restarts.
        env.enableCheckpointing(3000L);

        // 2. Source: raw event-log strings from the ODS Kafka topic
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "ods_live_event_log");

        // 3. Parse JSON. FIX: the original used map(JSON::parseObject), so a single
        // malformed message threw and failed/restarted the whole job; for a cleaning
        // job we instead drop unparseable records.
        SingleOutputStreamOperator<JSONObject> jsonStream = kafkaStream.process(
                new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String raw, Context context, Collector<JSONObject> collector) {
                        try {
                            JSONObject parsed = JSON.parseObject(raw);
                            if (parsed != null) {
                                collector.collect(parsed);
                            }
                        } catch (Exception ignored) {
                            // Malformed JSON: skip the record by design.
                        }
                    }
                });

        // 4. Data cleaning: required-field validation + amount re-derivation
        SingleOutputStreamOperator<String> cleanedStream = jsonStream.process(
                new ProcessFunction<JSONObject, String>() {
                    @Override
                    public void processElement(JSONObject jsonObject, Context context, Collector<String> collector) {

                        // Required-field validation: drop incomplete records.
                        for (String field : REQUIRED_FIELDS) {
                            if (!jsonObject.containsKey(field) || jsonObject.get(field) == null) {
                                return;
                            }
                        }

                        // Parse event_properties (may itself be a nested object or a
                        // JSON string; getJSONObject handles both in fastjson).
                        JSONObject eventProps = jsonObject.getJSONObject("event_properties");
                        if (eventProps == null) {
                            return; // unparseable event_properties: drop the record
                        }

                        // Re-derive amount = product_price * order_qty.
                        Double productPrice = eventProps.getDouble("product_price");
                        if (productPrice == null) {
                            // FIX: product_price is a validated top-level field; the
                            // original only looked inside event_properties and silently
                            // skipped the amount correction when the copy was absent.
                            productPrice = jsonObject.getDouble("product_price");
                        }
                        Integer orderQty = eventProps.getInteger("order_qty");
                        if (productPrice != null && orderQty != null) {
                            // NOTE(review): double arithmetic for money keeps the original
                            // semantics; consider BigDecimal if exact cents matter.
                            eventProps.put("amount", productPrice * orderQty);
                        }

                        // Write back: required when getJSONObject parsed a string copy
                        // rather than returning the nested object itself.
                        jsonObject.put("event_properties", eventProps);

                        collector.collect(jsonObject.toJSONString());
                    }
                });

        cleanedStream.print("cleanedStream=========>");

        // 5. Sink: publish cleaned records to the DWD Kafka topic
        KafkaUtil.producerKafka(cleanedStream, "dwd_live_room_event_log");

        // 6. Trigger execution
        env.execute("DWD_LiveRoomEventLog_Job");
    }
}
