package net.bwie.realtime.dwd.douyin.logs.job;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import com.alibaba.fastjson.JSONObject;

import net.bwie.realtime.jtp.utils.KafkaUtil;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink streaming job that cleans raw live-event logs.
 *
 * <p>Pipeline: consume the ODS event-log topic from Kafka, parse each record as
 * JSON (dropping malformed lines), keep only records whose {@code event_type}
 * is a known value, then print and write the cleaned stream to the DWD topic.
 */
public class LiveEventLogCleanJob {

    /**
     * Event types accepted by the cleaning step; any record whose
     * {@code event_type} is missing or not in this set is dropped.
     * (Values are the original Chinese business terms carried in the data.)
     */
    private static final Set<String> ALLOWED_EVENT_TYPES = new HashSet<>(Arrays.asList(
            "关注", "点击", "加购", "下单", "支付", "评论",
            "进入直播间", "退出直播间", "加入粉丝团"));

    public static void main(String[] args) throws Exception {

        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Consume the ODS live-event log topic from Kafka
        DataStream<String> eventStream = KafkaUtil.consumerKafka(env, "ods_live_event_log");
//        eventStream.print("event");

        // 3. Data cleaning: JSON parsing + null filtering + enum-value validation.
        //    Malformed records are mapped to null and removed by the first filter,
        //    so the enum filter can parse the string again without a try/catch.
        SingleOutputStreamOperator<String> cleanStream = eventStream
                .map(new MapFunction<String, String>() {
                    @Override
                    public String map(String s) throws Exception {
                        // Re-serialize to normalize the JSON; null marks an unparseable record.
                        try {
                            return JSONObject.parseObject(s).toJSONString();
                        } catch (Exception e) {
                            return null;
                        }
                    }
                })
                // Drop records that failed to parse
                .filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String s) throws Exception {
                        return s != null;
                    }
                })
                // Keep only records with a recognized event_type.
                // getString() returns null when the field is absent; contains()
                // is null-safe, unlike the former equals() chain which would
                // have thrown a NullPointerException on such records.
                .filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String s) throws Exception {
                        String eventType = JSONObject.parseObject(s).getString("event_type");
                        return ALLOWED_EVENT_TYPES.contains(eventType);
                    }
                });

        // 4. Print cleaned data (and also publish it to the DWD Kafka topic)
        cleanStream.print("cleanEventLog");

        KafkaUtil.producerKafka(cleanStream, "dwd_event_log");

        // 5. Submit the job
        env.execute("liveEventLogCleanJob");
    }


}
