package com.ods;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

/**
 * Reads raw log records from the Kafka topic "ods_base_log", drops dirty
 * (unparsable, non-JSON) records, and routes records containing a "common"
 * field to the "dwd_common_log" topic and records containing a "page" field
 * to "dwd_page_log".
 *
 * NOTE(review): the original comment also mentioned distinguishing new vs.
 * returning users, but that logic is not implemented in this class — confirm
 * whether it lives downstream or is still TODO.
 */
public class BaseLog {

    public static void main(String[] args) throws Exception {
        // Set up the streaming environment with a single parallel task.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Consume raw log records from the Kafka topic "ods_base_log"
        // (consumer group "bshw").
        FlinkKafkaConsumer<String> consumer = KafkaUtils.createConsumer("ods_base_log", "bshw");
        DataStreamSource<String> rawStream = env.addSource(consumer);

        // Keep only records that parse as JSON and carry the given top-level
        // key; malformed ("dirty") records are dropped silently.
        SingleOutputStreamOperator<String> commonStream =
                rawStream.filter(record -> hasTopLevelKey(record, "common"));
        SingleOutputStreamOperator<String> pageStream =
                rawStream.filter(record -> hasTopLevelKey(record, "page"));

        // Fan the cleaned streams out to their downstream DWD topics.
        commonStream.addSink(KafkaUtils.createProduer("dwd_common_log"));
        pageStream.addSink(KafkaUtils.createProduer("dwd_page_log"));

        env.execute();
    }

    /**
     * Returns {@code true} when {@code record} is valid JSON and contains
     * {@code key} at its top level; returns {@code false} for unparsable
     * (dirty) input instead of failing the job.
     */
    private static boolean hasTopLevelKey(String record, String key) {
        try {
            return JSON.parseObject(record).containsKey(key);
        } catch (Exception ignored) {
            // Dirty data is intentionally filtered out, not propagated.
            return false;
        }
    }
}
