package com.clw.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.clw.KafkaUtils;
import com.clw.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

/**
 * Flink job that splits the raw ODS log topic into DWD-layer topics.
 *
 * <p>Reads JSON log lines from the Kafka topic {@code ods_base_log}, then routes:
 * <ul>
 *   <li>records containing a top-level {@code "common"} key to {@code dwd_common_log}</li>
 *   <li>records containing a top-level {@code "page"} key to {@code dwd_page_log}</li>
 * </ul>
 * A record containing both keys is emitted to both topics; records that are not
 * valid JSON (or lack both keys) are dropped silently.
 */
public class BaseLogApp {

    public static void main(String[] args) throws Exception {

        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism of 1 keeps ordering simple; raise for production throughput.
        env.setParallelism(1);

        // 2. Build the Kafka consumer for the raw log topic.
        // NOTE(review): "safsfsd" looks like a placeholder consumer-group id —
        // kept as-is because changing it would reset committed offsets; confirm intent.
        FlinkKafkaConsumer<String> logSource = MyKafkaUtil.getKafkaConsumer("ods_base_log", "safsfsd");

        // 3. Attach the source to the environment.
        DataStreamSource<String> logStream = env.addSource(logSource);

        // 4. Route records by top-level JSON key (shared predicate avoids the
        // previously duplicated parse/filter lambdas).
        SingleOutputStreamOperator<String> commonLog = logStream.filter(x -> hasTopLevelKey(x, "common"));
        SingleOutputStreamOperator<String> pageLog = logStream.filter(x -> hasTopLevelKey(x, "page"));

        // 5. Sink each routed stream to its DWD Kafka topic.
        commonLog.addSink(KafkaUtils.createProduer("dwd_common_log"));
        pageLog.addSink(KafkaUtils.createProduer("dwd_page_log"));

        // 6. Launch the job with an explicit name so it is identifiable in the Flink UI.
        env.execute("BaseLogApp");
    }

    /**
     * Returns {@code true} if {@code json} parses as a JSON object containing
     * the given top-level {@code key}; malformed input returns {@code false}
     * instead of failing the pipeline.
     *
     * @param json raw record text, expected (but not guaranteed) to be a JSON object
     * @param key  top-level key to test for
     * @return whether the key is present; {@code false} on any parse error
     */
    private static boolean hasTopLevelKey(String json, String key) {
        try {
            JSONObject object = JSON.parseObject(json);
            return object != null && object.containsKey(key);
        } catch (Exception ignored) {
            // Best-effort routing: unparseable records are intentionally dropped.
            return false;
        }
    }
}
