package com.zzw.demo.text_A;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zzw.demo.util.KafkaUtil;
import com.zzw.demo.util.MySinkHbase;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class Text05 {
    /**
     * DWD-layer job: reads order events from Kafka, tags users as new ("is_new" == "1")
     * or returning ("is_new" == "0"), and routes each group to its own Kafka topic.
     * Also forwards user-dimension records from Kafka into HBase.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps ordering simple for this exercise job.
        env.setParallelism(1);

        // Source: order events carrying the "is_new" flag.
        DataStreamSource<String> in = env.addSource(KafkaUtil.kafkaSource("dwd_order_info_is_new", "text05"));

        // Parse each JSON record exactly once and share the parsed stream between
        // both filters (the original pipeline parsed the same input twice).
        SingleOutputStreamOperator<JSONObject> parsed = in.map(x -> JSON.parseObject(x));

        // New users ("is_new" == "1") go to their own topic.
        parsed.filter(x -> "1".equals(x.getString("is_new")))
              .map(x -> JSON.toJSONString(x))
              .addSink(KafkaUtil.kafkaProducer("dwd_is_new_1"));

        // Returning users ("is_new" == "0") go to a separate topic.
        parsed.filter(x -> "0".equals(x.getString("is_new")))
              .map(x -> JSON.toJSONString(x))
              .addSink(KafkaUtil.kafkaProducer("dwd_is_new_0"));

        // User-dimension data is mirrored into HBase via the custom sink.
        DataStreamSource<String> userInfo = env.addSource(KafkaUtil.kafkaSource("dwd_user_info", "text05"));
        userInfo.addSink(new MySinkHbase());

        // Name the job so it is identifiable in the Flink web UI.
        env.execute("Text05-new-old-user-split");
    }
}























































