package net.bwie.dt.job.Doris;

import net.bwie.dt.bean.ProductBean;
import net.bwie.dt.funtion.dtBeanMapFuctionProduct;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import utils.DorisUtil;
import utils.KafkaUtil;

public class ProductToDoris {
    /**
     * Flink streaming job: consumes product records from the Kafka topic
     * {@code dwd_new_product_table}, flattens each record into a CSV line
     * ({@code produce_id,produce_name,ts}), and writes the result into the
     * Doris table {@code dt_realtime_ranking.product_table}.
     */
    public static void main(String[] args) throws Exception {
        // Execution environment. Parallelism 1 keeps a single output ordering;
        // checkpointing every 3s enables fault-tolerant delivery to the sink.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(3000L);

        // Consume the DWD product topic from Kafka.
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "dwd_new_product_table");

        // Transform raw payloads into CSV rows for Doris.
        DataStream<String> resultStream = handle(kafkaStream);
        resultStream.print();

        // Sink into Doris (database: dt_realtime_ranking, table: product_table).
        DorisUtil.saveToDoris(
                resultStream,
                "dt_realtime_ranking",
                "product_table"
        );

        // Trigger job execution.
        env.execute("ProductToDoris");
    }

    /**
     * Parses each raw Kafka string into a {@link ProductBean} and renders it
     * as a comma-separated line: {@code produce_id,produce_name,ts}.
     *
     * @param stream raw Kafka payloads
     * @return CSV-formatted rows ready for the Doris sink
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // Convert raw strings to beans via the project-provided map function.
        SingleOutputStreamOperator<ProductBean> beanStream = stream.map(new dtBeanMapFuctionProduct());

        // A 1:1 stateless transform needs no timers or side outputs, so a plain
        // map is the right operator here — ProcessFunction was overkill.
        return beanStream.map(bean ->
                bean.getProduce_id() + "," + bean.getProduce_name() + "," + bean.getTs());
    }
}
