package com.qingyunge.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.qingyunge.bean.SpPlCountNumBean;
import com.qingyunge.util.MyClickHouseUtil;
import com.qingyunge.util.MyKafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
/**
 * DWS-layer Flink job that maintains two running totals read from DWD Kafka
 * topics and writes them to the ClickHouse table {@code dws_sp_pl_count_num}:
 * <ul>
 *   <li>row id 1 — total SKU records ("商品总数") from {@code dwd_traffic_sku}</li>
 *   <li>row id 2 — total evaluation records ("总评论量") from {@code dwd_traffic_eval}</li>
 * </ul>
 */
public class Dws_sp_pl_count_num {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);

        String productEvalTopic = "dwd_traffic_eval";
        String skuTopic = "dwd_traffic_sku";

        // Running count of product (SKU) records.
        SingleOutputStreamOperator<SpPlCountNumBean> spReduce =
                countStream(env, skuTopic, "topicSpCountNum", "商品总数");

        // Running count of evaluation (comment) records.
        // NOTE(review): the original passed consumer group "topicSpCountNum" here as
        // well — a copy-paste slip (the commented-out variant used "topicPlCountNum").
        // Each stream now has its own consumer group.
        SingleOutputStreamOperator<SpPlCountNumBean> plReduce =
                countStream(env, productEvalTopic, "topicPlCountNum", "总评论量");

        spReduce.print("==");
        spReduce.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_sp_pl_count_num values(1,?,?,?)"));

        plReduce.print("==");
        plReduce.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_sp_pl_count_num values(2,?,?,?)"));

        env.execute("DwsCpClCountNum");
    }

    /**
     * Builds a keyed running-count stream over a Kafka topic: every record that
     * parses as JSON contributes 1 to the total kept under the fixed label
     * {@code name}; malformed records are logged and skipped.
     *
     * <p>Anonymous classes (not lambdas) are used on purpose so Flink can extract
     * the generic type information without explicit {@code returns(...)} hints.
     *
     * @param env     the stream execution environment
     * @param topic   source Kafka topic
     * @param groupId Kafka consumer group id for this stream
     * @param name    display label used both as the bean id and the keyBy key
     * @return a stream of beans whose {@code num} field holds the running total
     */
    private static SingleOutputStreamOperator<SpPlCountNumBean> countStream(
            StreamExecutionEnvironment env, String topic, String groupId, String name) {
        return env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId))
                .flatMap(new FlatMapFunction<String, SpPlCountNumBean>() {
                    @Override
                    public void flatMap(String s, Collector<SpPlCountNumBean> collector) throws Exception {
                        try {
                            JSONObject jsonObject = JSONObject.parseObject(s);
                            // assumes the DWD record carries an epoch "ts" field — TODO confirm
                            Long ts = jsonObject.getLong("ts");
                            collector.collect(new SpPlCountNumBean(name, 1L, ts));
                        } catch (Exception e) {
                            // Best-effort parse: skip malformed records rather than failing the job.
                            e.printStackTrace();
                        }
                    }
                })
                .keyBy(new KeySelector<SpPlCountNumBean, String>() {
                    @Override
                    public String getKey(SpPlCountNumBean bean) throws Exception {
                        return bean.getId();
                    }
                })
                .reduce(new ReduceFunction<SpPlCountNumBean>() {
                    @Override
                    public SpPlCountNumBean reduce(SpPlCountNumBean acc, SpPlCountNumBean next) throws Exception {
                        // Reuse the left-hand bean as the accumulator; its original ts is kept
                        // (the incoming record's ts is discarded, as in the original code).
                        acc.setNum(acc.getNum() + next.getNum());
                        return acc;
                    }
                });
    }
}
