package com.atguigu.wuliu.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.wuliu.bean.DwdBoundChuKuBean;
import com.atguigu.wuliu.bean.DwdBoundIRukuBean;
import com.atguigu.wuliu.bean.DwdBoundfenJianBean;
import com.atguigu.wuliu.bean.DwdOrderOrgBoundOriginBean;
import com.atguigu.wuliu.utils.DateFormatUtil;
import com.atguigu.wuliu.utils.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/*
* （1）从Kafka topic_db主题读取数据
（2）筛选中转流程数据
筛选原始表名为order_org_bound的数据即可。
（3）定义侧输出流标签
（4）分流
① 将数据类型由String转换为DwdOrderOrgBoundOriginBean。
② 入库操作发生时该表会插入一条数据，type为insert的即为入库业务过程的数据。补全时间戳字段和入库时间字段，数据类型由DwdOrderOrgBoundOriginBean转换为DwdBoundIRukuBean。
③ 分拣完成操作发生时sort_time字段会由null变更为具体时间，满足上述条件的即为分拣完成业务过程的数据。补全时间戳字段和分拣完成时间字段，数据类型由DwdOrderOrgBoundOriginBean转换为DwdBoundfenJianBean。
④ 出库操作发生时outbound_time字段会由null变更为具体时间，满足上述条件的即为出库业务过程的数据。补全时间戳字段和出库时间字段，数据类型由DwdOrderOrgBoundOriginBean转换为DwdBoundChuKuBean。
（5）提取侧输出流
（6）写入Kafka指定主题
* */
/**
 * DWD splitter for the transfer-center ("zhong zhuan") domain.
 *
 * <p>Reads raw CDC records from the Kafka topic {@code topic_db}, keeps only
 * change-log rows of the {@code order_org_bound} table, and splits them into
 * three DWD fact streams, each written to its own Kafka topic:
 * <ul>
 *   <li>inbound  — an {@code insert} event on the table            → {@code tms_dwd_bound_inbound}</li>
 *   <li>sorting  — {@code sort_time} transitions null → non-null    → {@code tms_dwd_bound_sort}</li>
 *   <li>outbound — {@code outbound_time} transitions null → non-null → {@code tms_dwd_bound_outbound}</li>
 * </ul>
 * The inbound stream is the main output of the process operator; sorting and
 * outbound are side outputs.
 */
public class DwdZhongApp {
    public static void main(String[] args) throws Exception {
        // TODO 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // TODO 2. Checkpointing (every 5 s, exactly-once) so Kafka offsets commit consistently.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // TODO 3. Read the raw CDC stream from Kafka.
        String topic = "topic_db";
        String groupId = "dwd_zhong_group";
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(topic, groupId);
        DataStreamSource<String> kafkaSourceDS =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafkaSource");

        // TODO 4. Keep only change-log records of the order_org_bound (transfer) table.
        SingleOutputStreamOperator<String> filterDS = kafkaSourceDS.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String jsonStr) throws Exception {
                JSONObject jsonObj = JSON.parseObject(jsonStr);
                String tableName = jsonObj.getString("table");
                return "order_org_bound".equals(tableName);
            }
        });

        // TODO 5. Side-output tags: main stream carries inbound, side streams carry sort/outbound.
        OutputTag<String> fenjianTag = new OutputTag<String>("fenjianTag") {};
        OutputTag<String> chukuTag = new OutputTag<String>("chukuTag") {};

        // TODO 6. Split the filtered stream by business process.
        SingleOutputStreamOperator<String> inboundDS = filterDS.process(
                new ProcessFunction<String, String>() {
                    @Override
                    public void processElement(String jsonStr, Context ctx, Collector<String> out) throws Exception {
                        JSONObject jsonObj = JSON.parseObject(jsonStr);
                        DwdOrderOrgBoundOriginBean data = jsonObj.getObject("data", DwdOrderOrgBoundOriginBean.class);
                        if (data == null) {
                            // Malformed/unexpected record: nothing to split; drop instead of NPE-ing.
                            return;
                        }
                        String id = data.getId();                 // transfer record id
                        String orderId = data.getOrderId();       // waybill id
                        String orgId = data.getOrgId();           // organization id
                        String type = jsonObj.getString("type");  // change-log operation type
                        if ("insert".equals(type)) {
                            // An insert on this table marks the inbound business process.
                            String inboundTime = data.getInboundTime();
                            DwdBoundIRukuBean inboundBean = DwdBoundIRukuBean.builder()
                                    .id(id)
                                    .orderId(orderId)
                                    .orgId(orgId)
                                    .inboundTime(inboundTime)
                                    .inboundEmpId(data.getInboundEmpId())
                                    .ts(DateFormatUtil.getTs(inboundTime))
                                    .build();
                            out.collect(JSON.toJSONString(inboundBean));
                        } else {
                            // Update events: compare the old image against the new row to
                            // detect sorting / outbound field transitions.
                            DwdOrderOrgBoundOriginBean old =
                                    jsonObj.getObject("old", DwdOrderOrgBoundOriginBean.class);
                            if (old == null) {
                                // No "old" image — a transition cannot be detected; skip.
                                return;
                            }
                            // Sorting completed: sort_time went from null to a concrete time.
                            String sortTime = data.getSortTime();
                            if (old.getSortTime() == null && sortTime != null) {
                                DwdBoundfenJianBean sortBean = DwdBoundfenJianBean.builder()
                                        .id(id)
                                        .orderId(orderId)
                                        .orgId(orgId)
                                        .sortTime(sortTime)
                                        .sorterEmpId(data.getSorterEmpId())
                                        .ts(DateFormatUtil.getTs(sortTime))
                                        .build();
                                ctx.output(fenjianTag, JSON.toJSONString(sortBean));
                            }
                            // Outbound: outbound_time went from null to a concrete time.
                            String outboundTime = data.getOutboundTime();
                            if (old.getOutboundTime() == null && outboundTime != null) {
                                DwdBoundChuKuBean outboundBean = DwdBoundChuKuBean.builder()
                                        .id(id)
                                        .orderId(orderId)
                                        .orgId(orgId)
                                        .outboundTime(outboundTime)
                                        .outboundEmpId(data.getOutboundEmpId())
                                        .ts(DateFormatUtil.getTs(outboundTime))
                                        .build();
                                ctx.output(chukuTag, JSON.toJSONString(outboundBean));
                            }
                        }
                    }
                }
        );

        // TODO 7. Extract the side-output streams.
        SideOutputDataStream<String> sortDS = inboundDS.getSideOutput(fenjianTag);
        SideOutputDataStream<String> outboundDS = inboundDS.getSideOutput(chukuTag);

        // TODO 8. Sink each stream to its DWD Kafka topic.
        String inboundTopic = "tms_dwd_bound_inbound";
        String sortTopic = "tms_dwd_bound_sort";
        String outboundTopic = "tms_dwd_bound_outbound";

        inboundDS.sinkTo(KafkaUtil.getKafkaSink(inboundTopic));
        sortDS.sinkTo(KafkaUtil.getKafkaSink(sortTopic));
        outboundDS.sinkTo(KafkaUtil.getKafkaSink(outboundTopic));

        env.execute("DwdZhongApp");
    }
}