package cn.com.itg.main;

import cn.com.itg.api.OthInrecordAPI;
import cn.com.itg.api.OthoutrecordAPI;
import cn.com.itg.api.SalesOutrecordAPI;
import cn.com.itg.common.BuildVOUtil;
import cn.com.itg.common.DescriptorUtil;
import cn.com.itg.common.PaperOutputTagUtil;
import cn.com.itg.function.process.*;
import cn.com.itg.function.source.ZtmmSource;
import cn.com.itg.function.source.ZttcidenticalSource;
import cn.com.itg.function.watetmarks.MatdocSendAPW;
import cn.com.itg.pojo.domain.entity.MATDOC;
import cn.com.itg.pojo.domain.entity.ZTMM;
import cn.com.itg.pojo.domain.entity.ZTTCIDENTICAL;
import cn.com.itg.pojo.domain.vo.MatdocSendVO;
import cn.com.itg.pojo.domain.vo.OthInSingleSaveVO;
import cn.com.itg.pojo.domain.vo.OthoutSingleSaveVO;
import cn.com.itg.pojo.domain.vo.SalesOutMergeSourceDataSaveVO;
import cn.com.itg.pojo.kafka.KafkaConsumerRecord;
import cn.com.itg.pojo.kafka.MatDocCDC;
import cn.com.itg.pojo.kafka.ZtmmCDC;
import cn.com.itg.pojo.kafka.ZttcidenticalCDC;
import cn.com.itg.util.SmartDateUtil;
import cn.com.itg.util.SpringContextHolder;
import com.alibaba.fastjson.JSON;
import com.github.lianjiatech.retrofit.spring.boot.core.RetrofitScan;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Properties;

@Slf4j
@SpringBootApplication(scanBasePackages = {"cn.com.itg"})
@MapperScan(basePackages = {"cn.com.itg.mapper"})
@EnableTransactionManagement
@RetrofitScan(basePackages = {"cn.com.itg.api"})
public class StreamPaperMatdoc {

    /**
     * Entry point of the paper material-document (MATDOC) streaming job.
     *
     * <p>Pipeline overview:
     * <ol>
     *   <li>Boot the Spring context so Flink functions can resolve beans via
     *       {@link SpringContextHolder} in their {@code open()} callbacks.</li>
     *   <li>Consume three SAP CDC topics from Kafka (MATDOC, ZTTCIDENTICAL, ZTMM)
     *       plus two DB-snapshot sources used to seed ZTTCIDENTICAL/ZTMM state.</li>
     *   <li>Merge each CDC stream with its DB snapshot, broadcast the reference
     *       data (ZQRWERKS / ZQRBWART rule lists and ZTMM), and enrich the keyed
     *       MATDOC stream against the broadcasts.</li>
     *   <li>Route the enriched records by {@code zritype} into three side outputs
     *       (goods receipt / goods issue / sales goods issue) and push each via
     *       its HTTP API sink.</li>
     * </ol>
     *
     * @param args standard command-line arguments, forwarded to Spring Boot
     * @throws Exception if job graph construction or execution fails
     */
    public static void main(String[] args) throws Exception {

        // Boot Spring first; sinks resolve their API beans through SpringContextHolder.
        ConfigurableApplicationContext applicationContext = SpringApplication.run(StreamPaperMatdoc.class, args);
        SpringContextHolder springContextHolder = new SpringContextHolder();
        springContextHolder.setApplicationContext(applicationContext);

        // 1) Initialize the Flink streaming environment: event time with a 200 ms
        //    auto-watermark interval (watermarks are emitted periodically).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.getConfig().setAutoWatermarkInterval(200);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Kafka consumer configuration.
        // NOTE(review): broker address and group id are hard-coded; consider
        // externalizing them to configuration.
        Properties kafkaProperties = getKafkaProperties("172.20.1.59:9092", "itg_paper_stream");

        // CDC consumer for MATDOC.
        FlinkKafkaConsumer<KafkaConsumerRecord> kafkaConsumer =
                new FlinkKafkaConsumer<KafkaConsumerRecord>(
                        "ITGDTP-SAP-MATDOC-TOPIC",
                        new PaperKafkaSchema(),
                        kafkaProperties
                );
        // CDC consumer for ZTTCIDENTICAL.
        FlinkKafkaConsumer<KafkaConsumerRecord> zttConsumer =
                new FlinkKafkaConsumer<KafkaConsumerRecord>(
                        "ITGDTP-SAP-ZTTCIDENTICAL-TOPIC",
                        new PaperKafkaSchema(),
                        kafkaProperties
                );
        // CDC consumer for ZTMM.
        FlinkKafkaConsumer<KafkaConsumerRecord> ztmmConsumer =
                new FlinkKafkaConsumer<KafkaConsumerRecord>(
                        "ITGDTP-SAP-ZTMM-TOPIC",
                        new PaperKafkaSchema(),
                        kafkaProperties
                );
        // Reference-data CDC streams start from today's timestamp; the MATDOC
        // consumer keeps the default start position (group offsets, falling back
        // to "earliest" per the consumer properties).
        zttConsumer.setStartFromTimestamp(SmartDateUtil.toDay().getTime());
        ztmmConsumer.setStartFromTimestamp(SmartDateUtil.toDay().getTime());

        /*
         * Source loading.
         */
        // ZTTCIDENTICAL DB snapshot — initial reference data; parallelism 1 so the
        // snapshot is emitted as a single consistent list.
        DataStreamSource<List<ZTTCIDENTICAL>> zttcidenticalSource = env.addSource(new ZttcidenticalSource()).setParallelism(1);
        // ZTMM DB snapshot — initial reference data.
        DataStreamSource<List<ZTMM>> ztmmSource = env.addSource(new ZtmmSource()).setParallelism(1);
        // MATDOC CDC stream.
        DataStreamSource<KafkaConsumerRecord> matdocSource = env.addSource(kafkaConsumer);
        // ZTTCIDENTICAL CDC stream.
        DataStreamSource<KafkaConsumerRecord> zttCdcSource = env.addSource(zttConsumer);
        // ZTMM CDC stream.
        DataStreamSource<KafkaConsumerRecord> ztmmCdcSource = env.addSource(ztmmConsumer);

        /*
         * Processing.
         */
        // ZTTCIDENTICAL: decode CDC records, tag insert/update side outputs, and
        // stamp each record with the Kafka record timestamp for event time.
        SingleOutputStreamOperator<ZTTCIDENTICAL> zttSideOut = zttCdcSource
                .name("zttCdcSource")
                .uid("zttCdcSource")
                .process(new ProcessFunction<KafkaConsumerRecord, ZTTCIDENTICAL>() {
                    @Override
                    public void processElement(KafkaConsumerRecord record, Context context, Collector<ZTTCIDENTICAL> collector) throws Exception {
                        ZttcidenticalCDC zttcidenticalCDC = JSON.parseObject(record.getValue(), ZttcidenticalCDC.class);
                        ZTTCIDENTICAL zttcidentical = zttcidenticalCDC.getAfter();
                        zttcidentical.setTimestamp(record.getTimeStamp());
                        if (Objects.equals(zttcidenticalCDC.getType(), "insert")) {
                            zttcidentical.setOpType("Insert");
                            context.output(PaperOutputTagUtil.ZTT_INSERT_TAG, zttcidentical);
                        } else if (Objects.equals(zttcidenticalCDC.getType(), "update")) {
                            zttcidentical.setOpType("Update");
                            context.output(PaperOutputTagUtil.ZTT_UPDATE_TAG, zttcidentical);
                        }
                        log.info("zttcidentical的cdc信息为:【{}】,类型为：【{}】", JSON.toJSONString(zttcidentical), zttcidentical.getOpType());
                        collector.collect(zttcidentical);
                    }
                });
        // Merge the ZTTCIDENTICAL CDC stream with the DB snapshot, assign event-time
        // watermarks from the record timestamps, then split the merged list into the
        // ZQRWERKS (plant rules) and ZQRBWART (movement-type rules) side outputs.
        SingleOutputStreamOperator<List<ZTTCIDENTICAL>> sideOutZtt = zttSideOut
                .connect(zttcidenticalSource)
                .process(new ZttDbConnectCdcFunction())
                .name("zttcidentical")
                .uid("zttcidentical")
                .assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks<List<ZTTCIDENTICAL>>() {
                    private long currentMaxTs = Long.MIN_VALUE;

                    @Nullable
                    @Override
                    public Watermark getCurrentWatermark() {
                        // Watermark tracks the maximum timestamp seen so far.
                        return new Watermark(currentMaxTs);
                    }

                    @Override
                    public long extractTimestamp(List<ZTTCIDENTICAL> zttcidenticals, long l) {
                        // Use the first element's timestamp for the whole batch.
                        long recordTimeStamp = zttcidenticals.stream().findFirst().orElseGet(ZTTCIDENTICAL::new).getTimestamp();
                        currentMaxTs = Math.max(recordTimeStamp, currentMaxTs);
                        return recordTimeStamp;
                    }
                })
                .process(new ProcessFunction<List<ZTTCIDENTICAL>, List<ZTTCIDENTICAL>>() {
                    @Override
                    public void processElement(List<ZTTCIDENTICAL> zttcidenticals, Context context, Collector<List<ZTTCIDENTICAL>> collector) throws Exception {
                        // Single pass: partition the rule list by Zstrc category.
                        List<ZTTCIDENTICAL> zqrwerks = new ArrayList<>();
                        List<ZTTCIDENTICAL> zqrbwart = new ArrayList<>();
                        for (ZTTCIDENTICAL zttcidentical : zttcidenticals) {
                            if (Objects.equals("ZQRWERKS", zttcidentical.getZstrc())) {
                                zqrwerks.add(zttcidentical);
                            } else if (Objects.equals("ZQRBWART", zttcidentical.getZstrc())) {
                                zqrbwart.add(zttcidentical);
                            }
                        }
                        log.info("ztt的ZQRWERKS侧流信息：【{}】", JSON.toJSONString(zqrwerks));
                        context.output(PaperOutputTagUtil.ZQRWERKS_TAG, zqrwerks);

                        log.info("ztt的ZQRBWART侧流信息：【{}】", JSON.toJSONString(zqrbwart));
                        context.output(PaperOutputTagUtil.ZQRBWART_TAG, zqrbwart);
                    }
                });
        // 1. Broadcast the ZQRWERKS and ZQRBWART side streams as rule state.
        BroadcastStream<List<ZTTCIDENTICAL>> zttZqrwerksBS = sideOutZtt
                .getSideOutput(PaperOutputTagUtil.ZQRWERKS_TAG)
                .broadcast(DescriptorUtil.getZttZqrwerksState());

        BroadcastStream<List<ZTTCIDENTICAL>> zttZqrbwartBS = sideOutZtt
                .getSideOutput(PaperOutputTagUtil.ZQRBWART_TAG)
                .broadcast(DescriptorUtil.getZttZqrbwartState());

        // ZTMM: decode CDC records and tag insert/update side outputs.
        SingleOutputStreamOperator<ZTMM> ztmmSideOut = ztmmCdcSource
                .name("ztmmCdcSource")
                .uid("ztmmCdcSource")
                .process(new ProcessFunction<KafkaConsumerRecord, ZTMM>() {
                    @Override
                    public void processElement(KafkaConsumerRecord record, Context context, Collector<ZTMM> collector) throws Exception {
                        ZtmmCDC ztmmCDC = JSON.parseObject(record.getValue(), ZtmmCDC.class);
                        ZTMM ztmm = ztmmCDC.getAfter();
                        ztmm.setTimestamp(record.getTimeStamp());
                        if (Objects.equals(ztmmCDC.getType(), "insert")) {
                            ztmm.setOpType("Insert");
                            context.output(PaperOutputTagUtil.ZTMM_INSERT_TAG, ztmm);
                        } else if (Objects.equals(ztmmCDC.getType(), "update")) {
                            ztmm.setOpType("Update");
                            context.output(PaperOutputTagUtil.ZTMM_UPDATE_TAG, ztmm);
                        }
                        log.info("ztmm的cdc信息为:【{}】,类型为：【{}】", JSON.toJSONString(ztmm), ztmm.getOpType());
                        collector.collect(ztmm);
                    }
                });
        // Merge the ZTMM CDC stream with the DB snapshot and broadcast it.
        BroadcastStream<List<ZTMM>> ztmmBS = ztmmSideOut
                .connect(ztmmSource)
                .process(new ZtmmDbConnectCdcFunction())
                .name("ztmm")
                .uid("ztmm")
                .assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks<List<ZTMM>>() {
                    private long currentMaxTs = Long.MIN_VALUE;

                    @Nullable
                    @Override
                    public Watermark getCurrentWatermark() {
                        return new Watermark(currentMaxTs);
                    }

                    @Override
                    public long extractTimestamp(List<ZTMM> ztmms, long l) {
                        // Use the first element's timestamp for the whole batch.
                        long recordTimeStamp = ztmms.stream().findFirst().orElseGet(ZTMM::new).getTimestamp();
                        currentMaxTs = Math.max(recordTimeStamp, currentMaxTs);
                        return recordTimeStamp;
                    }
                })
                .broadcast(DescriptorUtil.getZtmmState());

        // MATDOC: decode CDC records, key by material number and apply the first
        // (ZQRWERKS) broadcast-state filter.
        SingleOutputStreamOperator<MATDOC> matdocInfos = matdocSource
                .name("matdocSource")
                .uid("matdocSource")
                .map(new RichMapFunction<KafkaConsumerRecord, MATDOC>() {
                    @Override
                    public MATDOC map(KafkaConsumerRecord s) throws Exception {
                        MATDOC matdoc = new MATDOC();
                        MatDocCDC matDocCDC = JSON.parseObject(s.getValue(), MatDocCDC.class);
                        // Records of other CDC types fall through with an empty MATDOC.
                        if (Objects.equals(matDocCDC.getType(), "insert")) {
                            matdoc = matDocCDC.getAfter();
                            matdoc.setOpType("Insert");
                        } else if (Objects.equals(matDocCDC.getType(), "update")) {
                            matdoc = matDocCDC.getAfter();
                            matdoc.setOpType("Update");
                        }
                        matdoc.setTimestamp(s.getTimeStamp());
                        log.info("matdoc信息为:【{}】", JSON.toJSONString(matdoc));
                        return matdoc;
                    }
                })
                .assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks<MATDOC>() {
                    private long currentMaxTs = Long.MIN_VALUE;

                    @Nullable
                    @Override
                    public Watermark getCurrentWatermark() {
                        return new Watermark(currentMaxTs);
                    }

                    @Override
                    public long extractTimestamp(MATDOC record, long l) {
                        long recordTimeStamp = record.getTimestamp();
                        currentMaxTs = Math.max(recordTimeStamp, currentMaxTs);
                        return recordTimeStamp;
                    }
                })
                .keyBy(MATDOC::getMatnr)
                // First join: filter by the ZQRWERKS (plant) rules.
                .connect(zttZqrwerksBS)
                .process(new MatdocZqrwerksBPFunction());
        matdocInfos.print("matdocInfos");
        // Second join: enrich with the ZQRBWART (movement-type) rules, then
        // downgrade unreferenced sales issues (A30001 without reference docs) to
        // plain goods issues (A10001).
        KeyedStream<MatdocSendVO, String> matdocSendZttKeyedStream = matdocInfos
                .connect(zttZqrbwartBS)
                .process(new MatdocZqrbwartBPFunction())
                .process(new ProcessFunction<MatdocSendVO, MatdocSendVO>() {
                    @Override
                    public void processElement(MatdocSendVO matdocSendVO, Context context, Collector<MatdocSendVO> collector) throws Exception {
                        if (Objects.equals(matdocSendVO.getZritype(), "A30001")
                                && StringUtils.isBlank(matdocSendVO.getZrefdoc())
                                && StringUtils.isBlank(matdocSendVO.getZrefdoc1())) {
                            matdocSendVO.setZritype("A10001");
                            matdocSendVO.setZrefdoc("");
                            matdocSendVO.setZrefdoc1("");
                            matdocSendVO.setZrefflag("");
                        }
                        collector.collect(matdocSendVO);
                    }
                })
                .name("filter by zttcidentical")
                .keyBy(MatdocSendVO::getMatnr);

        // Third join: enrich with ZTMM, then route by zritype into the
        // GR (receipt) / GI (issue) / SGI (sales issue) side outputs.
        SingleOutputStreamOperator<MatdocSendVO> sendOutput = matdocSendZttKeyedStream
                .connect(ztmmBS)
                .process(new MatdocZtmmBPFunction())
                .name("join ztmm")
                .assignTimestampsAndWatermarks(new MatdocSendAPW())
                .process(new ProcessFunction<MatdocSendVO, MatdocSendVO>() {
                    @Override
                    public void processElement(MatdocSendVO matdocSend, Context context, Collector<MatdocSendVO> collector) throws Exception {
                        log.info("matdocSend info is :{}", JSON.toJSONString(matdocSend));
                        if (Objects.equals("A08001", matdocSend.getZritype())) {
                            // Goods receipt.
                            context.output(PaperOutputTagUtil.GR_TAG, matdocSend);
                        } else if (Objects.equals("A10001", matdocSend.getZritype())) {
                            // Goods issue.
                            context.output(PaperOutputTagUtil.GI_TAG, matdocSend);
                        } else if (Objects.equals("A30001", matdocSend.getZritype())) {
                            // Sales goods issue.
                            context.output(PaperOutputTagUtil.SGI_TAG, matdocSend);
                        }
                    }
                })
                .name("gr/gi/sgi side out")
                .uid("gr/gi/sgi side out");
        matdocSendZttKeyedStream.print("matdocSendZttKeyedStream");
        // Push goods-receipt records over HTTP.
        sendOutput.getSideOutput(PaperOutputTagUtil.GR_TAG)
                .addSink(new RichSinkFunction<MatdocSendVO>() {
                    private OthInrecordAPI othInrecordAPI;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        // Resolve the receipt API bean via the static holder; do NOT
                        // capture the local Spring context here — that would make the
                        // sink non-serializable.
                        othInrecordAPI = SpringContextHolder.getBean("othInrecordAPI");
                    }

                    @Override
                    public void invoke(MatdocSendVO value, Context context) throws Exception {
                        OthInSingleSaveVO othInSingleSaveVO = BuildVOUtil.buildOthInSingleSaveVO(value);
                        log.info("send : {}", JSON.toJSONString(othInSingleSaveVO));
                        Object r = othInrecordAPI.singleSave(othInSingleSaveVO);
                        log.info("gr http callback is : {}", JSON.toJSONString(r));
                    }
                })
                .name("gr send by http");

        // Push goods-issue records over HTTP.
        sendOutput.getSideOutput(PaperOutputTagUtil.GI_TAG)
                .addSink(new RichSinkFunction<MatdocSendVO>() {
                    private OthoutrecordAPI othoutrecordAPI;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        // Goods-issue API bean.
                        othoutrecordAPI = SpringContextHolder.getBean("othoutrecordAPI");
                    }

                    @Override
                    public void invoke(MatdocSendVO value, Context context) throws Exception {
                        OthoutSingleSaveVO othoutSingleSaveVO = BuildVOUtil.buildOthoutSingleSaveVO(value);
                        log.info("send : {}", JSON.toJSONString(othoutSingleSaveVO));
                        Object othOutResponse = othoutrecordAPI.singleSave(othoutSingleSaveVO);
                        log.info("gi http callback is : {}", JSON.toJSONString(othOutResponse));
                    }
                }).name("gi send by http");
        // Push sales goods-issue records over HTTP.
        sendOutput.getSideOutput(PaperOutputTagUtil.SGI_TAG)
                .addSink(new RichSinkFunction<MatdocSendVO>() {
                    private SalesOutrecordAPI salesOutrecordAPI;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        // Sales goods-issue API bean.
                        salesOutrecordAPI = SpringContextHolder.getBean("salesOutrecordAPI");
                    }

                    @Override
                    public void invoke(MatdocSendVO value, Context context) throws Exception {
                        SalesOutMergeSourceDataSaveVO salesOutMergeSourceDataSaveVO = BuildVOUtil.buildSalesOutMergeSourceDataSaveVO(value);
                        log.info("send : {}", JSON.toJSONString(salesOutMergeSourceDataSaveVO));
                        Object salesOutResponse = salesOutrecordAPI.mergeSourceDataSave(salesOutMergeSourceDataSaveVO);
                        log.info("sgi http callback is : {}", JSON.toJSONString(salesOutResponse));
                    }
                })
                .name("sgi send by http");

        env.execute();
    }

    /**
     * Builds the Kafka consumer properties shared by all three CDC consumers.
     *
     * @param bootstrap Kafka bootstrap servers (host:port)
     * @param group     consumer group id
     * @return the populated consumer {@link Properties}
     */
    private static Properties getKafkaProperties(String bootstrap, String group) {
        Properties properties = new Properties();
        // Kafka broker address.
        properties.setProperty("bootstrap.servers", bootstrap);
        // Consumer group id.
        properties.setProperty("group.id", group);
        // Dynamically detect partition/topic changes every 30 s.
        properties.setProperty("flink.partition-discovery.interval-millis", "30000");

        // Key/value deserializers.
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Offset reset policy when no committed offset exists.
        properties.setProperty("auto.offset.reset", "earliest");
        return properties;
    }

}
