package cn.itcast.task;

import cn.itcast.bean.CleanBean;
import cn.itcast.bean.StockBean;
import cn.itcast.config.QuotConfig;
import cn.itcast.function.KeyFunction;
import cn.itcast.function.MinStockWindowFunction;
import cn.itcast.inter.ProcessDataInterface;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.Properties;

/**
 * Per-minute (分时) stock quote task.
 *
 * <p>Pipeline: keyed 60s tumbling windows over the cleaned quote stream →
 * per-minute aggregation ({@link MinStockWindowFunction}) → split by exchange
 * (SSE stays on the main stream, SZSE goes to a side output) → JSON-encode →
 * sink each exchange to its own Kafka topic, from which Druid ingests.
 *
 * <p>Notes on the Druid hand-off:
 * <ul>
 *   <li>Druid ingests JSON, so the final beans are serialized with fastjson.</li>
 *   <li>The ingestion spec must be started and the Kafka topics created up front.</li>
 * </ul>
 */
public class StockMinTask implements ProcessDataInterface {
    @Override
    public void process(DataStream<CleanBean> waterData) {
        /*
         * Steps:
         *  1. declare side-output tag (SZSE records)
         *  2. key the stream
         *  3. 60-second tumbling window
         *  4. per-minute aggregation (window function)
         *  5. split main / side stream by source exchange
         *  6. convert each stream to JSON strings
         *  7. write each stream to its own Kafka topic
         */

        //todo 1. side-output tag: carries the converted SZSE (Shenzhen) records
        OutputTag<StockBean> szseOpt = new OutputTag<>("szseOpt", TypeInformation.of(StockBean.class));

        // Hoist the config lookup out of the per-record path: the original read
        // the property on every element inside processElement. A String is
        // Serializable, so the closure ships it to the task managers once.
        final String sseSource = QuotConfig.config.getProperty("sse.topic");

        //todo 2. key the stream
        SingleOutputStreamOperator<StockBean> processData = waterData.keyBy(new KeyFunction())
                //todo 3. 60-second tumbling window
                .timeWindow(Time.seconds(60))
                //todo 4. per-minute aggregation
                .apply(new MinStockWindowFunction()) // produces the per-minute quote bean
                //todo 5. split the stream by exchange
                .process(new ProcessFunction<StockBean, StockBean>() {
                    @Override
                    public void processElement(StockBean stockBean, Context context, Collector<StockBean> collector) throws Exception {
                        // Null-safe comparison (constant on the left): a record
                        // with a null source no longer NPEs the job — it is
                        // routed to the SZSE side output like any non-SSE record.
                        if (sseSource != null && sseSource.equals(stockBean.getSource())) {
                            collector.collect(stockBean); // SSE → main stream
                        } else {
                            context.output(szseOpt, stockBean); // SZSE → side output
                        }
                    }
                });

        //todo 6. convert both streams to JSON
        // Druid's datasource ingests JSON, so serialize the beans here.
        // One shared serializer instead of two identical anonymous classes;
        // Flink serializes it independently for each operator instance.
        MapFunction<StockBean, String> toJson = new MapFunction<StockBean, String>() {
            @Override
            public String map(StockBean stockBean) throws Exception {
                return JSON.toJSONString(stockBean);
            }
        };
        // SSE (Shanghai) per-minute quotes
        SingleOutputStreamOperator<String> sseStr = processData.map(toJson);
        // SZSE (Shenzhen) per-minute quotes
        SingleOutputStreamOperator<String> szseStr = processData.getSideOutput(szseOpt).map(toJson);

        //todo 7. sink each exchange to its own Kafka topic
        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", QuotConfig.config.getProperty("bootstrap.servers"));
        FlinkKafkaProducer011<String> sseKafkaPro = new FlinkKafkaProducer011<>(
                QuotConfig.config.getProperty("sse.stock.topic"),
                new SimpleStringSchema(), prop);
        FlinkKafkaProducer011<String> szseKafkaPro = new FlinkKafkaProducer011<>(
                QuotConfig.config.getProperty("szse.stock.topic"),
                new SimpleStringSchema(), prop);

        sseStr.addSink(sseKafkaPro);
        szseStr.addSink(szseKafkaPro);
    }
}
