import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.typeutils.MapTypeInfo;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import utils.ImpalaUtil;

import javax.sql.DataSource;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;

/**
 * Demo of Flink broadcast state: joins a keyed Kafka stream of Person events
 * with a broadcast stream of indicator configuration loaded from Impala.
 *
 * @ClassName RichSourceTest
 * @Description Broadcast-state double-stream join demo
 * @Author wushumin
 * @Date 2021-06-22 8:54
 * @Version 1.0
 **/
public class RichSourceTest {

    /** Key under which the indicator config map is stored in broadcast state. */
    public static final String INDICATOR_CONFIG_STATE = "IndicatorConfigState";

    /**
     * Descriptor shared by the {@code broadcast()} call and both process functions.
     * Declared {@code final} so the shared descriptor cannot be reassigned at runtime.
     */
    public static final MapStateDescriptor<String, Map<Long, Indicator>> stateDescriptor =
            new MapStateDescriptor<>(INDICATOR_CONFIG_STATE,
                    BasicTypeInfo.STRING_TYPE_INFO, new MapTypeInfo<>(Long.class, Indicator.class));


    /**
     * Wires up the job: a broadcast stream of indicator config (loaded from Impala by
     * {@link MyTableSource}) is connected to a keyed Kafka stream of Person events;
     * each event is enriched with its matching indicator and printed.
     *
     * @param args unused
     * @throws InterruptedException kept from the original signature
     */
    public static void main(String[] args) throws InterruptedException {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        // step1: indicator config stream, broadcast to every downstream subtask
        BroadcastStream<Map<Long, Indicator>> indicatorConfigBroadcastStream = environment
                .addSource(new MyTableSource())
                .broadcast(stateDescriptor);

        // step2: business data stream from Kafka
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "cdh1.lcbint.cn:9092,cdh2.lcbint.cn:9092,cdh3.lcbint.cn:9092");
        properties.setProperty("group.id", "testflink_dev");
        // typed consumer instead of a raw type, so no cast is needed in map() below
        FlinkKafkaConsumer011<String> kafkaConsumer011 =
                new FlinkKafkaConsumer011<>("testtopic", new SimpleStringSchema(), properties);
        DataStreamSource<String> kafkaDataStream = environment.addSource(kafkaConsumer011);

        // map each JSON record to a Person POJO; the lambda's generic type is erased,
        // so declare the output type explicitly for Flink's type extraction
        DataStream<WordCount.Person> personDataStream = kafkaDataStream
                .map(item -> JSON.parseObject(item, WordCount.Person.class))
                .returns(WordCount.Person.class);

        // NOTE(review): keyBy(getAge()) does not produce a String key, yet the process
        // function declares String as its key type; this only works because the key is
        // never read inside the function — confirm before depending on the current key.
        personDataStream.keyBy(item -> item.getAge())
                .connect(indicatorConfigBroadcastStream)
                .process(new MyKeydBroadCastProcessFunction())
                // generic output type is erased from the function, so state it explicitly
                .returns(String.class)
                .print();

        try {
            environment.execute("double stream cals");
        } catch (Exception e) {
            // kept from the original: report and swallow; rethrowing would change behavior
            e.printStackTrace();
        }
    }

    /**
     * One-shot source: loads the indicator config table from Impala and emits it as a
     * single {@code Map<id, Indicator>} for broadcasting. Rows missing
     * {@code dimension_name} or {@code indicator_table} are skipped.
     */
    public static class MyTableSource extends RichSourceFunction<Map<Long, Indicator>> {
        // static so the (non-serializable) Logger is not dragged into the serialized
        // function instance — RichSourceFunction must be Serializable
        private static final Logger log = LoggerFactory.getLogger(MyTableSource.class);
        private volatile boolean isRunning = true;

        @Override
        public void run(SourceContext<Map<Long, Indicator>> ctx) throws Exception {
            DataSource dataSource = ImpalaUtil.getImpalaDataSource();
            List<Map<String, Object>> datas = ImpalaUtil.executeQuery(dataSource,
                    "select * from information_schema.schema_indicator_config_info order by indicator_code", null);
            if (CollectionUtils.isEmpty(datas)) {
                // original fell through and NPE'd on datas.stream() when the result was
                // null; bail out instead — downstream treats "no broadcast state" the
                // same way it treats an empty map
                log.error("======>there is not indicator data");
                return;
            }
            Map<Long, Indicator> dataMap = datas.stream()
                    .filter(item -> item.get("dimension_name") != null && item.get("indicator_table") != null)
                    .map(item -> new Indicator(Long.valueOf(item.get("id").toString()),
                            item.get("indicator_code").toString(), item.get("dimension_code").toString(),
                            item.get("dimension_name").toString(), item.get("indicator_table").toString(),
                            item.get("indicator_biz_table").toString()))
                    // NOTE(review): toMap throws on duplicate ids — assumed unique in the
                    // config table; add a merge function if that assumption is wrong
                    .collect(Collectors.toMap(Indicator::getId, item -> item));
            log.info("======>query indicator data:{}", JSON.toJSONString(dataMap));
            ctx.collect(dataMap);
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    }

    /**
     * Immutable-in-practice config row for one indicator (no setters; populated only
     * via the full constructor). Flows through Flink state, hence Serializable.
     *
     * NOTE(review): "dimenstions" is a typo for "dimensions", but it is part of the
     * public getter name, so it is preserved for caller compatibility.
     */
    public static class Indicator implements Serializable {
        // explicit serialVersionUID so recompiles don't break state deserialization
        private static final long serialVersionUID = 1L;

        private Long id;
        private String indicatorCode;
        private String dimensionCode;
        private String dimenstions;
        private String indicatorTable;
        private String indicatorBizTable;

        public Long getId() {
            return id;
        }

        public String getIndicatorCode() {
            return indicatorCode;
        }

        public String getDimensionCode() {
            return dimensionCode;
        }

        public String getDimenstions() {
            return dimenstions;
        }

        public String getIndicatorTable() {
            return indicatorTable;
        }

        public String getIndicatorBizTable() {
            return indicatorBizTable;
        }

        /** No-arg constructor required for POJO serialization frameworks. */
        public Indicator() {
        }

        public Indicator(Long id, String indicatorCode, String dimensionCode, String dimenstions, String indicatorTable, String indicatorBizTable) {
            this.id = id;
            this.indicatorCode = indicatorCode;
            this.dimensionCode = dimensionCode;
            this.dimenstions = dimenstions;
            this.indicatorTable = indicatorTable;
            this.indicatorBizTable = indicatorBizTable;
        }

        @Override
        public String toString() {
            return "Indicator{" +
                    "id=" + id +
                    ", indicatorCode='" + indicatorCode + '\'' +
                    ", dimensionCode='" + dimensionCode + '\'' +
                    ", dimenstions='" + dimenstions + '\'' +
                    ", indicatorTable='" + indicatorTable + '\'' +
                    ", indicatorBizTable='" + indicatorBizTable + '\'' +
                    '}';
        }
    }

    /**
     * Joins each keyed Person event against the broadcast indicator config: looks up an
     * Indicator by the person's age and emits "name:dimensionCode:indicatorCode", or
     * logs an error when no indicator matches (or the config has not arrived yet).
     *
     * NOTE(review): the declared key type is String, but main() keys the stream by
     * getAge(); this works only because the key is never read here — confirm.
     */
    @Slf4j
    public static class MyKeydBroadCastProcessFunction
            extends KeyedBroadcastProcessFunction<String, WordCount.Person, Map<Long, Indicator>, String> {

        @Override
        public void processElement(WordCount.Person value, ReadOnlyContext ctx, Collector<String> out) throws Exception {
            ReadOnlyBroadcastState<String, Map<Long, Indicator>> broadcastState = ctx.getBroadcastState(stateDescriptor);
            Indicator indicator = null;
            // state may not contain the entry yet: broadcast config can arrive after data
            if (broadcastState.contains(INDICATOR_CONFIG_STATE)) {
                Map<Long, Indicator> indicatorConfigState = broadcastState.get(INDICATOR_CONFIG_STATE);
                indicator = indicatorConfigState.get(Long.valueOf(String.valueOf(value.getAge())));
            }

            log.info("======>process person data:{}", JSON.toJSONString(value));
            if (indicator != null) {
                // use Indicator's getters instead of reaching into its private fields
                out.collect(value.getName() + ":" + indicator.getDimensionCode() + ":" + indicator.getIndicatorCode());
            } else {
                log.error("======>there is not indicator for the person:{}", JSON.toJSONString(value));
            }
        }

        @Override
        public void processBroadcastElement(Map<Long, Indicator> values, Context ctx, Collector<String> out) throws Exception {
            BroadcastState<String, Map<Long, Indicator>> broadcastState = ctx.getBroadcastState(stateDescriptor);
            log.info("======>process broadcast data:{}", JSON.toJSONString(values));
            // replace the whole config map atomically under a single well-known key
            broadcastState.put(INDICATOR_CONFIG_STATE, values);
        }
    }
}
