package org.myfram.flink.flinkonjar.application.job;

import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.connector.hbase.sink.HBaseSinkFunction;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.myfram.flink.flinkonjar.common.config.KafkaConfig;
import org.myfram.flink.flinkonjar.common.connector.hbase.sink.HBaseSinkBuilder;
import org.myfram.flink.flinkonjar.common.connector.hbase.source.HBaseSourceBuilder;
import org.myfram.flink.flinkonjar.common.connector.kafka.source.KafkaSourceBuilder;
import org.myfram.flink.flinkonjar.common.properties.HBaseProperties;
import org.myfram.flink.flinkonjar.common.properties.KafkaProperties;
import org.myfram.flink.flinkonjar.common.util.YamlUtils;
import org.myfram.flink.flinkonjar.domain.entity.KafkaJoinHBase.*;
import org.myfram.flink.flinkonjar.domain.process.kafkaJoinHbase.BroadcastJoinFunction;
import org.myfram.flink.flinkonjar.domain.process.kafkaJoinHbase.KafkaUserFieldProcessFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

/**
 * Flink job that enriches a Kafka stream of user events with an HBase dimension
 * table via broadcast state, then prints the joined records.
 *
 * <p>Topology: Kafka source → parse/keyBy(id) → connect(broadcast HBase dimension
 * stream) → {@link BroadcastJoinFunction} → print.
 */
public class KafkaJoinHbaseJob extends BaseFlinkJob {

    // Pass the Class itself; getLogger(Class) is the SLF4J idiom and equivalent to getName().
    private static final Logger log = LoggerFactory.getLogger(KafkaJoinHbaseJob.class);

    /**
     * Descriptor for the broadcast state holding the HBase dimension rows,
     * keyed by row id. Shared with {@link BroadcastJoinFunction}, which reads
     * the same descriptor to look up dimension records.
     */
    public static final MapStateDescriptor<String, HBaseDimensionTable> DIMENSION_STATE_DESCRIPTOR =
            new MapStateDescriptor<>(
                    "dimensionBroadcastState",
                    String.class,
                    HBaseDimensionTable.class
            );

    /**
     * Builds and wires the streaming topology on the given environment.
     *
     * @param env the Flink streaming execution environment supplied by {@code BaseFlinkJob}
     * @throws Exception if configuration loading or topology construction fails
     */
    @Override
    protected void process(StreamExecutionEnvironment env) throws Exception {
        // Load Kafka/HBase connector settings from the application YAML.
        KafkaProperties kafkaProperties = YamlUtils.getObject("flink.kafka", KafkaProperties.class);
        HBaseProperties hBaseProperties = YamlUtils.getObject("flink.hbase", HBaseProperties.class);
        Properties sourceProps = KafkaConfig.getSourceProps(kafkaProperties.getSource());

        // Kafka source: raw string records from the configured topic.
        DataStreamSource<String> kafkaSource =
                KafkaSourceBuilder.build(env, kafkaProperties.getSource().getTopic(), sourceProps);

        // Parse raw records into user events and key by id so downstream state
        // (and the broadcast join) is partitioned per user id.
        KeyedStream<KafkaSourceUserEvent, String> kafkaStream = kafkaSource
                .map(new KafkaUserFieldProcessFunction())
                .map(u -> {
                    // Pass the object directly: SLF4J formats it lazily and
                    // handles null, unlike an eager u.toString() call.
                    log.info("Kafka数据：{}", u);
                    return u;
                })
                .keyBy(KafkaSourceUserEvent::getId);

        // HBase dimension-table source: reads table "test_flink_jar",
        // column family "info", column "name", converting each row with
        // HBaseDimensionRowConvert.
        DataStreamSource<HBaseDimensionTable> hbaseDimensionSource = env.addSource(
                new HBaseSourceBuilder<HBaseDimensionTable>(hBaseProperties)
                        .setTableName("test_flink_jar")
                        .addColumnFamily("info")
                        .addColumn("info", "name")
                        .setCacheSize(500)
                        .setHbaseRowConvert(new HBaseDimensionRowConvert())
                        .build(),
                "hbaseDimensionSource",
                TypeInformation.of(HBaseDimensionTable.class)
        );

        // Broadcast the dimension rows to every parallel instance of the join operator.
        BroadcastStream<HBaseDimensionTable> dimensionBroadcastStream =
                hbaseDimensionSource.broadcast(DIMENSION_STATE_DESCRIPTOR);

        // Join each keyed Kafka event against the broadcast dimension state.
        SingleOutputStreamOperator<KafkaHBaseJoinedResult> joinedStream = kafkaStream
                .connect(dimensionBroadcastStream)
                .process(new BroadcastJoinFunction());
        joinedStream.print();

        // NOTE(review): this sink is constructed but never attached to a stream —
        // the joinedStream.map(...).addSink(hbaseSink) wiring was removed/disabled.
        // Confirm whether persisting the join result to HBase is still intended;
        // if not, this line can be deleted.
        HBaseSinkFunction<String> hbaseSink =
                new HBaseSinkBuilder<String>(new HBaseSinkTable(), hBaseProperties).build();
    }
}