package demo.kafka;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import com.we.flink.deserialization.PoJoDeserializationSchema;
import demo.bean.TableLendAuditRecordNew;

import java.util.Objects;

/**
 * Flink streaming job that consumes {@code TableLendAuditRecordNew} POJOs from Kafka and,
 * per user id, tracks the most recent loan-audit status/time in keyed state. Working with
 * the POJO end-to-end avoids converting between intermediate data types, improving job
 * performance.
 */
public class FlinkStreamingKafkaDemo {
    private static final String TOPIC = "fdm_testuserinfo";
    private static final String BROKER_LIST =
            "10.10.15.243:9092,10.10.15.224:9092,10.10.15.222:9092";
    private static final String GROUP_ID = "test";

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        KafkaSource<TableLendAuditRecordNew> source =
                KafkaSource.<TableLendAuditRecordNew>builder()
                        // NOTE(review): setStartingOffsets(latest) below governs where
                        // consumption begins; this property mainly affects partitions
                        // discovered later without a committed offset.
                        .setProperty("auto.offset.reset", "latest")
                        // Discover newly added partitions every 10 seconds.
                        .setProperty("partition.discovery.interval.ms", "10000")
                        .setBootstrapServers(BROKER_LIST)
                        .setTopics(TOPIC)
                        .setGroupId(GROUP_ID)
                        .setStartingOffsets(OffsetsInitializer.latest())
                        .setDeserializer(
                                KafkaRecordDeserializationSchema.of(
                                        new PoJoDeserializationSchema(
                                                TableLendAuditRecordNew.class, false)))
                        .build();

        DataStreamSource<TableLendAuditRecordNew> kafkaSource =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        kafkaSource
                // The deserializer may yield null records; drop them before keying.
                .filter(Objects::nonNull)
                .keyBy(
                        (KeySelector<TableLendAuditRecordNew, Long>)
                                TableLendAuditRecordNew::getUid)
                .flatMap(new MyFlatFunction())
                .uid("TableLendAuditRecordNew")
                .name("TableLendAuditRecordNew");

        env.execute(FlinkStreamingKafkaDemo.class.getSimpleName());
    }

    /**
     * Keyed stateful function that remembers, per uid, the latest audit status/time seen
     * so far and stamps each incoming record with the up-to-date values before emitting it.
     */
    static class MyFlatFunction
            extends RichFlatMapFunction<TableLendAuditRecordNew, TableLendAuditRecordNew> {
        // Per-key snapshot of the most recent audit record; null until the first event.
        private transient ValueState<TableLendAuditRecordNew> lendAuditRecordVState;

        @Override
        public void open(Configuration parameters) throws Exception {
            lendAuditRecordVState =
                    getRuntimeContext()
                            .getState(
                                    new ValueStateDescriptor<>(
                                            "TableLendAuditRecordNew",
                                            TableLendAuditRecordNew.class));
        }

        @Override
        public void flatMap(TableLendAuditRecordNew value, Collector<TableLendAuditRecordNew> out)
                throws Exception {
            long uid = value.getUid();
            int lastLoanAuditStatus = value.getLast_loan_audit_status();
            int updatedAt = value.getUpdated_at();

            TableLendAuditRecordNew previous = lendAuditRecordVState.value();
            // -1 sentinels mean "no audit seen yet" for a brand-new key.
            TableLendAuditRecordNew curState =
                    previous == null ? new TableLendAuditRecordNew(uid, -1, -1, -1) : previous;
            // Event is newer than anything recorded: advance the state and mirror the
            // status/time onto the outgoing record. A -1 sentinel is normalized to null —
            // assumes the POJO setters accept boxed Integer (else this NPEs on unboxing);
            // TODO confirm against TableLendAuditRecordNew.
            if (curState.getLast_loan_audit_time() < updatedAt) {
                curState.setLast_loan_audit_time(updatedAt);
                curState.setLast_loan_audit_status(lastLoanAuditStatus);
                value.setLast_loan_audit_status(
                        (lastLoanAuditStatus == -1 ? null : lastLoanAuditStatus));
                value.setLast_loan_audit_time((updatedAt == -1 ? null : updatedAt));
            }
            // Status 0 = rejected, 1 = approved; changes to the loan status itself are not
            // considered here.
            if (lastLoanAuditStatus == 0 && (curState.getUpdated_at() < updatedAt)) {
                curState.setUpdated_at(updatedAt);
                value.setLast_loan_audit_time((updatedAt == -1 ? null : updatedAt));
            }
            out.collect(value);
            lendAuditRecordVState.update(curState);
        }
    }
}
