package org.rrd.kafka.phoneix.run;

import com.alibaba.fastjson.JSON;
import com.alibaba.otter.canal.protocol.FlatMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.rrd.kafka.phoneix.entity.TestToHbase1;
import org.rrd.kafka.phoneix.util.ConstantUtil;

import java.util.Map;
import java.util.Properties;

/**
 * @program: kafka-phoneix
 * @description: tosql
 * @author: sunteng
 * @create: 2020-03-19 11:02
 **/
@Slf4j
public class SqlServer {

    /**
     * Reused Jackson mapper. The original allocated a new {@code ObjectMapper} per record
     * inside {@code processElement}, which is expensive; a shared instance is safe for
     * {@code convertValue} usage.
     */
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private static Properties properties;

    /**
     * Builds the Kafka consumer configuration from project constants.
     */
    private static void setUp() {
        properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, ConstantUtil.BOOTSTRAP_SERVER_TEST);
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, ConstantUtil.GROUP_ID);
        // NOTE(review): a single-space client id looks accidental — confirm whether a real id was intended.
        properties.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, " ");
    }

    /**
     * Builds and executes the streaming topology:
     * Kafka (Canal FlatMessage JSON) -> parse -> drop DDL -> convert row data to
     * {@link TestToHbase1} -> Table API selects printed to stdout.
     *
     * @throws Exception propagated from {@code env.execute}
     */
    private static void flinkKafkaConsumer() throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Event-time must be configured before the source/operators are added,
        // otherwise parts of the topology may be built under the default processing-time setting.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Typed consumer (the original used a raw type, forcing an unchecked cast in the extractor).
        FlinkKafkaConsumer011<String> consumer =
                new FlinkKafkaConsumer011<>(ConstantUtil.FROM_TOPIC_TEST, new SimpleStringSchema(), properties);

        // Canal's "es" field supplies the event timestamp.
        // NOTE(review): AscendingTimestampExtractor assumes per-partition monotonic timestamps — confirm.
        consumer.assignTimestampsAndWatermarks(new AscendingTimestampExtractor<String>() {
            @Override
            public long extractAscendingTimestamp(String element) {
                return JSON.parseObject(element, FlatMessage.class).getEs();
            }
        });

        DataStream<String> stream = env.addSource(consumer);

        // The original wrapped this in `if (stream != null)` plus a catch of NullPointerException;
        // addSource never returns null, and swallowing NPEs while still executing a possibly
        // half-built topology hides programming errors, so both were removed.
        SingleOutputStreamOperator<TestToHbase1> testToHbase1Stream = stream
                .map(SqlServer::parseJson)
                .name("ParseJson")
                .filter(getFilter())
                .name("FILTER-DDL")
                .process(getProcessFunction())
                .name("CONVERT-ENTITY");

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        Table testToHbase1 = tEnv.fromDataStream(testToHbase1Stream);

        Table result = testToHbase1.select("*");
        Table result1 = testToHbase1.select("id,create_time");

        tEnv.toAppendStream(result, TestToHbase1.class)
                .print("First")
                .name("PRINT-SELECT-ALL");
        tEnv.toAppendStream(result1, TestToHbase1.class)
                .print("Second")
                .name("PRINT-SELECT-COLUMN");

        env.execute("testSql");
    }

    /**
     * Converts every row carried by a FlatMessage into a {@link TestToHbase1}.
     * The original dereferenced {@code getData().get(0)} unguarded — an NPE on messages
     * without data, and silently dropping every row after the first.
     */
    private static ProcessFunction<FlatMessage, TestToHbase1> getProcessFunction() {
        return new ProcessFunction<FlatMessage, TestToHbase1>() {
            @Override
            public void processElement(FlatMessage flatMessage, Context ctx, Collector<TestToHbase1> out) throws Exception {
                if (flatMessage.getData() == null || flatMessage.getData().isEmpty()) {
                    log.warn("##message has no data rows, skipping");
                    return;
                }
                for (Map<String, String> row : flatMessage.getData()) {
                    out.collect(MAPPER.convertValue(row, TestToHbase1.class));
                }
            }
        };
    }

    /**
     * Filter that drops DDL messages (schema changes), keeping only DML rows.
     */
    private static FilterFunction<FlatMessage> getFilter() {
        return value -> !value.getIsDdl();
    }

    /**
     * Deserializes a Canal FlatMessage from its JSON representation.
     */
    private static FlatMessage parseJson(String element) {
        return JSON.parseObject(element, FlatMessage.class);
    }

    public static void main(String[] args) throws Exception {
        setUp();
        flinkKafkaConsumer();
    }
}
