package org.rrd.kafka.phoneix.run;

import com.alibaba.fastjson.JSON;
import com.alibaba.otter.canal.protocol.FlatMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.rrd.kafka.phoneix.db.PhoneixJdbc;
import org.rrd.kafka.phoneix.util.ConstantUtil;

import java.util.*;

/**
 * @program: dataStreming
 * @description:
 * @author: sunteng
 * @create: 2020-02-26 16:30
 **/

@Slf4j
public class Server {

    /** Kafka consumer configuration shared by the job; populated by {@link #setUp()}. */
    private static Properties properties;

    /**
     * Initializes the Kafka consumer {@link Properties} from the shared constants.
     */
    private static void setUp() {
        log.info("##setting properties.....");
        properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, ConstantUtil.BOOTSTRAP_SERVER_TEST);
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, ConstantUtil.GROUP_ID);
        // NOTE(review): a single-space client.id looks accidental, but it is part of the
        // consumer's broker-visible identity, so it is kept as-is — confirm intended value.
        properties.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, " ");
    }

    /**
     * Builds and runs the Flink job: consumes canal {@link FlatMessage} JSON from Kafka,
     * drops DDL events, and sinks the remaining change rows into Phoenix.
     *
     * @throws Exception if job submission or execution fails
     */
    private static void flinkKafkaConsumer() throws Exception {
        log.info("##begin consumer kafka from topic {}", ConstantUtil.FROM_TOPIC_TEST);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Configure event time before wiring sources so all operators pick it up.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        FlinkKafkaConsumer011<String> consumer =
                new FlinkKafkaConsumer011<>(ConstantUtil.FROM_TOPIC_TEST, new SimpleStringSchema(), properties);

        // Event time is taken from the canal "es" field of each message.
        consumer.assignTimestampsAndWatermarks(new AscendingTimestampExtractor<String>() {
            @Override
            public long extractAscendingTimestamp(String element) {
                return JSON.parseObject(element, FlatMessage.class).getEs();
            }
        });

        DataStream<String> stream = env.addSource(consumer);

        // Pipeline definition is lazy: a try/catch around these calls cannot intercept
        // per-record failures at runtime, so no NullPointerException handler belongs here.
        stream
                .map(Server::parseJson)
                .filter(new FilterFunction<FlatMessage>() {
                    @Override
                    public boolean filter(FlatMessage value) throws Exception {
                        log.info("##filter DDL of {}", value);
                        // Keep only DML events; DDL statements are not replayed into Phoenix.
                        return !value.getIsDdl();
                    }
                }).name("FILTER-DDL")
                .addSink(new PhoneixJdbc())
                .name("PhoenixJdbc");

        env.execute("testPhoenix");
        log.info("##end consumer kafka from topic {}.............", ConstantUtil.FROM_TOPIC_TEST);
    }

    /**
     * Window function collecting all messages of a window into a single list sorted by
     * event time ("es") in descending order (newest first).
     *
     * <p>NOTE(review): currently unused by the pipeline above — presumably kept for a
     * keyed/windowed variant; confirm before deleting.
     */
    private static ProcessWindowFunction<FlatMessage, List<FlatMessage>, Object, TimeWindow> processFunction() {
        return new ProcessWindowFunction<FlatMessage, List<FlatMessage>, Object, TimeWindow>() {
            @Override
            public void process(Object key, Context context, Iterable<FlatMessage> elements, Collector<List<FlatMessage>> out) throws Exception {
                List<FlatMessage> flatMessages = new ArrayList<>();
                for (FlatMessage message : elements) {
                    flatMessages.add(message);
                }
                if (!flatMessages.isEmpty()) {
                    log.info("##sort elements");
                    // Long.compare honors the Comparator contract (returns 0 on ties).
                    // The previous subtraction-based form never returned 0 and could throw
                    // "Comparison method violates its general contract!" inside TimSort.
                    // Order preserved: larger es first (descending).
                    flatMessages.sort((o1, o2) -> Long.compare(o2.getEs(), o1.getEs()));
                }
                out.collect(flatMessages);
            }
        };
    }

    /**
     * Keys messages by their canal batch id.
     *
     * <p>NOTE(review): currently unused by the pipeline above — see {@link #processFunction()}.
     */
    private static KeySelector<FlatMessage, Object> getKey() {
        return new KeySelector<FlatMessage, Object>() {
            @Override
            public Object getKey(FlatMessage value) throws Exception {
                log.info("## keyBy of {}", value);
                return value.getId();
            }
        };
    }

    /**
     * Deserializes one raw Kafka record (JSON) into a canal {@link FlatMessage}.
     *
     * @param element raw JSON payload from Kafka
     * @return the parsed message
     */
    private static FlatMessage parseJson(String element) {
        log.info("##ParseJson element {}", element);
        return JSON.parseObject(element, FlatMessage.class);
    }

    /**
     * Entry point: configures the consumer properties, then starts the Flink job.
     */
    public static void main(String[] args) throws Exception {
        setUp();
        flinkKafkaConsumer();
    }
}
