package demo.table;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Demo Flink job: runs a Table API / SQL query against a Hive catalog, keeps only the
 * "insert" side of the resulting retract stream, and sinks the rows to a Kafka topic
 * using a custom key-based partitioner.
 *
 * <p>NOTE(review): key/value bytes are now produced and consumed with an explicit
 * UTF-8 charset. The previous code relied on the platform default charset, so the
 * partitioner's {@code new String(key)} could disagree with the serializer's
 * {@code getBytes()} across differently-configured JVMs.
 */
public class FlinkTableApiDemo {
    /** Kafka bootstrap servers for the sink producer. */
    public static final String KFKSERVER = "10.10.15.243:9092,10.10.15.224:9092,10.10.15.222:9092";

    /** Target Kafka topic. */
    public static final String KFKPRODTPC = "just_test_jk";

    /** Parallelism for the sink; matches the topic's partition count. */
    public static final int KFKTPCPARTITIONS = 6;

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment envStream = StreamExecutionEnvironment.getExecutionEnvironment();

        // NOTE(review): externalized-checkpoint retention is configured, but checkpointing
        // itself is never enabled (no envStream.enableCheckpointing(intervalMs) call), so
        // this setting currently has no effect. TODO: confirm whether checkpointing should
        // be enabled here or is configured externally (e.g. flink-conf.yaml).
        CheckpointConfig conf = envStream.getCheckpointConfig();
        conf.enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        StreamTableEnvironment envTable = StreamTableEnvironment.create(envStream);

        // Register the CDH Hive metastore as a catalog and make it the active one so the
        // SQL below can reference Hive tables directly.
        String name = "cdh6_hive";
        String defaultDb = "default";
        String hiveConfDir = "/opt/cloudera/parcels/FLINK/lib/flink/conf";

        HiveCatalog hiveCatalog = new HiveCatalog(name, defaultDb, hiveConfDir);
        envTable.registerCatalog("hive", hiveCatalog);
        envTable.useCatalog("hive");

        Properties sinkProp = new Properties();
        sinkProp.setProperty("bootstrap.servers", KFKSERVER);

        // Run the query and convert to a retract stream: f0 == true marks an insert,
        // f0 == false marks a retraction of a previously emitted row.
        Table tableResult = envTable.sqlQuery(TableSaveMoneyCardMemberSpecialPriceDemo.QUERYSQL);
        DataStream<Tuple2<Boolean, TableSaveMoneyCardMemberSpecialPriceDemo>> input =
                envTable.toRetractStream(
                        tableResult, TableSaveMoneyCardMemberSpecialPriceDemo.class);

        // Anonymous classes (rather than lambdas) are kept deliberately: Flink's type
        // extraction for generic Tuple2 inputs is more reliable with explicit classes.
        SingleOutputStreamOperator<TableSaveMoneyCardMemberSpecialPriceDemo> out =
                input.filter(
                                new FilterFunction<
                                        Tuple2<
                                                Boolean,
                                                TableSaveMoneyCardMemberSpecialPriceDemo>>() {
                                    @Override
                                    public boolean filter(
                                            Tuple2<
                                                            Boolean,
                                                            TableSaveMoneyCardMemberSpecialPriceDemo>
                                                    value)
                                            throws Exception {
                                        // Keep inserts only; drop retractions.
                                        return value.f0;
                                    }
                                })
                        .map(
                                new MapFunction<
                                        Tuple2<Boolean, TableSaveMoneyCardMemberSpecialPriceDemo>,
                                        TableSaveMoneyCardMemberSpecialPriceDemo>() {
                                    @Override
                                    public TableSaveMoneyCardMemberSpecialPriceDemo map(
                                            Tuple2<
                                                            Boolean,
                                                            TableSaveMoneyCardMemberSpecialPriceDemo>
                                                    value)
                                            throws Exception {
                                        // Unwrap the payload, discarding the retract flag.
                                        return value.f1;
                                    }
                                });

        FlinkKafkaProducer<TableSaveMoneyCardMemberSpecialPriceDemo>
                userLabelAllFlinkKafkaProducer =
                        new FlinkKafkaProducer<TableSaveMoneyCardMemberSpecialPriceDemo>(
                                KFKPRODTPC,
                                new WeKafkaKeyedSerializationSchema(),
                                sinkProp,
                                java.util.Optional.of(new WeKafkaCustomPartitioner()));

        out.addSink(userLabelAllFlinkKafkaProducer).setParallelism(KFKTPCPARTITIONS);

        envStream.execute("Flink-Table-Api-Test");
    }

    /**
     * Serializes records for Kafka: the key is the record's uid, the value is its
     * {@code toString()} form. Both use an explicit UTF-8 charset so the bytes are
     * stable regardless of the JVM's platform default charset.
     */
    private static class WeKafkaKeyedSerializationSchema
            implements KeyedSerializationSchema<TableSaveMoneyCardMemberSpecialPriceDemo> {
        @Override
        public byte[] serializeKey(TableSaveMoneyCardMemberSpecialPriceDemo element) {
            // String.valueOf tolerates a null uid (emits the key "null").
            return String.valueOf(element.getUid()).getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public byte[] serializeValue(TableSaveMoneyCardMemberSpecialPriceDemo element) {
            return element.toString().getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public String getTargetTopic(TableSaveMoneyCardMemberSpecialPriceDemo element) {
            // null => the producer's configured default topic (KFKPRODTPC) is used.
            return null;
        }
    }

    /**
     * Routes each record to a partition derived from the hash of its key string, so all
     * records sharing a uid land on the same partition.
     */
    private static class WeKafkaCustomPartitioner
            extends FlinkKafkaPartitioner<TableSaveMoneyCardMemberSpecialPriceDemo> {

        @Override
        public int partition(
                TableSaveMoneyCardMemberSpecialPriceDemo record,
                byte[] key,
                byte[] value,
                String targetTopic,
                int[] partitions) {
            // Decode with the same charset the serializer used; hashCode % length is
            // bounded in (-length, length), so Math.abs cannot hit the MIN_VALUE trap.
            return Math.abs(
                    new String(key, StandardCharsets.UTF_8).hashCode() % partitions.length);
        }
    }
}
