package com.yc.bigdata.flink.demo;

import com.datastax.driver.mapping.annotations.Column;
import com.datastax.driver.mapping.annotations.Table;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;

/**
 * <p>Demo job: consumes records from a Kafka topic with Flink and sinks them into Cassandra.</p>
 *
 * drop table excelsior.t;
 * create table excelsior.t(
 * 	pk int,
 * 	t int,
 * 	s varchar,
 * 	v varchar,
 * 	primary key(pk, t)
 * );
 *
 * @author: YuanChilde
 * @date: 2020-02-10 9:33
 * @version: 1.0
 * Modification History:
 * Date    Author      Version     Description
 * -----------------------------------------------------------------
 * 2020-02-10 9:33    YuanChilde     1.0        Initial version
 */
public class DataStreamToCassandra {

    /**
     * Entry point: reads messages from a Kafka topic and (in the commented-out
     * reference snippets) writes the windowed results into Cassandra.
     *
     * <p>Expected arguments: {@code --input-topic <topic>} and
     * {@code --bootstrap.servers <host:port,...>}.
     *
     * @param args command-line arguments, parsed by {@link ParameterTool}
     * @throws Exception if the Flink job cannot be configured or started
     */
    public static void main(String[] args) throws Exception {

        // FIX: parse the real command-line arguments instead of a hard-coded
        // local array that silently ignored whatever the user passed in.
        final ParameterTool params = ParameterTool.fromArgs(args);
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        if (!params.has("input-topic") || !params.has("bootstrap.servers")) {
            // FIX: the original printed the bootstrap.servers hint twice and
            // described both options as "file input".
            System.out.println("Use --input-topic to specify the Kafka topic to read from.");
            System.out.println("Use --bootstrap.servers to specify the Kafka broker list.");
        }

        // FIX: ParameterTool.getProperties() builds a *fresh* Properties copy on
        // every call, so the original's params.getProperties().put("group.id", ...)
        // mutated a throwaway object and the group.id never reached the consumer.
        // Capture the copy once and hand that same instance to the consumer.
        final Properties kafkaProps = params.getProperties();
        kafkaProps.put("group.id", "flink-group");

        env.getConfig().setGlobalJobParameters(params);
        // Checkpoint every 5 seconds so the Kafka offsets are recoverable.
        env.enableCheckpointing(5000);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // FIX: parameterize the consumer — the original declared a raw type.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>(params.get("input-topic"), new SimpleStringSchema(), kafkaProps);
        consumer.assignTimestampsAndWatermarks(new MessageWaterEmitter());

        // TODO(demo): the windowing pipeline and the CassandraSink below are kept
        // as reference snippets; uncomment and complete them to run end to end.
/*        DataStream<String> stream = env
                .addSource(consumer)
                .flatMap(new MessageSplitter())
                .keyBy()
                .timeWindow(Time.seconds(1));
        stream.print();*/

/*        CassandraSink.addSink(stream)
                .setClusterBuilder(new ClusterBuilder() {
                    @Override
                    protected Cluster buildCluster(Cluster.Builder builder) {
                        return builder.addContactPoint("127.0.0.1").withCredentials("username", "password").build();
                    }
                })
                .setQuery("INSERT INTO dm.user(id, name) values (?, ?);")
               *//* .setMapperOptions(() -> new Mapper.Option[]{Mapper.Option.saveNullFields(true)})*//*
                .build();*/
    }

    /**
     * POJO mapped to the Cassandra table {@code dm.user} by the DataStax object
     * mapper; Lombok generates the all-args and no-args constructors.
     */
    @AllArgsConstructor @NoArgsConstructor
    @Table(keyspace = "dm", name = "user")
    static class Test {

        // Maps to the "id" column of dm.user.
        @Column(name = "id")
        private Long id;

        // Maps to the "name" column of dm.user.
        @Column(name = "name")
        private String name;

        public Long getId() {
            return id;
        }

        public void setId(Long id) {
            this.id = id;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }
    }
}
