package com.flink;

import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import com.flink.sink.MySqlToPojoSink;
import com.flink.source.Data;

import java.util.Properties;
import java.util.concurrent.TimeUnit;


public class KafkaToMysql {

    /** Kafka broker bootstrap address. */
    public static final String BROKER_LIST = "localhost:9092";
    /** Kafka topic to consume from (also reused as the consumer group id). */
    public static final String TOPIC_WEB = "web";
    /**
     * NOTE(review): these are producer-side serializer class names and were previously
     * (incorrectly) passed to the consumer configuration — a consumer takes
     * deserializers, and FlinkKafkaConsumer manages record decoding itself via the
     * DeserializationSchema. The constants are kept only so external callers that
     * reference them keep compiling; they are no longer used by this job.
     */
    public static final String KEY_SERIALIZER = "org.apache.kafka.common.serialization.StringSerializer";
    public static final String VALUE_SERIALIZER = "org.apache.kafka.common.serialization.StringSerializer";

    /**
     * Builds and runs a streaming job that reads JSON strings from the {@code web}
     * Kafka topic, parses each record into a {@link Data} POJO via fastjson, and
     * writes the results to MySQL through {@link MySqlToPojoSink}.
     * Blocks until the Flink job terminates.
     *
     * @throws Exception if the Flink job fails to start or fails during execution
     */
    public static void transformFromKafka() throws Exception {
        // Build the stream execution environment; run the whole job single-threaded.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Kafka consumer configuration. Record decoding is handled by the
        // SimpleStringSchema handed to FlinkKafkaConsumer, so no Kafka
        // key/value (de)serializer properties are required here. The legacy
        // "zookeeper.connect" setting was removed: FlinkKafkaConsumer uses the
        // modern Kafka consumer protocol, which ignores it.
        Properties prop = new Properties();
        prop.put("bootstrap.servers", BROKER_LIST);
        prop.put("group.id", TOPIC_WEB);
        // Start from the earliest offset when the group has no committed offset yet.
        prop.put("auto.offset.reset", "earliest");

        SingleOutputStreamOperator<Data> webStream = env
                .addSource(new FlinkKafkaConsumer<>(TOPIC_WEB, new SimpleStringSchema(), prop))
                .setParallelism(1)
                // Each Kafka record is expected to be a JSON document describing one Data row.
                .map(json -> JSONObject.parseObject(json, Data.class));

        webStream.addSink(new MySqlToPojoSink());
        // Named job for easier identification in the Flink dashboard/logs.
        env.execute("KafkaToMysql");
    }

    /**
     * Entry point: starts the Kafka-to-MySQL streaming job and blocks until it ends.
     */
    public static void main(String[] args) throws Exception {
        transformFromKafka();
    }
}
