package com.wudl.flink.hbase;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.gson.JsonObject;
import com.wudl.flink.hbase.model.User;
import com.wudl.flink.hbase.slink.HbaseSink;
import com.wudl.flink.hbase.utils.KafkaUtils;
import io.vertx.core.json.Json;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.concurrent.TimeUnit;

/**
 * Flink streaming job that copies user records from Kafka into HBase.
 *
 * @author wudl
 * @version 1.0
 * @since 2021-12-08
 */

public class Application {

    /** Kafka topic the pipeline reads from. */
    private static final String SOURCE_TOPIC = "wudltopic";

    /** Kafka consumer group id. */
    private static final String GROUP_ID = "2019";

    /**
     * Entry point: consumes JSON strings from Kafka, deserializes each record
     * into a {@link User}, and writes the users to HBase via {@link HbaseSink}.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Configure fault tolerance up front, before the job graph is built.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                // max number of failures tolerated per measurement interval
                3,
                // interval over which the failure rate is measured.
                // FIX: the window must exceed maxFailures * restartDelay.
                // The original 2-second window combined with a 5-second
                // restart delay could never accumulate 3 failures, so the
                // threshold was unreachable and the job restarted forever.
                Time.of(1, TimeUnit.MINUTES),
                // delay between two consecutive restart attempts
                Time.of(5, TimeUnit.SECONDS)
        ));

        DataStreamSource<String> kafkaDs =
                env.addSource(KafkaUtils.getKafkaConsumer(SOURCE_TOPIC, GROUP_ID));

        // Deserialize each JSON record into a User.
        // NOTE(review): a malformed record makes JSON.parseObject throw and
        // fail the task, triggering a restart; confirm this is intended or
        // add filtering / dead-letter handling for bad records.
        DataStream<User> userStream = kafkaDs.map(new MapFunction<String, User>() {
            @Override
            public User map(String value) throws Exception {
                return JSON.parseObject(value, User.class);
            }
        });

        userStream.addSink(new HbaseSink());

        // Named job so it is identifiable in the Flink web UI / logs.
        env.execute("kafka-to-hbase-user-sync");
    }
}
