import com.yuki.api.common.GsonUtil;
import com.yuki.api.model.UmsLog;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * @description:
 * @author: Yuki Tadayoshi
 * @time: 2021/4/2 9:45
 */
/**
 * Flink streaming job: reads comma-separated UmsLog records from Kafka, computes a
 * running unique-visitor (UV) count per (update_time, log_type) key using keyed state,
 * and writes each updated count to Redis via SET.
 */
public class UVReceiver {
    public static void main(String[] args) throws Exception {
        String topic = "malluv";
        String host = "master";
        int port = 9092;
        int databaseId = 6;

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint once per minute. The original passed TimeUnit.MINUTES.toMillis(60 * 1000),
        // i.e. an interval of 60,000 minutes (~41 days), which effectively disabled checkpointing.
        env.enableCheckpointing(TimeUnit.MINUTES.toMillis(1));
        env.setParallelism(5);

        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        // Exactly-once processing: no data loss or duplication between ingestion and result.
        checkpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Retain checkpoint data after the job is cancelled so it can be restored later.
        checkpointConfig.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, host + ":" + port);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "app-uv-start");

        FlinkKafkaConsumerBase<String> kafkaConsumer = new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), properties);
        FlinkJedisPoolConfig config = new FlinkJedisPoolConfig
                .Builder()
                .setDatabase(databaseId)
                .setHost(host).build();
        env.addSource(kafkaConsumer).map(string -> {
            System.out.println("【" + string + "】");
            // Build a JSON object by zipping the CSV columns with UmsLog's declared field names.
            StringBuilder sb = new StringBuilder(1000);
            try {
                List<String> list = Arrays.asList(string.split(","));
                Field[] fs = UmsLog.class.getDeclaredFields();
                sb.append("{");
                // Guard against records that carry more columns than UmsLog has fields
                // (the original indexed fs[i] unguarded and swallowed the AIOOBE).
                int n = Math.min(list.size(), fs.length);
                for (int i = 0; i < n; i++) {
                    if (i > 0) {
                        sb.append(",");
                    }
                    // Quote keys and values so the result is valid JSON; Gson coerces
                    // quoted numerals into numeric fields.
                    sb.append("\"").append(fs[i].getName()).append("\":\"").append(list.get(i)).append("\"");
                }
                // The original never closed the object, leaving Gson an unterminated document.
                sb.append("}");
            } catch (Exception e) {
                e.printStackTrace();
            }
            return GsonUtil.fromJson(sb.toString(), UmsLog.class);
        }).keyBy("update_time", "log_type") // NOTE(review): keyBy expression names must match UmsLog's POJO field names — verify
          .map(new RichMapFunction<UmsLog, Tuple2<String, Long>>() {
            // User ids already counted for this key (MapState used as a set; value is unused).
            private MapState<String, Boolean> userIdState;
            // Running unique-visitor count for this key.
            private ValueState<Long> uvState;

            @Override
            public Tuple2<String, Long> map(UmsLog umsLog) throws Exception {
                if (uvState.value() == null) {
                    uvState.update(0L);
                }
                String userId = umsLog.getUserid().toString();
                // Count each user at most once per key. The original inverted this check
                // (incremented only for already-seen users and never recorded first-timers),
                // so the UV count could never advance past 0. It also stored null as the
                // map value, which some state backends treat as a removal.
                if (!userIdState.contains(userId)) {
                    userIdState.put(userId, Boolean.TRUE);
                    uvState.update(uvState.value() + 1);
                }
                String redisKey = umsLog.getUpdateTime() + "_" + umsLog.getLogType();
                System.out.println(redisKey + ">>>>>>>>>>>>" + uvState.value());
                return Tuple2.of(redisKey, uvState.value());
            }

            @Override
            public void open(Configuration parameters) throws Exception {
                super.open(parameters);
                userIdState = getRuntimeContext().getMapState(new MapStateDescriptor<String, Boolean>(
                        "userIdState",
                        TypeInformation.of(new TypeHint<String>() {
                        }),
                        TypeInformation.of(new TypeHint<Boolean>() {
                        })
                ));
                uvState = getRuntimeContext().getState(new ValueStateDescriptor<Long>("uvState",
                        TypeInformation.of(new TypeHint<Long>() {
                        })
                ));
            }
        }).addSink(new RedisSink<>(config, new RedisSetSinkMapper()));
        env.execute();
    }

    /**
     * Maps a (redisKey, uvCount) tuple to a Redis SET command:
     * key = tuple.f0, value = tuple.f1 rendered as a decimal string.
     */
    public static class RedisSetSinkMapper implements RedisMapper<Tuple2<String, Long>> {

        @Override
        public RedisCommandDescription getCommandDescription() {
            return new RedisCommandDescription(RedisCommand.SET);
        }

        @Override
        public String getKeyFromData(Tuple2<String, Long> data) {
            return data.f0;
        }

        @Override
        public String getValueFromData(Tuple2<String, Long> data) {
            return data.f1.toString();
        }
    }
}
