package com.innodealing.process;

import com.innodealing.model.User;
import com.innodealing.model.UserKafkaMessage;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Counts occurrences of usernames arriving from two connected streams:
 * a DB snapshot stream of {@link User} records and a live stream of
 * {@link UserKafkaMessage} records. Both streams share a single
 * {@link MapState} keyed by username, and every update is emitted
 * downstream as a {@code (username, count)} tuple.
 *
 * <p>NOTE(review): keyed {@link MapState} access requires this function to
 * run on connected keyed streams — confirm the job wires it up that way.
 */
public class UserNameCountFunction extends CoProcessFunction<User, UserKafkaMessage, Tuple2<String, Long>> {
    private static final Logger logger = LoggerFactory.getLogger(UserNameCountFunction.class);

    /** Per-key running count of how many times each username has been seen. */
    private transient MapState<String, Long> usernameCountState;

    @Override
    public void open(Configuration parameters) {
        // Register the username -> count state with the runtime context.
        usernameCountState = getRuntimeContext().getMapState(
                new MapStateDescriptor<>("usernameCountState", String.class, Long.class));
    }

    /** Handles records from the first input (DB snapshot). */
    @Override
    public void processElement1(User user, Context ctx, Collector<Tuple2<String, Long>> out) throws Exception {
        updateCount(user.getUsername(), out, "DB Snapshot");
    }

    /** Handles records from the second input (Kafka). */
    @Override
    public void processElement2(UserKafkaMessage kafkaMessage, Context ctx, Collector<Tuple2<String, Long>> out) throws Exception {
        updateCount(kafkaMessage.getUsername(), out, "Kafka Stream");
    }

    /**
     * Increments the stored count for {@code username}, logs it, and emits
     * the updated {@code (username, count)} pair. Null usernames are ignored.
     *
     * @param username the username to count; skipped when {@code null}
     * @param out      collector for the updated tuple
     * @param source   label identifying which input stream produced the record
     */
    private void updateCount(String username, Collector<Tuple2<String, Long>> out, String source) throws Exception {
        if (username == null) {
            return;
        }
        Long previous = usernameCountState.get(username);
        long updated = (previous == null ? 0L : previous) + 1L;
        usernameCountState.put(username, updated);
        logger.info("【{}】username: {}, count: {}", source, username, updated);

        // Emit the new total for downstream aggregation.
        out.collect(Tuple2.of(username, updated));
    }
}
