package org.example.portrait.module;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.shaded.guava30.com.google.common.collect.Maps;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.example.portrait.module.model.UserBehavior;
import org.example.portrait.module.model.UserProfile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Flink实时计算用户画像主类
 */
/**
 * Real-time user-profile computation with Flink.
 *
 * <p>Pipeline: consume {@code UserBehavior} events as JSON strings from the Kafka topic
 * {@code user-behavior-topic}, aggregate them per user over 10-second processing-time
 * tumbling windows, and publish the resulting {@code UserProfile} snapshots as JSON to
 * {@code user-profile-topic}.
 */
public class UserProfileCalculator {
    private static final Logger logger = LoggerFactory.getLogger(UserProfileCalculator.class);

    // Kafka topics: raw behavior input and computed profile output.
    private static final String BEHAVIOR_TOPIC = "user-behavior-topic";
    private static final String PROFILE_TOPIC = "user-profile-topic";

    // Kafka broker address.
    private static final String BOOTSTRAP_SERVERS = "124.222.42.79:9092";

    // Consumer group id used when reading the behavior topic.
    private static final String BEHAVIOR_CONSUMER = "user-profile-group";

    public static void main(String[] args) throws Exception {
        logger.info("用户画像分析程序启动");

        // 1. Create the stream execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Kafka consumer configuration.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", BOOTSTRAP_SERVERS);
        properties.setProperty("group.id", BEHAVIOR_CONSUMER);
        properties.setProperty("auto.offset.reset", "latest");

        // 3. Source: raw JSON strings from the behavior topic.
        DataStream<String> kafkaDataStream = env.addSource(
                new FlinkKafkaConsumer<>(BEHAVIOR_TOPIC, new SimpleStringSchema(), properties));

        // 4. Parse each JSON payload into a UserBehavior (logging raw and parsed forms).
        DataStream<UserBehavior> userBehaviorStream = kafkaDataStream
                .map(jsonString -> {
                    logger.info("原始JSON: {}", jsonString);
                    return jsonString;
                })
                .map(jsonString -> {
                    try {
                        UserBehavior behavior = JSON.parseObject(jsonString, UserBehavior.class);
                        logger.info("解析后的对象: {}", behavior);
                        return behavior;
                    } catch (Exception e) {
                        // Re-throw so malformed input fails loudly instead of being silently dropped.
                        logger.error("JSON解析失败: {}，错误: {}", jsonString, e.getMessage(), e);
                        throw e;
                    }
                })
                .name("ParseUserBehavior")
                .uid("parse-user-behavior");

        // 5. Per-user aggregation over 10-second processing-time tumbling windows.
        DataStream<UserProfile> userProfileStream = userBehaviorStream
                .keyBy(UserBehavior::getUserId)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(10)))
                .aggregate(new UserProfileAggregateFunction())
                .map(profile -> {
                    logger.info("计算得到的用户画像: {}", profile);
                    return profile;
                })
                .name("CalculateUserProfile")
                .uid("calculate-user-profile");

        // 6. Sink: serialize profiles to JSON and publish them to Kafka.
        userProfileStream
                .map(profile -> JSON.toJSONString(profile))
                .addSink(new FlinkKafkaProducer<>(
                        PROFILE_TOPIC,
                        new SimpleStringSchema(),
                        properties
                ))
                .name("WriteToKafka")
                .uid("write-to-kafka");

        // 7. Submit the job.
        env.execute("Real-time User Profile Calculation");
    }

    /**
     * Incremental window aggregate: folds {@code UserBehavior} events into a per-user
     * {@code UserProfile} accumulator, which is also the window output.
     */
    public static class UserProfileAggregateFunction implements AggregateFunction<
            UserBehavior,                // input type
            UserProfile,                 // accumulator type
            UserProfile                  // output type
            > {
        private static final Logger aggregateLogger = LoggerFactory.getLogger(UserProfileAggregateFunction.class);

        @Override
        public UserProfile createAccumulator() {
            return new UserProfile();
        }

        /**
         * Folds one behavior event into the accumulator: stamps the last-active time,
         * bumps the counter matching the behavior type, and records the category hit.
         *
         * @param behavior    the incoming event for this window's key
         * @param accumulator running profile; replaced with a keyed instance on first use
         * @return the updated accumulator
         */
        @Override
        public UserProfile add(UserBehavior behavior, UserProfile accumulator) {
            try {
                // First event for this user in the window: bind the accumulator to the userId.
                if (accumulator.getUserId() == null) {
                    accumulator = new UserProfile(behavior.getUserId());
                }

                // Track the most recent activity timestamp.
                accumulator.setLastActiveTime(behavior.getTimestamp());

                // Update the metric matching the behavior type. A null type would NPE inside
                // a String switch and fail the job, so route it to the unknown-type warning.
                String behaviorType = behavior.getBehaviorType();
                if (behaviorType == null) {
                    aggregateLogger.warn("未知的行为类型: {}", behaviorType);
                } else {
                    switch (behaviorType) {
                        case "click":
                            accumulator.setTotalClicks(accumulator.getTotalClicks() + 1);
                            aggregateLogger.trace("用户[{}]点击行为增加，当前总点击数: {}", accumulator.getUserId(), accumulator.getTotalClicks());
                            break;
                        case "purchase":
                            accumulator.setTotalPurchases(accumulator.getTotalPurchases() + 1);
                            aggregateLogger.trace("用户[{}]购买行为增加，当前总购买数: {}", accumulator.getUserId(), accumulator.getTotalPurchases());
                            break;
                        case "share":
                            accumulator.setTotalShare(accumulator.getTotalShare() + 1);
                            // Fixed: previously logged getTotalPurchases() here (copy-paste bug),
                            // so the "total shares" message printed the purchase count.
                            aggregateLogger.trace("用户[{}]分享行为增加，当前总分享数: {}", accumulator.getUserId(), accumulator.getTotalShare());
                            break;
                        default:
                            aggregateLogger.warn("未知的行为类型: {}", behaviorType);
                    }
                }

                // Record one hit for the event's category.
                accumulator.getCategoryPreferences().merge(behavior.getCategory(), 1L, Long::sum);
                return accumulator;
            } catch (Exception e) {
                aggregateLogger.error("处理用户行为时出错", e);
                throw e;
            }
        }

        @Override
        public UserProfile getResult(UserProfile accumulator) {
            return accumulator;
        }

        /**
         * Merges two partial profiles (e.g. from session-window merges): counters are summed,
         * last-active time is the max, and category counts are combined key-wise.
         */
        @Override
        public UserProfile merge(UserProfile a, UserProfile b) {
            aggregateLogger.debug("合并用户[{}]和[{}]的画像数据", a.getUserId(), b.getUserId());
            // Prefer whichever side already carries a userId; a fresh (never-used) accumulator
            // still has a null userId and previously wiped the id from the merged result.
            String userId = a.getUserId() != null ? a.getUserId() : b.getUserId();
            UserProfile merged = new UserProfile(userId);

            // Sum click, purchase, and share counters.
            merged.setTotalClicks(a.getTotalClicks() + b.getTotalClicks());
            merged.setTotalPurchases(a.getTotalPurchases() + b.getTotalPurchases());
            merged.setTotalShare(a.getTotalShare() + b.getTotalShare());

            // Keep the most recent activity timestamp.
            merged.setLastActiveTime(Math.max(a.getLastActiveTime(), b.getLastActiveTime()));

            // Combine category counts without depending on Flink's shaded guava.
            Map<String, Long> mergedCategories = new HashMap<>(a.getCategoryPreferences());
            b.getCategoryPreferences().forEach((k, v) ->
                    mergedCategories.merge(k, v, Long::sum)
            );
            merged.setCategoryPreferences(mergedCategories);

            return merged;
        }
    }
}
