package com.example.flink;

import com.example.flink.service.KafkaProducerService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;

@Component
public class LogGeneratorTask {
    /** Log levels sampled uniformly at random for each generated event. */
    private static final String[] LEVELS = {"INFO", "WARN", "ERROR"};

    /** Sample message texts sampled uniformly at random for each generated event. */
    private static final String[] MESSAGES = {
            "用户登录成功",
            "订单已创建",
            "支付完成",
            "库存不足",
            "系统异常",
            "网络请求超时",
            "数据写入成功",
            "权限校验失败",
            "服务启动完成",
            "未知错误"
    };

    private final KafkaProducerService kafkaProducerService;

    /**
     * Constructor injection (preferred over field {@code @Autowired}) so the
     * dependency can be {@code final} and the class is testable without Spring.
     *
     * @param kafkaProducerService producer used to publish generated log events
     */
    @Autowired
    public LogGeneratorTask(KafkaProducerService kafkaProducerService) {
        this.kafkaProducerService = kafkaProducerService;
    }

    /**
     * Every 10 seconds, generates between 1 and 10 synthetic log events with a
     * random level/message and fresh trace/span IDs, and publishes each one to
     * Kafka via {@link KafkaProducerService#sendLog}.
     */
    @Scheduled(fixedRate = 10000)
    public void generateLogs() {
        // ThreadLocalRandom avoids contention on a shared Random instance.
        ThreadLocalRandom random = ThreadLocalRandom.current();
        int count = random.nextInt(1, 11); // 1-10 events per tick
        for (int i = 0; i < count; i++) {
            // Epoch-millis timestamp as a string — keeps the original wire format.
            String time = String.valueOf(System.currentTimeMillis());
            String level = LEVELS[random.nextInt(LEVELS.length)];
            String message = MESSAGES[random.nextInt(MESSAGES.length)];
            String thread = "main";
            String logger = "com.example.app";
            String traceId = UUID.randomUUID().toString();
            String spanId = UUID.randomUUID().toString();
            LogEvent logEvent = new LogEvent(time, level, message, thread, logger, traceId, spanId);
            kafkaProducerService.sendLog(logEvent);
        }
    }
}