package demo.spark.local;

import org.apache.spark.sql.*;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.StreamingQueryException;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.functions;

import java.util.concurrent.TimeoutException;

import static org.apache.spark.sql.functions.*;

public class KafkaLogAnalysis {

    /**
     * Consumes JSON log events from the Kafka topic {@code logs}, parses them
     * against a fixed schema, and runs three windowed streaming aggregations
     * (counts by log level, by class of ERROR logs, and by source IP), each
     * printed to the console in complete output mode.
     *
     * @param args unused
     * @throws StreamingQueryException if any of the streaming queries fails
     * @throws TimeoutException        if a query cannot be started in time
     */
    public static void main(String[] args) throws StreamingQueryException, TimeoutException {
        // Create the SparkSession. The configured checkpoint location acts as
        // a base directory; each query checkpoints into its own subdirectory.
        SparkSession spark = SparkSession.builder()
                .appName("Kafka Log Analysis")
                .master("local[*]")
                .config("spark.sql.streaming.checkpointLocation", "checkpoint")
                .getOrCreate();

        // Schema of the JSON log payload. Every field arrives as a string;
        // "timestamp" is converted to a real timestamp below before windowing.
        StructType logSchema = new StructType()
                .add("timestamp", "string")
                .add("level", "string")
                .add("className", "string")
                .add("message", "string")
                .add("ip", "string");

        // Read the raw record stream from Kafka.
        Dataset<Row> kafkaDF = spark
                .readStream()
                .format("kafka")
                .option("kafka.bootstrap.servers", "localhost:9092")
                .option("subscribe", "logs")
                .load();

        // Parse the Kafka message value as JSON and flatten it. The string
        // "timestamp" column is converted explicitly to TimestampType so the
        // window() calls below do not rely on implicit string-to-timestamp
        // coercion.
        Dataset<Row> logDF = kafkaDF
                .selectExpr("CAST(value AS STRING)")
                .select(from_json(col("value"), logSchema).as("data"))
                .select("data.*")
                .withColumn("timestamp", to_timestamp(col("timestamp")));

        // Counts per log level per 1-minute window.
        Dataset<Row> levelCounts = logDF
                .groupBy(
                        window(col("timestamp"), "1 minute"),
                        col("level")
                )
                .count();

        // Counts of ERROR logs per originating class per 1-minute window.
        Dataset<Row> errorLogs = logDF
                .filter(col("level").equalTo("ERROR"))
                .groupBy(
                        window(col("timestamp"), "1 minute"),
                        col("className")
                )
                .count();

        // Counts per client IP per 1-minute window, busiest IPs first.
        // Sorting an aggregate is only permitted in complete output mode,
        // which all three sinks below use.
        Dataset<Row> ipStats = logDF
                .groupBy(
                        window(col("timestamp"), "1 minute"),
                        col("ip")
                )
                .count()
                .orderBy(col("count").desc());

        // Start the three console sinks. Each query is named so its console
        // output (and checkpoint subdirectory) is identifiable.
        levelCounts
                .writeStream()
                .queryName("levelCounts")
                .outputMode("complete")
                .format("console")
                .start();

        errorLogs
                .writeStream()
                .queryName("errorLogs")
                .outputMode("complete")
                .format("console")
                .start();

        ipStats
                .writeStream()
                .queryName("ipStats")
                .outputMode("complete")
                .format("console")
                .start();

        // Block until ANY query terminates. Unlike chaining awaitTermination()
        // per query, this surfaces a failure in any of the three queries
        // immediately instead of hiding it while blocked on the first one.
        spark.streams().awaitAnyTermination();
    }

}
