package demo.spark.local;

import demo.vo.LogEntry;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.io.Serializable;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Objects;

/**
 * Compile
 * <blockquote><pre>
 *     javac -cp "spark-core_2.12-3.0.0.jar:spark-sql_2.12-3.0.0.jar" LogAnalysis.java
 * </pre></blockquote>
 * <p>
 * Run (note: the fully qualified class name includes the package)
 * <blockquote><pre>
 * spark-submit \
 *   --class demo.spark.local.LogAnalysis \
 *   --master local[*] \
 *   yourjar.jar
 * </pre></blockquote>
 */
public class LogAnalysis {

    /** Number of pipe-delimited fields expected in each log line. */
    private static final int EXPECTED_FIELDS = 5;

    /**
     * Entry point: reads pipe-delimited log files from {@code logs/*.log},
     * runs several SQL aggregations (level counts, error classes, top IPs,
     * hourly volume) and writes the results as CSV under {@code output/}.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // 1. Configure Spark. Build a single SparkSession and derive the
        //    JavaSparkContext from it — creating a separate JavaSparkContext
        //    from the same conf would race to own the one allowed SparkContext.
        SparkConf conf = new SparkConf()
                .setAppName("Log Analysis")
                .setMaster("local[*]");
        SparkSession spark = SparkSession.builder()
                .config(conf)
                .getOrCreate();
        JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

        // try/finally so the session (and its underlying context) is stopped
        // even when a query or the file read throws.
        try {
            // 2. Read the log files.
            JavaRDD<String> logLines = sc.textFile("logs/*.log");

            // 3. Parse each line; malformed lines become null and are dropped.
            JavaRDD<LogEntry> logEntries = logLines
                    .map(LogAnalysis::parseLine)
                    .filter(Objects::nonNull);

            // 4. Convert to a DataFrame. Structured view enables SQL queries
            //    and Catalyst-optimized execution plans.
            Dataset<Row> logDF = spark.createDataFrame(logEntries, LogEntry.class);
            logDF.createOrReplaceTempView("logs");

            // 5. Aggregations.
            // 5.1 Counts per log level.
            System.out.println("统计各日志级别数量：");
            Dataset<Row> levelStats = spark.sql(
                    "SELECT level, COUNT(*) as count " +
                            "FROM logs " +
                            "GROUP BY level " +
                            "ORDER BY count DESC"
            );
            levelStats.show();

            // 5.2 ERROR-level counts per class.
            System.out.println("错误日志统计：");
            Dataset<Row> errorLogs = spark.sql(
                    "SELECT className, COUNT(*) as error_count " +
                            "FROM logs " +
                            "WHERE level = 'ERROR' " +
                            "GROUP BY className " +
                            "ORDER BY error_count DESC"
            );
            errorLogs.show();

            // 5.3 Top 10 IPs by access count.
            System.out.println("IP访问统计：");
            Dataset<Row> ipStats = spark.sql(
                    "SELECT ip, COUNT(*) as access_count " +
                            "FROM logs " +
                            "GROUP BY ip " +
                            "ORDER BY access_count DESC " +
                            "LIMIT 10"
            );
            ipStats.show();

            // 5.4 Hourly volume. SUBSTRING(timestamp, 1, 13) keeps
            //     "yyyy-MM-dd HH" — assumes that timestamp layout; TODO confirm.
            System.out.println("每小时日志统计：");
            Dataset<Row> hourlyStats = spark.sql(
                    "SELECT SUBSTRING(timestamp, 1, 13) as hour, " +
                            "COUNT(*) as count " +
                            "FROM logs " +
                            "GROUP BY SUBSTRING(timestamp, 1, 13) " +
                            "ORDER BY hour"
            );
            hourlyStats.show();

            // 6. Persist results as CSV. Headers included so the output
            //    files are self-describing.
            try {
                saveAsCsv(levelStats, "output/level_stats");
                saveAsCsv(errorLogs, "output/error_logs");
                saveAsCsv(ipStats, "output/ip_stats");
            } catch (Exception e) {
                // Best-effort save: report and continue to clean shutdown.
                System.err.println("保存结果失败: " + e.getMessage());
            }
        } finally {
            // 7. Release resources. Closing the session stops the shared
            //    SparkContext; closing sc afterwards is then a no-op.
            spark.close();
            sc.close();
        }
    }

    /**
     * Parses one pipe-delimited log line into a {@link LogEntry}.
     *
     * @param line raw line of the form {@code ts|level|ip|className|message}
     *             (field semantics presumed from usage — verify against producer)
     * @return the parsed entry, or {@code null} when the line has fewer than
     *         {@value #EXPECTED_FIELDS} fields or construction fails
     */
    private static LogEntry parseLine(String line) {
        String[] parts = line.split("\\|");
        if (parts.length < EXPECTED_FIELDS) {
            return null; // explicit guard instead of exception-driven control flow
        }
        try {
            return new LogEntry(
                    parts[0].trim(),
                    parts[1].trim(),
                    parts[2].trim(),
                    parts[3].trim(),
                    parts[4].trim()
            );
        } catch (Exception e) {
            // Preserve original best-effort behavior: any constructor failure
            // drops the line rather than aborting the job.
            return null;
        }
    }

    /** Writes a DataFrame as headered CSV, overwriting any previous output. */
    private static void saveAsCsv(Dataset<Row> df, String path) {
        df.write()
                .mode("overwrite")
                .option("header", "true")
                .format("csv")
                .save(path);
    }
}
