
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.util.Arrays;

/**
 * Demo application for Parquet Modular Encryption with Spark.
 *
 * <p>Flow: build a {@link SparkSession} configured with an in-memory mock KMS and a
 * properties-driven crypto factory, create a tiny {@code Person} dataset, write it as an
 * encrypted Parquet file to OSS, then read it back (decryption is transparent because the
 * same session holds the key configuration).
 *
 * <p>NOTE(review): the key material below is a hard-coded demo key (base64 of bytes 0x00..0x0F).
 * Never ship real keys in source; in production use a real KMS client instead of InMemoryKMS.
 */
public class ParquetEncryptionApp {
    public static void main(String[] args) {
        // 1. Initialize SparkSession with Parquet encryption configuration.
        //    The "spark.hadoop." prefix forwards these properties into the Hadoop
        //    configuration that the Parquet writer/reader sees.
        SparkSession spark = SparkSession.builder()
                .appName("ParquetEncryptionApp")
                // KMS client is loaded by reflection, so a fully-qualified class name is
                // required; the bare name "InMemoryKMS" fails class resolution at write time.
                .config("spark.hadoop.parquet.encryption.kms.client.class",
                        "org.apache.parquet.crypto.keytools.mocks.InMemoryKMS")
                // Demo master keys for InMemoryKMS: "<keyId>:<base64Key>" pairs.
                .config("spark.hadoop.parquet.encryption.key.list",
                        "footerKey:AAECAwQFBgcICQoLDA0ODw==, columnKey:AAECAwQFBgcICQoLDA0ODw==")
                .config("spark.hadoop.parquet.crypto.factory.class",
                        "org.apache.parquet.crypto.keytools.PropertiesDrivenCryptoFactory")
                .getOrCreate();

        // 2. Create sample data (simulated source). Encoders.bean orders columns by
        //    bean-property name (id, name), matching the toDF rename below.
        Dataset<Row> data = spark.createDataset(
                Arrays.asList(
                        new Person(11, "Alice"),
                        new Person(21, "Bob")
                ),
                Encoders.bean(Person.class)
        ).toDF("id", "name");

        data.show();

        // 3. Write the data as an encrypted Parquet file.
        //    footer.key encrypts the file footer; column.keys maps "keyId:col1,col2".
        String outputPath = "oss://risk-ml-featurestore/w-93b109b38a92c892/spark-warehouse/wx_encrypted_people_2";
        data.write()
                .option("parquet.encryption.footer.key", "footerKey")
                .option("parquet.encryption.column.keys", "columnKey:id,name")
                .mode(SaveMode.Append)
                .parquet(outputPath);
        System.out.println("加密文件已保存到: " + outputPath);

        // 4. Read the encrypted file back; decryption is automatic because this session
        //    carries the same key-list configuration.
        Dataset<Row> decryptedData = spark.read().parquet(outputPath);
        System.out.println("读取解密文件：");
        decryptedData.show();

        // 5. Shut down Spark.
        spark.stop();
    }
}
