package org.example.test;

import lombok.Data;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

import java.sql.Date;
import java.util.Arrays;
import java.util.List;

/**
 * Builds an in-memory Dataset&lt;Row&gt; that matches the schema of the MySQL table
 * {@code t_obj_202312ld_hour_va} and appends it to that table over JDBC.
 *
 * @author lwc
 * @date 2023/12/22 10:56
 */
public class SparkDataFram {

    /**
     * Entry point: creates a local SparkSession, builds a one-row DataFrame
     * that mirrors the target table's schema, prints it, and appends it to
     * MySQL over JDBC.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        SparkSession spark = SparkSession.builder()
                .appName("YourDatasetCreationExample")
                .config("spark.master", "local")
                .getOrCreate();

        // try/finally guarantees the session is stopped even when the JDBC
        // write (or anything else) throws — the original leaked the session
        // on failure because spark.stop() was only reached on success.
        try {
            // Sample rows; each row's values must line up with the schema
            // declared below (types and column order).
            List<Row> data = Arrays.asList(
                    RowFactory.create("2023-06-28 00:00:00", "2023-06-28 00:00:00", "10128E25", 96L, 1.234567, 0L)
                    // Add more rows here, keeping them consistent with the schema.
            );

            // Schema matching the target table's structure.
            StructType schema = new StructType()
                    .add("f_data_collection_time", DataTypes.StringType, false)
                    .add("f_data_input_time", DataTypes.StringType, true)
                    .add("f_key_name", DataTypes.StringType, false)
                    .add("f_measurement_points", DataTypes.LongType, false)
                    .add("data_va", DataTypes.DoubleType, true)
                    .add("f_delete", DataTypes.LongType, true);

            // Build the DataFrame and show it for inspection.
            Dataset<Row> yourDataset = spark.createDataFrame(data, schema);
            yourDataset.show();

            writeToMySql(yourDataset);
        } finally {
            spark.stop();
        }
    }

    /**
     * Appends the given dataset to the MySQL table {@code t_obj_202312ld_hour_va}
     * via Spark's JDBC data source.
     *
     * @param dataset rows to append; must match the target table's schema
     */
    private static void writeToMySql(Dataset<Row> dataset) {
        dataset.write()
                .mode("append") // change to "overwrite" or "ignore" as needed
                .format("jdbc")
                .option("url", "jdbc:mysql://localhost:3306/test")
                .option("dbtable", "t_obj_202312ld_hour_va")
                .option("driver", "com.mysql.cj.jdbc.Driver")
                .option("batchsize", "1000") // tune batch size for throughput
                .option("isolationLevel", "READ_COMMITTED") // transaction isolation, if needed
                // SECURITY: credentials are hard-coded in source. Move them to
                // environment variables / a secrets store before using this
                // against any non-throwaway database, and rotate this password.
                .option("user", "root")
                .option("password", "9XME3z94xs9nhCj")
                .save();
    }

    /**
     * Plain DTO for a single measurement result.
     *
     * NOTE(review): {@code @Data} already generates getters, setters,
     * {@code equals}/{@code hashCode} and {@code toString}; the hand-written
     * accessors below are largely redundant with it. They are kept as-is for
     * compatibility — in particular Lombok's generated name for {@code fKeyName}
     * would be {@code getFKeyName()}, not the {@code getfKeyName()} defined
     * here, so dropping either side would change the class's method surface.
     * Currently unused by {@link #main} — presumably kept for a later mapping
     * step; confirm before deleting.
     */
    @Data
    private static class Result {
        private Long point;
        // NOTE(review): java.sql.Date — prefer java.time.LocalDate in new code.
        private Date dataDate;
        private String phaseFlag;
        private String fKeyName = "10128E25"; // default key name
        private Double dataVa = 0.0;
        private Double dataVb = 0.0;
        private Double dataVc = 0.0;

        public Long getPoint() {
            return point;
        }

        public void setPoint(Long point) {
            this.point = point;
        }

        public Date getDataDate() {
            return dataDate;
        }

        public void setDataDate(Date dataDate) {
            this.dataDate = dataDate;
        }

        public String getPhaseFlag() {
            return phaseFlag;
        }

        public void setPhaseFlag(String phaseFlag) {
            this.phaseFlag = phaseFlag;
        }

        public String getfKeyName() {
            return fKeyName;
        }

        public void setfKeyName(String fKeyName) {
            this.fKeyName = fKeyName;
        }

        public Double getDataVa() {
            return dataVa;
        }

        public void setDataVa(Double dataVa) {
            this.dataVa = dataVa;
        }

        public Double getDataVb() {
            return dataVb;
        }

        public void setDataVb(Double dataVb) {
            this.dataVb = dataVb;
        }

        public Double getDataVc() {
            return dataVc;
        }

        public void setDataVc(Double dataVc) {
            this.dataVc = dataVc;
        }

        public Result() {
        }

        public Result(Long point, Date dataDate, String phaseFlag, String fKeyName, Double dataVa, Double dataVb, Double dataVc) {
            this.point = point;
            this.dataDate = dataDate;
            this.phaseFlag = phaseFlag;
            this.fKeyName = fKeyName;
            this.dataVa = dataVa;
            this.dataVb = dataVb;
            this.dataVc = dataVc;
        }

    }
}
