package io.a.sql.source;

import io.a.utils.H;
import lombok.extern.slf4j.Slf4j;
import org.apache.spark.sql.*;

import static io.a.utils.Const.*;

@Slf4j
public class SourceCsv {

    private static final String FORMAT_JDBC = "jdbc";

    /** Shared local SparkSession; created by {@link #init()}, stopped by {@link #finish()}. */
    private static SparkSession session;

    // NOTE(review): this JDBC reader is never used below, but initializing it is
    // currently the only thing that creates {@code session}. Keep it until the
    // JDBC path is either used or removed together with init().
    private static final DataFrameReader dataFrameReader = init();

    /**
     * Creates the local SparkSession and returns a JDBC-configured reader whose
     * connection settings (driver, url, user, password) come from {@code H.DB}.
     *
     * @return a {@link DataFrameReader} preconfigured for JDBC access
     */
    private static DataFrameReader init() {
        session = SparkSession
                .builder()
                .master("local[*]")
                .appName(SourceCsv.class.getName())
                .getOrCreate();

        return session.read().format(FORMAT_JDBC)
                .option(DRIVER, H.DB.getDriver()).option(URL, H.DB.getUrl())
                .option(USER, H.DB.getUser()).option(PASSWORD, H.DB.getPassword());
    }

    /** Stops the shared SparkSession, if one was created. */
    private static void finish() {
        if (session != null) {
            session.stop();
        }
    }

    /**
     * Reads {@code source/user.csv} (comma-delimited, with a header row) and
     * writes it back out to the {@code sink} directory, overwriting any
     * previous output. The SparkSession is always stopped on exit, even when
     * the read or write fails.
     */
    public static void main(String[] args) {
        try {
            Dataset<Row> csv = session.read()
                    .option("header", "true")  // first line is the header row
                    .option("delimiter", ",")  // field separator
                    .csv("source/user.csv");

            csv.write()
                    .option("header", "true")
                    .mode(SaveMode.Overwrite)
                    .csv("sink");
        } finally {
            // Release the session even if the read/write above throws.
            finish();
        }
    }
}
