package week08;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.execution.command.LeafRunnableCommand;

import java.util.Collections;

import scala.Enumeration;
import scala.collection.immutable.Seq;
import scala.jdk.javaapi.CollectionConverters;

// implements LeafRunnableCommand
/**
 * Week-08 exercise: reads a MySQL table through Spark's JDBC data source and
 * prints the rows; also sketches the methods of a
 * {@code LeafRunnableCommand}-style command that reports the Spark and Java
 * versions.
 *
 * <p>NOTE(review): the original file hints this class was meant to implement
 * {@code LeafRunnableCommand} (see the commented-out {@code implements} clause
 * and {@code @Override} annotations); the interface is intentionally left
 * unimplemented here, so the methods below are plain public methods.
 */
public class Work1  {

    /**
     * Entry point: starts a local Spark session, loads {@code demo_table} over
     * JDBC, and prints the collected rows.
     *
     * @param args ignored
     */
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf();
        sparkConf.setAppName("Week08_Work1");
        sparkConf.setMaster("local[*]");

        // Build the session directly from the conf. The original also did
        // `new JavaSparkContext()` with no conf: that context was never used
        // and an unconfigured second context conflicts with the one the
        // SparkSession creates, so it is removed.
        SparkSession sparkSession = SparkSession.builder().config(sparkConf).getOrCreate();

        try {
            String jdbcUrl = "jdbc:mysql://127.0.0.1:3306/demo2022";
            String jdbcTable = "demo_table";
            String jdbcUser = "demo";
            String jdbcPassword = "demo";
            // The JDBC source requires the "dbtable" (or "query") option;
            // "select" is not a recognized option, so the original load()
            // failed with "Option 'dbtable' or 'query' is required".
            // jdbcTable was declared but unused — clearly the intended value.
            Dataset<Row> sparkDataset = sparkSession.read().format("jdbc")
                    .option("url", jdbcUrl)
                    .option("dbtable", jdbcTable)
                    .option("user", jdbcUser)
                    .option("password", jdbcPassword)
                    .load();

            // collectAsList() gives the same rows without the extra
            // javaRDD() conversion the original went through.
            System.out.println("loadData:" + sparkDataset.collectAsList());
        } finally {
            // Always release the local executors, even if the JDBC read fails.
            sparkSession.stop();
        }
    }

    /**
     * Builds a single-row result describing the Spark and Java versions, in
     * the shape {@code LeafRunnableCommand.run} would return.
     *
     * @param sparkSession active session used to read the Spark version
     * @return a one-element Seq containing a single-column Row with the
     *         version string
     */
    //@Override
    public Seq<Row> run(SparkSession sparkSession) {
        String sparkVersion = sparkSession.version();
        String javaVersion = System.getProperty("java.version");
        String output = "Spark Version: " + sparkVersion + " Java Version: " + javaVersion;
        // Row is immutable: the original Row.empty() + mkString(output)
        // discarded the formatted text (mkString only returns a String), and
        // getSeq(0) on the empty row threw. Build a one-column Row holding the
        // output and wrap it in a Scala Seq instead.
        Row row = RowFactory.create(output);
        return CollectionConverters.asScala(Collections.singletonList(row)).toSeq();
    }

    /**
     * Synthetic Scala trait-field setter that {@code Command} would require of
     * a Java implementor. This sketch stores nothing, so the setter is a
     * no-op (the original's {@code x$1.toSeq()} call had no effect either).
     */
    //@Override
    public void org$apache$spark$sql$catalyst$plans$logical$Command$_setter_$nodePatterns_$eq(Seq<Enumeration.Value> x$1) {
        // Intentionally empty: no backing field to assign in this sketch.
    }

    /**
     * Returns the node patterns for this (would-be) command.
     *
     * <p>The original called {@code Row.empty().getSeq(0)}, which throws on an
     * empty row; an explicitly empty Seq is the safe equivalent of the intent.
     *
     * @return an empty Seq of tree-pattern values
     */
    //@Override
    public Seq<Enumeration.Value> nodePatterns() {
        return CollectionConverters.asScala(Collections.<Enumeration.Value>emptyList()).toSeq();
    }
}
