package com.xxxx.sqlTest;

import org.apache.spark.sql.*;
import scala.math.Ordering;

import javax.xml.transform.Result;
import java.text.Format;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @program: day0316
 * @description:
 * @author: CoreDao
 * @create: 2021-03-16 20:28
 **/

public class CreateDS {

    /**
     * Demo: reading from and writing to MySQL through Spark SQL.
     *
     * <p>Shows two read styles ({@code jdbc(url, table, props)} and
     * {@code format("jdbc").options(...)}) plus one JDBC write of a join result.
     * Default shuffle parallelism is 200 partitions.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("ds")
                .master("local")
                .getOrCreate();

        /*
         * Parquet (columnar storage) example — kept for reference:
         * json = format("json").load(path); parquet = format("parquet").save(path)
         */
        /*Dataset<Row> json = spark.read().json("src/main/resources/data/json");
        json.write().mode(SaveMode.ErrorIfExists).parquet("src/main/resources/data/parquet");

        spark.read().parquet("src/main/resources/data/parquet").show();*/

        // Shared JDBC credentials for the first read path.
        Properties properties = new Properties();
        properties.put("user", "root");
        properties.put("password", "root");

        // Read style 1: jdbc(url, table, props).
        // FIX: register the view under the table's own name. The original
        // swapped the two view names ("person" table -> view "score" and
        // vice versa), so the SQL below resolved person.* against the score
        // table and would fail / return wrong data.
        Dataset<Row> person = spark.read().jdbc("jdbc:mysql://192.168.11.201:3306/spark", "person", properties);
        person.show();
        person.createOrReplaceTempView("person");

        // Read style 2: format("jdbc") with an options map.
        // Use the parameterized interface type instead of a raw HashMap.
        Map<String, String> options = new HashMap<>();
        options.put("url", "jdbc:mysql://192.168.11.201:3306/spark");
        options.put("dbtable", "score");
        options.put("driver", "com.mysql.jdbc.Driver");
        options.put("user", "root");
        options.put("password", "root");
        Dataset<Row> score = spark.read().format("jdbc").options(options).load();
        score.createOrReplaceTempView("score");

        // Join the two views; keep only scores above 81.
        Dataset<Row> result = spark.sql("select person.id,person.name,person.age,score.score " +
                "from person,score " +
                "where person.id = score.id and score.score > 81 ");

        // Write the join result back to MySQL, replacing any existing table.
        result.write().mode(SaveMode.Overwrite).format("jdbc")
                .option("url", "jdbc:mysql://192.168.11.201:3306/spark")
                .option("dbtable", "result")
                .option("driver", "com.mysql.jdbc.Driver")
                .option("user", "root")
                .option("password", "root").save();

        spark.stop();
    }
}
