package com.galeno.sparksql02

import org.apache.spark.sql.{DataFrame, Dataset, SaveMode, SparkSession}

import java.util.Properties

/**
 * @Title: C14
 * @Description: Demonstrates writing a DataFrame to various sinks (parquet, text, json, JDBC/MySQL, CSV on HDFS).
 * @author galeno
 * @date 2021/9/5 14:51
 */
object C14 {
  /**
   * Entry point: builds a small in-memory Dataset/DataFrame and demonstrates
   * several DataFrame write paths (parquet/text/json/JDBC shown commented out;
   * CSV-to-HDFS is the active example).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("C14")          // was empty; give the job a recognizable name in the UI
      .master("local")
      .getOrCreate()
    // val, not var: the RDD reference is never reassigned
    val rdd = spark.sparkContext.makeRDD(Seq(
      (1, "aa", 18),
      (2, "bb", 22),
      (3, "cc", 15),
      (4, "dd", 199),
    ))
    import spark.implicits._
    val ds: Dataset[(Int, String, Int)] = rdd.toDS()

    val df2: DataFrame = rdd.toDF("id", "name", "age")

    // df2.write.parquet("data/par/")
    // df2.selectExpr("concat_ws(',',id,name,age)").write.text("data/txt")

    // df2.write.json("/data/json")
    // NOTE(review): hard-coded credentials are for local demo only —
    // move to configuration before any real deployment.
    val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "root")

    /**
     * Multiple save modes for writing to MySQL
     */
    // df2.write.jdbc("jdbc:mysql://localhost:3306/spark","teacher",properties)
    // df2.write.mode(SaveMode.Ignore).jdbc("jdbc:mysql://localhost:3306/spark","teacher",properties)

    df2.write.csv("hdfs://galeno01:8020/apple")

    // Release the session (and underlying SparkContext) instead of relying on JVM exit.
    spark.stop()
  }

}
