package doit20.sparksql

import java.util.Properties

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}


/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-04-10
 * @desc 将计算结果保存到mysql中去
 */
object Demo13 {

  /**
   * Reads student records (id, name, age, score, gender) from a local CSV
   * file and writes the resulting DataFrame into the MySQL table `abc.stu`
   * through JDBC.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("dsl风格api演示")
      // Small local dataset: a single shuffle partition is enough here.
      // In production override at submit time, e.g.:
      //   spark-submit --conf spark.sql.shuffle.partitions=400 ...
      .config("spark.sql.shuffle.partitions", 1)
      .master("local")
      // NOTE(review): enableHiveSupport() was removed — this job only reads
      // a CSV and writes via JDBC, touching no Hive tables; Hive support
      // requires spark-hive on the classpath and creates a local Derby
      // metastore directory as a side effect.
      .getOrCreate()

    try {
      // Explicit schema for the CSV columns: id,name,age,score,gender
      // (the header row is consumed via option("header", "true")).
      val schema = StructType(Seq(
        StructField("id", DataTypes.IntegerType),
        StructField("name", DataTypes.StringType),
        StructField("age", DataTypes.IntegerType),
        StructField("score", DataTypes.DoubleType),
        StructField("gender", DataTypes.StringType)
      ))

      val frame = spark.read
        .schema(schema)
        .option("header", "true")
        .csv("data/stu2.txt")

      // JDBC connection credentials; consider externalizing these
      // (config file / submit-time args) instead of hard-coding in real jobs.
      val props = new Properties()
      props.setProperty("user", "root")
      props.setProperty("password", "123456")

      // Default save mode is ErrorIfExists: the write fails if table `stu`
      // already exists, so re-runs are not silently destructive.
      frame.write.jdbc("jdbc:mysql://localhost:3306/abc", "stu", props)
    } finally {
      // Always release the SparkSession, even if the read or write fails.
      spark.close()
    }
  }

}
