package com.baicun.wish.spark


import org.apache.spark.sql.SparkSession

/**
 * Batch job that loads the province_admit CSV from HDFS into MySQL.
 *
 * @author baicun
 */
object ProvinceAdmitDataHandle {

  def main(args: Array[String]): Unit = {
    handle()
  }

  /**
   * Reads the province_admit CSV (with header row) from HDFS, prints a sample and
   * the inferred schema, then writes the full DataFrame to the MySQL `wish` database
   * table `province_admit` over JDBC.
   */
  def handle(): Unit = {

    // Source CSV on HDFS; the first row is treated as the header.
    val filenamePath = "hdfs://master:9000/baicun/province_admit.csv"

    val sparkSession = SparkSession.builder()
      .appName("ProvinceAdmitDataHandle")
      .master("local")
      .getOrCreate()

    try {
      val dataFrame = sparkSession.read.option("header", value = true).csv(filenamePath)

      // Quick sanity check of the loaded data before writing it out.
      dataFrame.show()
      dataFrame.printSchema()

      // Remote database: 112.124.18.46 (use localhost for local testing).
      // NOTE(review): credentials are hard-coded; move them to configuration/secrets
      // before running outside a development environment.
      dataFrame.write.format("jdbc")
        // Fixed typo "characterEcoding" -> "characterEncoding": with the misspelled
        // key the MySQL driver silently ignored the option, so non-ASCII (UTF-8)
        // text could be corrupted on write.
        .option("url", "jdbc:mysql://112.124.18.46:3306/wish?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=Asia/Shanghai")
        .option("driver", "com.mysql.cj.jdbc.Driver")
        .option("user", "root")
        .option("password", "root")
        .option("dbtable", "province_admit")
        .save() // default SaveMode.ErrorIfExists: fails if the target table already exists
    } finally {
      // Always release the Spark context, even if the read or write fails.
      sparkSession.stop()
    }
  }
}
