import java.util.Properties

import org.apache.spark.sql.{SaveMode, SparkSession}


object exportData {

  // 1. Export wordcount data to MySQL — placeholder, not yet implemented.
  def exportWc(): Unit = {

  }

  /**
   * Entry point: reads a header-ed CSV result set and overwrites the MySQL
   * table `job.cate_wc` with it.
   *
   * CLI arguments (all four required to override the local defaults):
   *   args(0) = input CSV path, args(1) = MySQL host,
   *   args(2) = MySQL user,    args(3) = MySQL password
   */
  def main(args: Array[String]): Unit = {
    // BUGFIX: the original guard was `args.length > 0` but then read
    // args(1..3), so running with 1-3 arguments crashed with
    // ArrayIndexOutOfBoundsException. Require all four before using any.
    val (data_path, host, user, password) =
      if (args.length >= 4) {
        println("使用命令行路径和mysql用户名:" + args(0))
        (args(0), args(1), args(2), args(3))
      } else {
        // Local-run defaults (same values as before).
        ("result/cate_wc/*", "localhost", "root", "root")
      }

    val spark = SparkSession.builder()
      .master("local[2]").appName("jobClean").getOrCreate()
    spark.sparkContext.setLogLevel("WARN")

    try {
      // First CSV row supplies the column names used for the JDBC table schema.
      val data = spark.read.option("header", "true").csv(data_path)

      val prop = new Properties
      prop.setProperty("user", user)
      // NOTE(review): com.mysql.jdbc.Driver is the legacy driver class name;
      // MySQL Connector/J 8.x renamed it to com.mysql.cj.jdbc.Driver —
      // confirm which connector version is on the classpath.
      prop.setProperty("driver", "com.mysql.jdbc.Driver")
      prop.setProperty("password", password)

      // SaveMode.Overwrite drops and recreates the target table on every run.
      data.write
        .mode(SaveMode.Overwrite)
        .jdbc(s"jdbc:mysql://${host}:3306/job?useUnicode=true&characterEncoding=utf-8", "cate_wc", prop)
    } finally {
      // Always release the local Spark context, even if the export fails
      // (the original leaked it — spark.stop() was never called).
      spark.stop()
    }
  }
}
