package edu.csl.study.spark.basic

import java.io.File
import java.sql.{Connection, DriverManager, PreparedStatement}

import scala.util.control.NonFatal

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Local-mode Spark demo: reads whitespace-delimited person records from a text
 * file and batch-inserts them into a MySQL table via JDBC, illustrating
 * broadcast variables (one read-only copy per executor) and long accumulators
 * (distributed counter read back on the driver).
 *
 * Expected input line format: name pass age sex phone
 * (age and sex must parse as Int — see the ps.setInt calls below).
 */
object Core_MYSQL_Scala_local {

  // Directory holding test data files. File.separator keeps this portable:
  // identical to the original "\\" on Windows, correct on Unix-like systems.
  val rootDir = System.getProperty("user.dir") + File.separator + "testFile" + File.separator

  def main(args: Array[String]): Unit = {

    // 1. Build the SparkConf: application name and master address.
    //    local[2] runs in-process with two worker threads (cluster form would
    //    be e.g. spark://centos20:7077).
    val sparkConf: SparkConf = new SparkConf().setAppName("Mysql").setMaster("local[2]")

    // 2. Build the SparkContext — the entry point of every Spark program;
    //    internally it constructs the DAGScheduler and TaskScheduler.
    val sc = new SparkContext(sparkConf)

    // Reduce log noise.
    sc.setLogLevel("warn")

    // 3. Read the data file as an RDD of lines.
    val data: RDD[String] = sc.textFile(rootDir + "persons.txt")

    println(data.partitions.length)
    // NOTE: in local mode these println calls appear in the driver console;
    // on a cluster they would go to the executors' stdout instead.
    data.foreach(line => {

      println(line)

    })
    // Broadcast variable example: the value is shipped once per executor
    // instead of once per task, reducing network transfer.
    val remark = "测试spark连JDBC"
    // Broadcast the driver-side value to all executors.
    val broadCast = sc.broadcast(remark)
    println("-----------------------------------")
    // Accumulator: a distributed counter, initialized to 0, incremented on
    // executors and read back on the driver.
    val accumulator = sc.longAccumulator("count")

    // One JDBC connection per partition (not per record) — foreachPartition
    // amortizes connection setup across all rows of the partition.
    data.foreachPartition(iter => {
      val connection: Connection = DriverManager.getConnection("jdbc:mysql://127.0.0.1:3306/bigdata?useUnicode=true&characterEncoding=utf8", "root", "admin")
      val SQL = "INSERT INTO `bigdata`.`person` (`name`, `pass`, `age`, `sex`, `phone`, `remark`) VALUES (?, ?, ?, ?, ?,?)"
      // Declared outside try so it can be closed in finally — the original
      // leaked the PreparedStatement (only the Connection was closed).
      var ps: PreparedStatement = null
      try {
        ps = connection.prepareStatement(SQL)
        iter.foreach(line => {

          val array = line.split(" ")
          println(line)

          ps.setString(1, array(0))
          ps.setString(2, array(1))
          ps.setInt(3, array(2).toInt)
          ps.setInt(4, array(3).toInt)
          ps.setString(5, array(4))
          ps.setString(6, broadCast.value)
          // Queue the row for batch submission.
          ps.addBatch()
          accumulator.add(1)
        })
        // Submit the whole batch in one round trip.
        ps.executeBatch()
      } catch {
        // NonFatal keeps OutOfMemoryError / InterruptedException propagating;
        // the original `case e: Exception` swallowed recoverable errors only,
        // and this preserves that best-effort behavior for them.
        case NonFatal(e) => e.printStackTrace()
      } finally {
        if (ps != null) {
          ps.close()
        }
        if (connection != null) {
          connection.close()
        }
      }
    })

    // Accumulator value is only reliable here, after the action completed.
    println("-----------------------------------" + accumulator.value)


    // 8. Shut down the SparkContext.
    sc.stop()



  }
}
