package com.shujia.onhbase

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo3Mysql {

  /**
    * Reads student records from a local text file ("spark/data/students.txt",
    * CSV: id,name,age,gender,clazz) and batch-inserts them into the MySQL
    * table `student` using one JDBC connection per RDD partition.
    *
    * Target table DDL:
    * {{{
    * DROP TABLE IF EXISTS `student`;
    * CREATE TABLE `student` (
    *   `id` varchar(255) CHARACTER SET utf8 DEFAULT NULL,
    *   `name` varchar(255) DEFAULT NULL,
    *   `age` int(11) DEFAULT NULL,
    *   `gender` varchar(255) DEFAULT NULL,
    *   `clazz` varchar(255) DEFAULT NULL
    * ) ENGINE=MyISAM DEFAULT CHARSET=utf8;
    * }}}
    */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setMaster("local").setAppName("onhabse")

    val sc: SparkContext = new SparkContext(conf)

    try {
      val student: RDD[String] = sc.textFile("spark/data/students.txt")

      // Open one connection per partition (not per record) to amortize the
      // JDBC connection cost across all rows in the partition.
      student.foreachPartition(iter => {

        // 1. Load the JDBC driver class.
        Class.forName("com.mysql.jdbc.Driver")

        // 2. Open the connection.
        val connection: Connection = DriverManager.getConnection(
          "jdbc:mysql://master:3306/test?characterEncoding=utf-8", "root", "123456")

        try {
          // 3. Prepare a parameterized insert (also avoids SQL injection).
          val stat: PreparedStatement = connection.prepareStatement(
            "insert into student(id,name,age,gender,clazz) values(?,?,?,?,?)")

          try {
            iter.foreach(line => {
              val split: Array[String] = line.split(",")

              // Bind column values: id, name, age, gender, clazz.
              stat.setString(1, split(0))
              stat.setString(2, split(1))
              stat.setInt(3, split(2).toInt)
              stat.setString(4, split(3))
              stat.setString(5, split(4))

              // Queue the row into the current batch.
              stat.addBatch()
            })

            // Execute the whole batch in one round trip.
            stat.executeBatch()
          } finally {
            // BUGFIX: statement was previously never closed.
            stat.close()
          }
        } finally {
          // BUGFIX: close the connection even if the insert fails,
          // so failed partitions do not leak JDBC connections.
          connection.close()
        }
      })
    } finally {
      // BUGFIX: shut down the SparkContext when the job finishes or fails.
      sc.stop()
    }

  }

}
