package spark.code.study.io.sink

import java.util.Properties

import org.apache.spark.sql.SparkSession

import scala.util.Random

/**
  * Created by peibin on 2017/2/3.
  */
/**
  * Spark job that generates random rows and appends them to a MySQL table
  * (`dbtest.sbtest`) through the DataFrame JDBC sink.
  *
  * Usage: MysqlSink [slices]
  *   slices — number of RDD partitions (default 20); total row count is
  *            min(100000 * slices, Int.MaxValue).
  */
object MysqlSink {

  /** Row layout matching the target `sbtest` table: int key + two string columns. */
  final case class SbTest(k: Int, c: String, pad: String)

  /** Fixed-length random alphanumeric token — safe to insert under any MySQL charset,
    * unlike `Random.nextString`, which can emit code points the column encoding rejects. */
  private def randomToken(random: Random, length: Int): String =
    random.alphanumeric.take(length).mkString

  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder
      .appName("prepare sql")
      .master("local[*]")
      .getOrCreate()

    val slices = if (args.length > 0) args(0).toInt else 20
    // Compute in Long and cap at Int.MaxValue to avoid Int overflow for large `slices`.
    val n = math.min(100000L * slices, Int.MaxValue).toInt

    // One Random per partition (mapPartitions) instead of one allocation per row.
    val rows = spark.sparkContext.parallelize(1 until n, slices).mapPartitions { iter =>
      val random = new Random()
      iter.map { _ =>
        SbTest(random.nextInt(), randomToken(random, 10), randomToken(random, 10))
      }
    }

    val properties = new Properties()
    properties.setProperty("user", "root")
    // NOTE(review): no "password" property is set — confirm the server accepts
    // passwordless root logins, or add the credential here.
    properties.setProperty("driver", "com.mysql.jdbc.Driver")

    spark.createDataFrame(rows)
      .write
      .mode("append")
      .jdbc("jdbc:mysql://10.199.212.84:14000/dbtest", "sbtest", properties)

    spark.stop()
  }
}
