package com.study.sql

import com.study.utils.SparkUtils

import java.util.Properties

/**
 * @program: spark2.3-study
 * @author: jzhou
 * @date: 2022-10-27 15:35
 * @version: 1.0
 * @description: 探究 spark 原始 jdbc 接口实现方式
 * 2022-10-27 看了一会真是头晕，如同低级炼药师看了高品阶药房 ^-^
 * */
object JDBCSink {

	/**
	 * Builds a small in-memory DataFrame and writes it to a MySQL table through
	 * Spark's plain `DataFrameWriter.jdbc` sink, to explore how the raw JDBC
	 * write path behaves.
	 *
	 * Note: `write.jdbc` uses the default SaveMode.ErrorIfExists, so the run
	 * fails if table `sink_test` already exists.
	 */
	def main(args: Array[String]): Unit = {
		val spark = SparkUtils.sparkSessionBuild()
		import spark.implicits._
		val df1 = Seq((1, 2, 3), (1, 2, 4), (1, 3, 3), (2, 2, 3),
			(4, 5, 6), (5, 5, 8), (5, 5, 9)).toDF("col1", "col2", "col3")

		// Each RDD partition opens its own JDBC connection during the write,
		// so the partition count directly controls write parallelism.
		println(df1.rdd.getNumPartitions)

		val url = "jdbc:mysql://172.16.129.24:3306/test?characterEncoding=UTF-8"
		val table = "sink_test"
		val jdbcProperties = new Properties()
		jdbcProperties.setProperty("driver", "com.mysql.jdbc.Driver")
		// SECURITY: credentials are hard-coded in source; for anything beyond a
		// study script, load them from configuration or environment instead.
		jdbcProperties.setProperty("user", "root")
		jdbcProperties.setProperty("password", "123456MySQL_")
		try {
			df1.write.jdbc(url, table, jdbcProperties)
		} finally {
			// Fix: the original never stopped the session, leaking the Spark
			// context and potentially keeping the JVM alive after the job ends.
			spark.stop()
		}
	}

}
