package cn.darksoul3.spark.read

import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}

import java.util.Properties

object ReadTsv {

  /**
   * Loads a tab-separated file (IMDb `title.basics.tsv` style) into a
   * DataFrame and appends a single sample row into the MySQL table `titles`
   * via JDBC — effectively a smoke test of the JDBC sink.
   *
   * Usage: ReadTsv [tsvPath] [jdbcUrl] [user] [password]
   * Any missing argument falls back to the original hard-coded default, so
   * the previous zero-argument invocation keeps working unchanged.
   */
  def main(args: Array[String]): Unit = {

    // Allow the hard-coded location/credentials to be overridden from the
    // command line; defaults preserve the original behavior.
    val tsvPath = args.lift(0).getOrElse("C:\\Users\\cary2\\Desktop\\title.basics.tsv")
    val jdbcUrl = args.lift(1).getOrElse(
      "jdbc:mysql://192.168.1.51:3306/imdb?characterEncoding=utf8&useUnicode=true&rewriteBatchedStatements=true")
    val jdbcUser     = args.lift(2).getOrElse("root")
    val jdbcPassword = args.lift(3).getOrElse("root")

    val ss: SparkSession = SparkSession.builder()
      .appName("ReadTsv")
      .master("local[*]")
      .getOrCreate()

    try {
      // NOTE: inferSchema costs an extra full pass over the file; acceptable
      // for a one-off import, but consider an explicit schema for large inputs.
      val titlesDataFrame: DataFrame = ss.read.format("csv")
        .option("inferSchema", "true")
        .option("sep", "\t")
        .option("header", "true")
        .load(tsvPath)

      val props = new Properties()
      // "com.mysql.jdbc.Driver" has been deprecated since MySQL Connector/J 8;
      // "com.mysql.cj.jdbc.Driver" is the current driver class name.
      props.setProperty("driver", "com.mysql.cj.jdbc.Driver")
      props.setProperty("user", jdbcUser)
      props.setProperty("password", jdbcPassword)

      // Append exactly one row, matching the original implementation.
      titlesDataFrame.limit(1)
        .write
        .mode(SaveMode.Append)
        .jdbc(jdbcUrl, "titles", props)
    } finally {
      // Always release the SparkSession, even if the read or write fails;
      // the original leaked it on any exception before ss.stop().
      ss.stop()
    }
  }
}
