package com.bdc.Spark

import java.io.{BufferedReader, FileReader}
import java.util.Properties

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Transfer (interchange) analysis.
  */
object SparkSessionReadMySql {

  // Lazily initialized: the original eager `val` ran readFile during object
  // initialization, so the JVM crashed with FileNotFoundException (the path is
  // still a placeholder) before main() could even start.  With `lazy val` the
  // file is only read if cdict is actually accessed.
  // TODO: replace the placeholder with the real dictionary file path.
  lazy val cdict: String = readFile("所要读取文件的路径！")

  /**
    * Entry point: reads a MySQL table over Spark JDBC, registers it as a
    * temporary view named "passenger", and prints its contents.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .appName("SparkSessionReadMySql") // fixed typo "SparkSesson"
      .master("local[3]")
      .config("spark.debug.maxToStringFields", "1000")
      .getOrCreate()

    try {
      spark.sparkContext.setLogLevel("WARN")

      // JDBC credentials.  NOTE(review): hard-coded credentials are fine for a
      // local demo but should come from configuration in real code.
      val prop: Properties = new Properties()
      prop.setProperty("user", "root")
      prop.setProperty("password", "123456")

      val mysqlDF: DataFrame = spark.read.jdbc(
        "jdbc:mysql://localhost:3306/users?useUnicode=true&characterEncoding=UTF-8",
        "table_name",
        prop)

      mysqlDF.createOrReplaceTempView("passenger")
      spark.sql("select * from passenger").show()
    } finally {
      // Release Spark resources even if the JDBC read or the query fails.
      spark.stop()
    }
  }

  /**
    * Reads the first line of the file at the given path.
    *
    * @param path path of the file to read
    * @return the first line of the file, or null if the file is empty
    *         (java.io.BufferedReader#readLine returns null at end of stream)
    */
  def readFile(path: String): String = {
    val bufferedReader = new BufferedReader(new FileReader(path))
    // try/finally guarantees the reader is closed even if readLine throws;
    // closing the BufferedReader also closes the wrapped FileReader, so the
    // original's separate reader.close() was redundant.
    try bufferedReader.readLine()
    finally bufferedReader.close()
  }

}
