package com.demo.mysql

import java.sql.{DriverManager, ResultSet}

import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: read rows from MySQL with [[org.apache.spark.rdd.JdbcRDD]], map them to
 * a DataFrame with an explicit schema, register a temp view and query it.
 *
 * Uses an explicit `main` instead of `extends App`: the `App` trait's delayed
 * initialization interacts badly with Spark closure serialization and
 * initialization order.
 */
object JDBCTest {

  /**
   * Connection factory passed to JdbcRDD; invoked on executors, so it lives on
   * a top-level object to stay serialization-friendly.
   *
   * NOTE(review): `com.mysql.jdbc.Driver` is the legacy class name; MySQL
   * Connector/J 8+ uses `com.mysql.cj.jdbc.Driver` — confirm the driver
   * version on the classpath before changing it.
   * NOTE(review): credentials are hard-coded for the demo; externalize them
   * (config/env) in real code.
   */
  def getConnection() = {
    // Class.forName alone triggers the driver's static registration block;
    // the deprecated-and-unnecessary newInstance() call was removed.
    Class.forName("com.mysql.jdbc.Driver")
    DriverManager.getConnection("jdbc:mysql://10.82.27.22:3306/test", "root", "123456")
  }

  /** Maps one JDBC result row to an (id, name) pair. */
  def flatValue(result: ResultSet) = {
    (result.getInt("id"), result.getString("name"))
  }

  def main(args: Array[String]): Unit = {

    // Windows-only workaround: point Hadoop at a local winutils installation.
    System.setProperty("hadoop.home.dir", "E:\\hadoop-common-2.2.0-bin-master")

    val conf = new SparkConf().setAppName("MySQL-Demo")
      .setMaster("local[2]")
    val sc = new SparkContext(conf)

    // 1: read from MySQL. The two '?' placeholders are bound to the lower/upper
    // bounds (29, 32); 2 is the number of partitions the range is split into.
    val data = new JdbcRDD(
      sc,
      getConnection,
      "select id,name from test.person where id>=? and id<=?",
      29,
      32,
      2,
      flatValue
    )
    //  println(data.collect().toList)

    // 2: build the schema, specifying each field's StructField directly.
    //  (Alternative: derive it from a space-separated field-name string.)
    //  val schemaString = "name age city salary"
    //  val fields = schemaString.split(" ").map {
    //    filedName => StructField(filedName, StringType, nullable = true)
    //  }
    //  val schema = StructType(fields)
    val schema = StructType(
      List(
        StructField("name", StringType, true),
        StructField("age", IntegerType, true),
        StructField("city", StringType, true),
        StructField("salary", IntegerType, true)
      )
    )

    // 3: convert each record to a Row, mapping the RDD to a rowRDD.
    // NOTE(review): this splits the `name` column value on spaces and expects
    // exactly 4 tokens (name age city salary) — i.e. the MySQL `name` column
    // is assumed to hold a space-separated record. Verify against the data;
    // rows with fewer tokens will throw at runtime.
    val rowRdd = data.map(_._2.split(" ")).map(attributes => Row(attributes(0), Integer.valueOf(attributes(1)), attributes(2), Integer.valueOf(attributes(3))))

    // 4: create the DataFrame. getOrCreate reuses the SparkContext created above.
    val spark: SparkSession = SparkSession.builder
      .appName("MySQL-Demo")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate
    // Apply the schema to the rowRDD.
    val personDF = spark.createDataFrame(rowRdd, schema)
    println("DataFrame打印如下：")
    personDF.show()

    // Register views:
    // Global temp view — shared across sessions, survives the creating
    // SparkSession being stopped:
    //personDF.createGlobalTempView("GlobalTempView_Person")
    // Temp view — replaced if it exists; scoped to this SparkSession's lifetime.
    personDF.createOrReplaceTempView("TempView_Person")
    //personDF.createTempView("TempView_Person") // throws if the view already exists

    // Global temp views live in the `global_temp` database; omitting the
    // prefix raises "view not found":
    //spark.sql("select * from global_temp.GlobalTempView_Person").show(2)
    // Temp views need no database prefix.
    spark.sql("select * from TempView_Person order by salary").show(5)

    sc.stop()
    spark.stop()
  }
}
