package com.spark.demo.database

import java.util.{Properties, UUID}

import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SaveMode, SparkSession}

/**
  * Demo job: reads an Oracle table via Spark JDBC, copies it to a second table,
  * then appends ten synthetic rows built from a local RDD.
  */
object SparkReadOracle {

  // NOTE(review): connection details and credentials are hard-coded in source.
  // Move them to configuration / a secrets store before real use.
  private val JdbcUrl = "jdbc:oracle:thin:@47.92.7.76:1627:zhdb"

  def main(args: Array[String]): Unit = {
    // Obtain (or create) the active SparkSession.
    val spark = SparkSession.builder().getOrCreate()

    // JDBC connection properties. Supplying "driver" lets Spark load the
    // Oracle driver itself — this replaces the deprecated
    // Class.forName(...).newInstance() reflection call.
    val properties = new Properties()
    properties.put("user", "kpi")
    properties.put("password", "Zpmckpi1234")
    properties.put("driver", "oracle.jdbc.driver.OracleDriver")

    // Read the source Oracle table into a DataFrame.
    val table = spark.read.jdbc(JdbcUrl, "system_parameter", properties)

    // Print the first rows to stdout (debug aid).
    table.show()

    // Copy the table as-is into system_parameter_copy.
    table.write.mode(SaveMode.Append).jdbc(JdbcUrl, "system_parameter_copy", properties)

    // Build ten synthetic rows.
    // BUG FIX: the original emitted Row(f.ID, f.TYPE) — only 2 values for a
    // 5-column schema — which fails at runtime when the DataFrame is created
    // ("Row length doesn't match schema"). All five Param fields are now
    // supplied, in schema order.
    val users = spark.sparkContext.parallelize(1 to 10)
      .map(_ => Param(UUID.randomUUID().toString, "demo", "demo", "demo", "demo"))
      .map(p => Row(p.ID, p.TYPE, p.NAME, p.KEY, p.VALUE))

    // Schema matching the five fields of Param, in the same order as the Row.
    val schema = StructType(Array(
      StructField("ID", StringType, nullable = true),
      StructField("TYPE", StringType, nullable = true),
      StructField("NAME", StringType, nullable = true),
      StructField("KEY", StringType, nullable = true),
      StructField("VALUE", StringType, nullable = true)))

    // Append the synthetic rows to the copy table.
    spark.sqlContext.createDataFrame(users, schema)
      .write.mode(SaveMode.Append)
      .jdbc(JdbcUrl, "system_parameter_copy", properties)
  }

  /** One row of the SYSTEM_PARAMETER table; field names mirror the DB column names. */
  final case class Param(ID: String, TYPE: String, NAME: String, KEY: String, VALUE: String)

}
