package com.sys.tdhclient.startapp

import java.util.Properties

import com.sys.tdhclient.utils.SparkSc
import org.apache.spark.SparkContext
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * Spark driver that reads a single Oracle table over JDBC, registers it as a
 * temporary view, and prints its contents via `show()`.
 *
 * Connection settings are loaded from the application properties supplied by
 * [[com.sys.tdhclient.utils.SparkSc]].
 */
object SparkRdOracle {
  // Shared Spark handles and connection settings, resolved once at object init.
  // NOTE(review): Properties.getProperty returns null for a missing key, so any
  // absent setting surfaces only later as a runtime failure — consider
  // validating these up front.
  private val sparkContext: SparkContext = SparkSc.getSparkContext()
  private val sparkSession: SparkSession = SparkSc.getSparkSession()
  private val properties: Properties = SparkSc.getProperties()
  private val hdfs_path: String = properties.getProperty("hdfs_input_data_url")
  private val oracle_url: String = properties.getProperty("oracle_url")
  private val oracle_user: String = properties.getProperty("oracle_user")
  private val oracle_passw: String = properties.getProperty("oracle_passw")
  private val oracle_rd_user: String = properties.getProperty("oracle_rd_user")
  private val table_name: String = properties.getProperty("table_name")
  private val sysname: String = properties.getProperty("sysname")
  // NOTE(review): property key "dhfs_output_data_url" looks like a typo for
  // "hdfs_output_data_url" — confirm against the properties file before renaming.
  private val dhfs_output_data_url: String = properties.getProperty("dhfs_output_data_url")

  /**
   * Entry point: loads `<oracle_rd_user>.<table_name>` from Oracle, exposes it
   * as the temp view `<sysname><table_name>`, re-registers the query result
   * under the alias "aaaa", and displays it.
   */
  def main(args: Array[String]): Unit = {
    // Full-table read over JDBC using the classic Oracle thin driver.
    val jdbcDF = sparkSession.read
      .format("jdbc")
      .option("url", oracle_url)
      .option("dbtable", s"$oracle_rd_user.$table_name")
      .option("user", oracle_user)
      .option("password", oracle_passw)
      .option("driver", "oracle.jdbc.driver.OracleDriver")
      .load()

    // Register the frame under a session-scoped view and query it back.
    // NOTE(review): the concatenated name is spliced directly into SQL — it
    // must form a valid identifier (no dots/spaces); values come from trusted
    // config, but confirm sysname/table_name are sanitized.
    val viewName = sysname + table_name
    jdbcDF.createOrReplaceTempView(viewName)
    val result = sparkSession.sql(s"SELECT * FROM $viewName")

    // Second alias kept for parity with the original flow; it performs no
    // additional transformation, only a re-registration and display.
    result.createOrReplaceTempView("aaaa")
    val aaaa_result = sparkSession.sql("SELECT * FROM aaaa")
    aaaa_result.show()

    // Output-to-storage logic (local CSV / JSON written to
    // dhfs_output_data_url) was prototyped here previously and is currently
    // disabled; re-enable via result.coalesce(1).write... if needed.
  }

}
