package com.laosg.spark.base.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{Column, SparkSession}

/**
  * Created by kaimin on 27/5/2019.
  * time : 13:23
  */
object SparkSqlOpe {

  // JDBC connection settings for the source Oracle instance.
  // NOTE(review): credentials are hard-coded; consider externalizing to config.
  val driver = "oracle.jdbc.driver.OracleDriver"
  val connection = "jdbc:oracle:thin:@192.168.2.166:1521:orcl"
  val dbName = "emrqc"
  val dbPwd = "emrqc"

  /** Projection of the join result: patient NAME plus OPER_NAME of the operation record. */
  case class PeopleSql(NAME: String, OPER_NAME: String)

  /**
    * Entry point: registers the two Oracle tables as Spark temp views,
    * left-joins medical-record first pages with operation records on HP_S_NO,
    * and prints the operations recorded for one patient.
    */
  def main(args: Array[String]): Unit = {

    val sparkSession = SparkSession.builder().appName("ruan_sql").getOrCreate()

    // Register temp views for both source tables via the shared helper
    // (it performs the JDBC reads itself, so no separate read is needed here).
    SparkSqlUtil.creatTmpAndView(
      connection,
      dbName,
      dbPwd,
      List("FIRSTPAGEMEDICALRECORD", "MR_OPERATIONRECORD"),
      sparkSession)

    sparkSession.sql("show tables").show()

    import sparkSession.implicits._

    val firstDF = sparkSession.sql("select * from FIRSTPAGEMEDICALRECORD")
    val firstOpDF = sparkSession.sql("select * from MR_OPERATIONRECORD")

    // Left join on the admission number, then keep only the two columns we report.
    val unionDF = firstDF
      .join(firstOpDF, firstDF("HP_S_NO") === firstOpDF("HP_S_NO"), "left")
      .select("NAME", "OPER_NAME")

    val unionDs = unionDF.as[PeopleSql]
    // OPER_NAME is null for left-join misses (patients with no operation record);
    // filter those out and show only the named patient's operations.
    unionDs.filter(d => d.OPER_NAME != null && d.NAME == "周长龙").show()

    // Release the session and its cluster resources before exiting.
    sparkSession.stop()
  }
}
