package com.ywps.vaas.demoTask.dao

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

import java.util

/**
 * Demo persistence layer: polls pending business SQL subscriptions from MySQL,
 * executes them against Hive, and writes results/status back to MySQL.
 */
class WriteJdbcDao extends TDao {

  /**
   * Picks at most one pending (status == 0) row from the `demo_t` subscription
   * table in MySQL, runs its SQL against the `vaas_demo` Hive database,
   * persists the result set (tagged with the subscription id) to the row's
   * temp MySQL table, and marks the subscription row as processed (status = 1).
   *
   * If no row is pending, simply prints the empty dataset.
   */
  def writeJdbc(): Unit = {
    val spark: SparkSession = EnvUtil.take()
    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Subscribe to the business-SQL table in MySQL.
    val frame: DataFrame = readJdbc("demo_t", "vaas_demo")
    val ds: Dataset[DemoT] = frame.as[DemoT]

    // Select one pending business SQL. NOTE: `limit(1)` without an ordering is
    // non-deterministic across actions, so cache the result to guarantee that
    // show/collect and the status update below all see the SAME row.
    val preBusinessSql: Dataset[DemoT] = ds.filter($"status" === 0).limit(1).cache()

    try {
      // Single action: collect the (at most one) pending row. This replaces
      // the former separate count() job, halving the JDBC reads.
      val pending: util.List[DemoT] = preBusinessSql.collectAsList()

      if (!pending.isEmpty) {
        val task: DemoT = pending.get(0)
        val businessSql: String = task.operate
        val businessId: Int = task.id
        val businessTmpTableName: String = task.tmpTableName

        // Same row with its status flipped to 1 (processed); written back at the end.
        val updateDf: DataFrame = preBusinessSql.drop("status").withColumn("status", typedLit(1))
        updateDf.show()

        // Run the subscribed SQL against its Hive database.
        spark.sql("use vaas_demo")
        val dataFrame: DataFrame = spark.sql(businessSql)
        val dsMockT: Dataset[DemoMockT] = dataFrame.as[DemoMockT]

        // Attach the subscription primary key to every result row.
        val dsWithBusId: Dataset[DemoMockTwithBusId] =
          dsMockT.withColumn("demoTid", typedLit(businessId)).as[DemoMockTwithBusId]
        dsWithBusId.show()

        // Persist the result set to MySQL with an auto-increment id column.
        val resultDf: DataFrame = addAutoIncreaceIdColumn(dsWithBusId.toDF(), spark, "autoId")
        resultDf.show()
        saveDFtoDBOverrideTable(businessTmpTableName, spark.sqlContext, resultDf)

        // Mark the subscription row as processed in MySQL.
        insertOrUpdateDFtoDBUsePool("demo_t", "vaas_demo", updateDf, updateDf.columns)
      } else {
        // Nothing pending: just print the (empty) dataset.
        preBusinessSql.show()
      }
    } finally {
      // Release the cached dataset even if the business SQL fails.
      preBusinessSql.unpersist()
    }
  }
}
