package cn.ipanel.bigdata.source.mysql

import cn.ipanel.bigdata.boot.source.genre.Mysql
import org.apache.spark.sql.Dataset

/**
 * Author: lzz
 * Date: 2021/12/21 17:01
 */
/**
 * MySQL source descriptor for table `itv_dtvs.t_column_info`.
 *
 * Reference join (documentation only):
 *   select c.f_column_id, c.f_showname, c.f_column_name, p.f_column_origin_id, c.f_all_parent_id
 *   from t_column_info c, t_column_program_property p
 *   where c.f_column_id = p.f_column_id;
 */
object ColumnInfo extends Mysql("itv_dtvs", "t_column_info") {

  // Although only these few columns are declared here, all columns of the
  // underlying MySQL table remain queryable through the source.
  // NOTE(review): the SQL above uses `f_showname` while this constant is
  // `f_show_name` — confirm the actual column name against the DB schema.
  final val F_COLUMN_ID       : String = "f_column_id"
  final val F_COLUMN_NAME     : String = "f_column_name"
  final val F_SHOW_NAME       : String = "f_show_name"
  final val F_ALL_PARENT_ID   : String = "f_all_parent_id"
  final val F_COLUMN_STATUS   : String = "f_column_status"
  final val F_COLUMN_LEVEL    : String = "f_column_level"

  /** Columns selected from the table; must stay in sync with [[Table]]. */
  override def getTBColumns: Seq[String] = Table.FIELDS

  /** No DDL is issued for this source. */
  override def buildTable: String = ""

  /** An empty, correctly-typed Dataset, useful as a fallback/default value. */
  override def emptyTable: Dataset[Table] = {
    import IMPLICITS._
    spark.createDataset(spark.sparkContext.emptyRDD[Table])
  }

  /**
   * Row type for `t_column_info`; field names mirror the MySQL column names
   * so Spark's encoder can map them positionally by name.
   */
  case class Table(var f_column_id: BigInt,
                   var f_column_name: String,
                   var f_show_name: String,
                   var f_all_parent_id: String,
                   var f_column_status: Int,
                   var f_column_level: Int)

  object Table {

    // One entry per Table field, in declaration order.
    // BUG FIX: F_COLUMN_STATUS was listed twice and F_COLUMN_LEVEL was
    // missing, so `f_column_level` was never selected by getTBColumns.
    final lazy val FIELDS = {
      Seq(F_COLUMN_ID, F_COLUMN_NAME, F_SHOW_NAME, F_ALL_PARENT_ID, F_COLUMN_STATUS, F_COLUMN_LEVEL)
    }
  }
}