package cn.itcast.tags.models

import cn.itcast.tags.config.ModelConfig
import cn.itcast.tags.meta.{HBaseMeta, MetaParse}
import cn.itcast.tags.utils.SparkUtils
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.storage.StorageLevel


/**
 * 标签模型开发基类，各个标签模型继承此类，实现其中打标签方法doTag
 */
/**
 * Base class for tag-model development. Each concrete tag model extends
 * this class and implements the tagging logic in [[doTag]].
 *
 * Template-method pipeline (see [[executeModel]]):
 *   1. init the SparkSession;
 *   2. load the tag definitions from MySQL (tbl_basic_tag);
 *   3. load the business data described by the tag's rule;
 *   4. compute the tags (subclass hook);
 *   5. save the resulting profile tags to HBase (tbl_profile).
 *
 * @param modelName human-readable name of the tag model
 * @param modelType category of the model (rule / statistics / mining)
 */
abstract class AbstractTagModel(modelName: String, modelType: ModelType) extends Logging {

  // Run as the configured filesystem user so HDFS/HBase access is authorized.
  System.setProperty("user.name", ModelConfig.FS_USER)
  System.setProperty("HADOOP_USER_NAME", ModelConfig.FS_USER)

  // Shared SparkSession; assigned in init() and reused by every pipeline step.
  var spark: SparkSession = _

  /**
   * Step 1: build the SparkSession for this model.
   *
   * @param isHive whether to enable Hive support
   */
  def init(isHive: Boolean = false): Unit = {
    spark = SparkUtils.createSparkSession(this.getClass, isHive)
  }

  /**
   * Step 2: load the tag-definition rows for the given business tag id
   * from the MySQL basic-tag table (tbl_basic_tag) over JDBC.
   *
   * @param tagId business tag id
   * @return DataFrame containing the tag and its attribute (child) tags
   */
  def getTagData(tagId: Long): DataFrame = {
    spark.read.format("jdbc")
      .option("driver", ModelConfig.MYSQL_JDBC_DRIVER)
      .option("url", ModelConfig.MYSQL_JDBC_URL)
      .option("dbtable", ModelConfig.tagTable(tagId))
      .option("user", ModelConfig.MYSQL_JDBC_USERNAME)
      .option("password", ModelConfig.MYSQL_JDBC_PASSWORD)
      .load()
  }

  /**
   * Step 3: load the business data described by the tag's rule field.
   * The rule string is parsed into a parameter map, then dispatched on
   * its inType (e.g. HBase, RDBMS) by MetaParse.
   *
   * @param tagDF tag-definition data returned by [[getTagData]]
   * @return business data to be tagged
   */
  def getBusinessData(tagDF: DataFrame): DataFrame = {
    // Parse the tag's rule string into key/value parameters.
    val rulesMap: Map[String, String] = MetaParse.parseRuleToParams(tagDF)
    // Load the business data from whichever source the parameters describe.
    MetaParse.parseMetaToData(spark, rulesMap)
  }

  /**
   * Step 4: compute the profile tags for the business data.
   * Implemented by each concrete tag model.
   *
   * @param businessDF business data from [[getBusinessData]]
   * @param tagDF      tag definitions from [[getTagData]]
   * @return tagged user data, or null to signal "nothing to save"
   */
  def doTag(businessDF: DataFrame, tagDF: DataFrame): DataFrame

  /**
   * Step 5: save the computed profile tags to the HBase profile table
   * (tbl_profile) via the custom "hbase" data source.
   *
   * @param modelDF tagged data produced by [[doTag]]
   */
  def saveTag(modelDF: DataFrame): Unit = {
    modelDF.write
      .mode(SaveMode.Overwrite)
      .format("hbase")
      .option("zkHosts", ModelConfig.PROFILE_TABLE_ZK_HOSTS)
      .option("zkPort", ModelConfig.PROFILE_TABLE_ZK_PORT)
      .option("hbaseTable", ModelConfig.PROFILE_TABLE_NAME)
      .option("family", ModelConfig.PROFILE_TABLE_FAMILY_USER)
      .option("rowKeyColumn", ModelConfig.PROFILE_TABLE_ROWKEY_COL)
      .save()
  }

  /** Release resources: stop the SparkSession if it was created. */
  def close(): Unit = {
    if (null != spark) {
      spark.stop()
    }
  }

  /**
   * Template method fixing the execution order of the pipeline steps.
   * The SparkSession is always closed, even when a step fails.
   *
   * @param tagId  business tag id to compute
   * @param isHive whether the SparkSession should enable Hive support
   */
  def executeModel(tagId: Long, isHive: Boolean = false): Unit = {
    import scala.util.control.NonFatal
    init(isHive)
    try {
      // Load and cache the tag definitions: they are consulted by both
      // getBusinessData and doTag, so avoid recomputing the JDBC read.
      val tagDF: DataFrame = getTagData(tagId)
      tagDF.persist(StorageLevel.MEMORY_AND_DISK)
      tagDF.count() // action to materialize the cache

      val businessDF: DataFrame = getBusinessData(tagDF)
      val modelDF: DataFrame = doTag(businessDF, tagDF)
      // A model may return null to signal there is nothing to save.
      if (null != modelDF) saveTag(modelDF)
      tagDF.unpersist()
    } catch {
      // Log through the Logging trait instead of printStackTrace so the
      // failure reaches the configured log sink; NonFatal lets fatal
      // errors (OOM, interrupts) propagate instead of being swallowed.
      case NonFatal(e) =>
        logError(s"Tag model [$modelName] failed for tagId=$tagId", e)
    } finally {
      close()
    }
  }

}
