import base.BaseModel
import org.apache.spark.sql.{DataFrame, SparkSession}

object test extends BaseModel {

  /** Name under which this Spark application runs. */
  override def setAppName: String = "JobTag"

  /** Identifier of the level-four tag this model computes. */
  override def setFourId: String = "66"

  /**
   * Computes the job tag for every user by joining the level-five tag
   * rules against the HBase user data.
   *
   * Each level-five tag row carries a `rule` value; a user whose `job`
   * column matches that rule receives the corresponding tag id.
   *
   * @param spark    active session (unused here; kept for the BaseModel contract)
   * @param fiveTagDF level-five tag definitions with `id` and `rule` columns
   * @param HBaseDF   user data read from HBase with `id` and `job` columns
   * @return a DataFrame of (userId, tagsId) pairs
   */
  override def getNewTag(spark: SparkSession, fiveTagDF: DataFrame, HBaseDF: DataFrame): DataFrame = {
    // Qualified col() references keep the two `id` columns unambiguous.
    val matchesRule = fiveTagDF.col("rule") === HBaseDF.col("job")
    fiveTagDF
      .join(HBaseDF, matchesRule)
      .select(
        HBaseDF.col("id").as("userId"),
        fiveTagDF.col("id").as("tagsId")
      )
  }

  /** Entry point: delegates to the template-method pipeline in BaseModel. */
  def main(args: Array[String]): Unit = {
    exec()
  }
}
