import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}


object tag_num {

  /**
   * Batch job: counts how often each tag appears in `db_minsu.tb_minsu`.
   *
   * The `tags` column holds '_'-separated tag lists; each list is exploded
   * into one row per tag, the rows are grouped and counted, and the result
   * is written (overwriting) to the Hive table `db_minsu.tag_num`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport() // needed to read/write Hive-managed tables
      .appName("fav_count")
      .getOrCreate()

    // Explode the '_'-separated tags into individual rows, then count per tag.
    // NOTE: SparkSession.sql() parses a single statement and rejects a
    // trailing ';' with a ParseException, so none is included here.
    val tagCounts = spark.sql(
      """
        |SELECT
        |tag,count(1) AS count
        |FROM (SELECT explode(split(tags,'_')) as tag FROM db_minsu.tb_minsu) t
        |GROUP BY tag
        |""".stripMargin
    )

    // Persist the aggregated counts to Hive, replacing any previous run's table.
    // (The intermediate temp view from the original version was dropped: the
    // DataFrame can be written directly.)
    tagCounts.write.mode(SaveMode.Overwrite).saveAsTable("db_minsu.tag_num")

    // close() is just an alias for stop(), so a single stop() suffices.
    spark.stop()
  }
}
