package com.clothes.recommender
import java.sql.DriverManager

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

object Tags {

  /**
   * Counts how many times each clothing item has been marked with each tag
   * and persists the result to MySQL.
   *
   * Pipeline:
   *   1. Read `armoire_clothes` and `armoire_tags` from MySQL over JDBC.
   *   2. Join them on `armoire_id`, keeping (clothes_id, tag_id) pairs.
   *   3. Count occurrences per (clothes_id, tag_id).
   *   4. Overwrite the `clothes_tag_count` table with the result.
   */
  def main(args: Array[String]): Unit = {

    // Create the SparkSession (local mode, all available cores).
    val conf = new SparkConf().setAppName("ClothesTagCount").setMaster("local[*]")
    val spark = SparkSession.builder().config(conf).getOrCreate()

    // Connection settings. NOTE(review): the URL omits the host, so
    // Connector/J falls back to localhost:3306 — consider externalizing
    // these instead of hard-coding credentials in source.
    val url = "jdbc:mysql:///clothes_recommender_system?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC"
    val user = "root"
    val password = "123456"

    /** Reads one MySQL table into a DataFrame over JDBC. */
    def readTable(table: String): DataFrame =
      spark.read.format("jdbc")
        .option("url", url)
        .option("dbtable", table)
        .option("user", user)
        .option("password", password)
        .load()

    val clothesDF = readTable("armoire_clothes")
    val tagsDF = readTable("armoire_tags")

    // Join the two tables on armoire_id and keep only the id pair we count.
    val clothesTagsDF = clothesDF.join(tagsDF, "armoire_id")
      .select("clothes_id", "tag_id")

    // How many times each clothes item was marked with each tag.
    val clothesTagCountDF = clothesTagsDF.groupBy("clothes_id", "tag_id").count()

    // Write the result back. SaveMode.Overwrite drops and recreates the
    // target table, replacing the previous manual `DROP TABLE` through
    // DriverManager, which leaked the JDBC Connection/Statement. The old
    // `.option("mode", "overwrite")` was also a no-op: "mode" is not a
    // JDBC data-source option; the save mode must be set via .mode(...).
    clothesTagCountDF.write.format("jdbc")
      .option("url", url)
      .option("dbtable", "clothes_tag_count")
      .option("user", user)
      .option("password", password)
      .mode(SaveMode.Overwrite)
      .save()

    // Release Spark resources.
    spark.stop()
  }
}
