package com.clothes.recommender
import java.sql.DriverManager
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._
import scala.util.matching.Regex

object ClothesKeywordExtractor {

  /**
   * One-shot batch ETL job:
   *   1. Reads every row of the MySQL `clothes` table over JDBC.
   *   2. Regex-extracts keyword columns (gender, style, season, type, color)
   *      from the product `name`.
   *   3. Drops any previous `clothes_keywords` table and writes the result back.
   *
   * Side effects: connects to MySQL, drops/creates `clothes_keywords`,
   * prints a sample of the result DataFrame to stdout.
   */
  def main(args: Array[String]): Unit = {
    // Local-mode session; this job is intended to run on a single machine.
    val spark = SparkSession.builder()
      .appName("Clothes Keyword Extractor")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    // MySQL connection configuration.
    // The host is omitted from the URL, so Connector/J falls back to localhost:3306.
    // NOTE(review): credentials are hard-coded; consider moving them to config/env.
    val url = "jdbc:mysql:///clothes_recommender_system?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC"
    val driver = "com.mysql.jdbc.Driver"
    val username = "root"
    val password = "123456"

    // Load the entire `clothes` table.
    val clothesDF = spark.read.format("jdbc")
      .option("url", url)
      .option("driver", driver)
      .option("user", username)
      .option("password", password)
      .option("dbtable", "clothes")
      .load()

    // Keyword patterns matched against the product name
    // (gender, garment style, season, color, usage type).
    val genderRegex = "(男|女|男士|女士)".r
    val styleRegex = "(T恤|衬衫|裤子|外套|羽绒服|连衣裙)".r
    val seasonRegex = "(春|夏|秋|冬)".r
    val colorRegex = "(白|黑|红|黄|蓝|绿|紫|灰)".r
    val typeRegex = "(球类|户外|室内|正式|休闲)".r

    // Builds a UDF that collects every match of `regex` joined with "|".
    // Null-safe: a NULL `name` column would otherwise throw an NPE inside
    // findAllIn; Option(s) maps null -> None, and orNull keeps the column NULL.
    def allMatchesUDF(regex: Regex) =
      udf((s: String) => Option(s).map(regex.findAllIn(_).mkString("|")).orNull)

    val findAllInSeasonUDF = allMatchesUDF(seasonRegex)
    val findAllInTypeUDF = allMatchesUDF(typeRegex)
    val findAllInColorUDF = allMatchesUDF(colorRegex)

    // Derive the keyword columns. gender/style keep only the FIRST match
    // (regexp_extract), while season/type/color keep ALL matches "|"-joined.
    val keywordDF = clothesDF
      .select($"clothes_id", $"name",
        regexp_extract($"name", genderRegex.toString(), 0).alias("gender"),
        regexp_extract($"name", styleRegex.toString(), 0).alias("style"),
        findAllInSeasonUDF($"name").alias("season"),
        findAllInTypeUDF($"name").alias("type"),
        findAllInColorUDF($"name").alias("color")
      )

    // Drop any previous result table so the JDBC write below can recreate it.
    // Statement and connection are closed in finally blocks (previously leaked).
    Class.forName(driver)
    val connection = DriverManager.getConnection(url, username, password)
    try {
      val statement = connection.createStatement()
      try statement.execute("DROP TABLE IF EXISTS clothes_keywords")
      finally statement.close()
    } finally connection.close()

    // Persist the extracted keywords (show() prints a sample for inspection).
    keywordDF.show()
    keywordDF.write.format("jdbc")
      .option("url", url)
      .option("driver", driver) // keep the write path consistent with the read path
      .option("user", username)
      .option("password", password)
      .option("dbtable", "clothes_keywords")
      .save()

    // Release Spark resources.
    spark.stop()
  }
}