import org.apache.spark.sql.functions.col
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

// Data cleaning and import: load the raw ODS extract, cast numeric columns,
// and overwrite the curated Hive table db_minsu.tb_minsu.
object spark {

  // Columns stored as strings in the raw file that must become integers.
  // Casting after the read (instead of declaring IntegerType in the schema)
  // turns malformed values into null rather than failing the whole load.
  private val integerColumns: Seq[String] =
    Seq("product_id", "star_rating", "comment_number", "fav_count", "consume_count", "price")

  def main(args: Array[String]): Unit = {
    // 1. Spark configuration
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Minsu")

    // 2. SparkSession with Hive support enabled (required for saveAsTable into Hive)
    val sparksession = SparkSession.builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the session is released even if the job throws mid-pipeline;
    // without the finally block a failure would leak the SparkSession.
    try {
      // Path of the raw extract produced by the upstream ODS job.
      val originPath = "/minsu/ods/part-r-00000"

      // Read every field as StringType first; numeric fields are cast below.
      val schema = StructType(Array(
        StructField("product_id", StringType, true),
        StructField("city_name", StringType, true),
        StructField("title", StringType, true),
        StructField("district_name", StringType, true),
        StructField("location_area", StringType, true),
        StructField("star_rating", StringType, true),
        StructField("star_rating_desc", StringType, true),
        StructField("comment_number", StringType, true),
        StructField("distance_desc", StringType, true),
        StructField("cover_image", StringType, true),
        StructField("fav_count", StringType, true),
        StructField("fav_count_desc", StringType, true),
        StructField("consume_count", StringType, true),
        StructField("consume_desc", StringType, true),
        StructField("price", StringType, true),
        StructField("layout_desc", StringType, true),
        StructField("guest_number_desc", StringType, true),
        StructField("ugc_desc", StringType, true),
        StructField("tags", StringType, true)
      ))

      // Read the headerless file using '-' as the field separator.
      val df: DataFrame = sparksession.read
        .option("header", "false")
        .option("sep", "-")
        .schema(schema)
        .csv(originPath)

      // Cast every numeric column in one fold instead of a long withColumn
      // chain — same result, less repetition, and a single place to maintain
      // the list of integer columns.
      val convertedDf = integerColumns.foldLeft(df) { (acc, name) =>
        acc.withColumn(name, col(name).cast(IntegerType))
      }

      // Overwrite the target Hive table with the cleaned data.
      convertedDf.write
        .mode(SaveMode.Overwrite)
        .saveAsTable("db_minsu.tb_minsu")
    } finally {
      // Always stop the SparkSession to release cluster/local resources.
      sparksession.stop()
    }
  }
}