package ds_industry_2025

import org.apache.spark.sql.SparkSession

import java.util.Random
//  TODO: feature engineering — data preparation for the recommender system
object tzgc_data_perparation {

  /** Draws `count` random (userId, skuId) pairs, sampling each id uniformly
    * and with replacement from the given candidate lists.
    *
    * @param userIds candidate user ids (may contain duplicates; non-empty)
    * @param skuIds  candidate sku ids (may contain duplicates; non-empty)
    * @param count   number of pairs to produce
    * @param random  RNG used for sampling
    * @return `count` (userId, skuId) tuples
    */
  private def samplePairs(userIds: Seq[Int], skuIds: Seq[Int], count: Int,
                          random: Random): Seq[(Int, Int)] =
    (1 to count).map { _ =>
      (userIds(random.nextInt(userIds.length)), skuIds(random.nextInt(skuIds.length)))
    }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("test1")
      .master("local[1]")
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the SparkSession is always released, even if show()/write throws.
    try {
      val userIds = List(4282, 3331, 3108, 790, 1826, 2196, 2359, 1064, 6312, 4844, 5913, 4579, 1315, 1650, 2011, 4986,
        5286, 4616, 9243, 6213, 5241, 85, 5015, 2856, 5903, 1278, 2760, 539, 993, 8293, 9983, 9572, 7287, 6987, 8561,
        8085, 9858, 336, 8081, 6382, 1605, 2682, 2015, 6643, 1605, 9795, 8371, 8437, 3934, 5525, 1515, 4113, 2178, 6739,
        7350, 1861, 2610, 3102, 3307, 8839, 7967, 4568, 4268, 6995, 9321, 1608, 3770, 7097, 5121, 9580, 3934, 614, 1029,
        8190, 6814, 6385, 82, 641, 5724, 1427, 5289, 7353, 8768, 6115, 8631, 9812, 4537, 1958, 1830, 8708, 6748, 2316, 4629,
        4773, 6528, 191, 2481, 4131, 2382, 1089, 2033, 7044, 3949, 5913, 8921, 9441, 417, 7031, 3145, 5890, 6290, 3158,
        472, 5288, 9720, 9077, 5409, 9593, 453, 7664, 1857, 6017, 661, 583, 4504, 2814, 986, 4669, 506, 8263, 4158, 5137,
        1280, 6498, 5435, 96, 8880, 2712, 2790, 3020, 7637, 3635, 6581, 9619, 1088, 8611, 7476, 1592, 869, 4282, 3838,
        1801, 5464, 476, 1919, 1024, 1694, 505, 8225, 1755, 5729, 501, 7443, 8536, 6050, 6589, 9208, 7031, 4658, 6631, 6221,
        6496, 1143, 2687, 8582, 1441, 8197, 3192, 5400, 4889, 2383, 8584, 3552, 7783, 9493, 3003, 520, 2774, 1604, 5017,
        4860, 1335, 6498, 8418, 3810, 8778, 7388, 2093, 9501, 6956, 6708)

      val skuIds = List(2, 9, 3, 6, 2, 14, 15, 6, 12, 6, 12, 4, 11, 4, 10, 9, 10, 9, 15, 1, 8, 7, 13, 1, 11, 11, 13, 15,
        15, 15, 11, 4, 15, 11, 9, 10, 13, 13, 11, 15, 10, 12, 9, 14, 16, 2, 10, 4, 3, 15, 6, 10, 15, 12, 7, 6, 15, 15,
        4, 12, 6, 16, 11, 2, 7, 2, 14, 16, 14, 15, 9, 10, 7, 14, 4, 12, 13, 15, 7, 9, 5, 14, 8, 3, 11, 7, 14, 9, 9,
        3, 15, 1, 8, 10, 16, 2, 15, 16, 14, 6, 4, 12, 9, 16, 15, 10, 8, 5, 9, 10, 1, 4, 14, 3, 13, 5, 7, 6, 12, 7,
        12, 14, 16, 6, 7, 16, 6, 13, 9, 7, 13, 16, 2, 10, 5, 4, 14, 7, 7, 16, 2, 10, 16, 3, 6, 4, 8, 14, 5, 9, 8,
        6, 7, 4, 9, 8, 10, 9, 12, 2, 13, 5, 15, 13, 13, 9, 14, 7, 5, 15, 3, 7, 5, 15, 15, 9, 5, 11, 9, 16, 8)

      // Generate 400 random (user, sku) interaction records.
      // NOTE: the original comment claimed 100 rows, but the code produced 400;
      // the count below preserves the actual behavior.
      val randomData = samplePairs(userIds, skuIds, 400, new Random())

      // Convert to a DataFrame with named columns.
      val randomDataDF = spark.createDataFrame(randomData).toDF("user_id", "sku_id")

      randomDataDF.show()

      // Append the generated rows into the Hive table tzgc.source.
      randomDataDF.write
        .mode("append")
        .saveAsTable("tzgc.source")

      //    val order_info = spark.sql("select * from dwd.fact_order_info")
      //
      //    println(order_info.limit(1).collect().toSeq.take(5).mkString(","))
      //    println(order_info.limit(1).collect().toSeq.take(5).mkString(","))

      //    val df = spark.sql("select * from dws.user_id_sku_id_hot")
      //    df.show()
      //    df.collect().foreach(row => {
      //      println(row.toSeq.flatMap(x => x.toString.split(",")).mkString(","))
      //    })
    } finally {
      // Release the SparkSession regardless of how the job finished.
      spark.stop()
    }
  }

}
