package study.wsn

import org.apache.spark.sql.SparkSession

//case class Product(prodID:String,style:String,merID:String,style2:String)
//case class ProductOrder(userID:String,prodID:String)
object UserPortrait {

  /**
   * Builds a simple "user portrait": for each user, the distinct set of
   * product styles they have ordered, joined with ';'.
   *
   * Input files are tab-separated outputs of an earlier cleaning job
   * (products: prodID, style, merID, style2; orders: userID, prodID).
   * Result is printed with `show`; nothing is written back to disk.
   */
  def main(args: Array[String]): Unit = {
    val sess = SparkSession.builder().appName("UserPortraitSQL").master("local").getOrCreate()

    import sess.implicits._

    // Read the cleaned product file: prodID \t style \t merID \t style2.
    // NOTE(review): assumes every line has at least 4 fields — malformed lines
    // would throw ArrayIndexOutOfBoundsException; verify upstream cleaning guarantees this.
    val df1 = sess.read.textFile("file:///A:/output2/products_clear/part-00000")
      .map(_.split("\t"))
      .map(arr => Product(arr(0), arr(1), arr(2), arr(3)))

    // Read the cleaned order file: userID \t prodID.
    val df2 = sess.read.textFile("file:///A:/output2/order_clear/part-00000")
      .map(_.split("\t"))
      .map(arr => ProductOrder(arr(0), arr(1)))

    // Register temp views so the join can be expressed in SQL.
    df1.createOrReplaceTempView("product")
    df2.createOrReplaceTempView("order")

    // Join orders to products and aggregate each user's styles.
    // `collect_set` already de-duplicates, so post-processing only needs to
    // drop empty entries (e.g. blank style fields) from the joined string.
    sess.sql("select userID,concat_ws(';',collect_set(style)) from order inner join product on product.prodID = order.prodID group by userID")
      .rdd
      .map { row =>
        val styles = row.getString(1).split(";").filter(_.nonEmpty).distinct
        (row.getString(0), styles.mkString(";"))
      }
      .toDF()
      .show(1000, false)

    // Release the Spark resources instead of leaking the session.
    sess.stop()
  }
}