package study.wsn

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.sql.SparkSession

// One product record parsed from a tab-separated input line.
// Fields map positionally to the input columns (see the split/map in main):
//   prodID - product identifier        (column 0)
//   style  - primary style label       (column 1)
//   merID  - merchant identifier       (column 2)
//   style2 - secondary style label     (column 3); NOTE(review): not referenced
//            by the SQL query below — presumably kept for schema completeness.
case class Product(prodID:String,style:String,merID:String,style2:String)

object SparkSQL {

  /**
   * Driver entry point: reads a tab-separated product file, registers it as a
   * temporary view, and prints — per merchant — the distinct styles joined
   * with "--" (via `collect_set` + `concat_ws`).
   *
   * @param args optional; args(0) overrides the input path. With no args the
   *             original hard-coded location is used, preserving old behavior.
   */
  def main(args: Array[String]): Unit = {
    // Generalized: allow the input path on the command line, defaulting to
    // the previously hard-coded location for backward compatibility.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "file:///A:/output2/products_clear/part-00000"

    val sess = SparkSession.builder().appName("SparkSQL").master("local").getOrCreate()
    try {
      import sess.implicits._

      // Parse each line into a Product. Guard against malformed rows with
      // fewer than four tab-separated fields — the original code threw
      // ArrayIndexOutOfBoundsException on such lines. The fields are already
      // Strings, so the redundant .toString() calls are dropped.
      val products = sess.read.textFile(inputPath)
        .map(_.split("\t"))
        .filter(_.length >= 4)
        .map(arr => Product(arr(0), arr(1), arr(2), arr(3)))

      products.createOrReplaceTempView("product")

      // Distinct styles per merchant, joined with "--"; show up to 1000 rows
      // without truncating cell contents.
      sess.sql("select merID,concat_ws('--',collect_set(style)) from product group by merID")
        .show(1000, false)
    } finally {
      // The original leaked the session; always stop it so the local Spark
      // context shuts down cleanly.
      sess.stop()
    }
  }
}