package SimpleQuery
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.log4j.{Level, Logger}
// Column layout: x(0)=buyer id, x(1)=item id, x(2)=item category, x(3)=seller id,
// x(4)=brand id, x(5)=transaction month, x(6)=transaction day, x(7)=buyer action,
// x(8)=buyer age range, x(9)=buyer gender, x(10)=shipping address.
// Query: find users who purchased more than 5 times on the site on a given day.
object query08 {
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setMaster("local[2]").setAppName(query08.getClass.getSimpleName)
        // Silence Spark's verbose INFO logging for readable console output.
        Logger.getLogger("org.apache.spark").setLevel(Level.OFF)
        val sc = new SparkContext(conf)
        transformationOps5(sc)
        sc.stop()
    }

  /**
   * Prints every user who purchased more than `minPurchases` times on the given
   * month/day, followed by the number of such users.
   *
   * CSV column layout (see header comment at top of file):
   *   x(0)=buyer id, x(5)=transaction month, x(6)=transaction day,
   *   x(7)=buyer action, where action "2" denotes a purchase.
   *
   * @param sc           active SparkContext
   * @param inputPath    user-log CSV to read (first row is a header and is skipped)
   * @param month        transaction month to match (string form, e.g. "11")
   * @param day          transaction day to match (string form, e.g. "10")
   * @param minPurchases users must have strictly more purchases than this to be reported
   */
  def transformationOps5(
      sc: SparkContext,
      inputPath: String = "file:///C:/Users/asus/Desktop/hadoop_experiment/data/user_log.csv",
      month: String = "11",
      day: String = "10",
      minPurchases: Int = 5): Unit = {
        val lines = sc.textFile(inputPath)

        // Drop the CSV header row (index 0), then parse each remaining line.
        val records = lines.zipWithIndex().filter(_._2 >= 1).keys.map(_.split(","))

        val frequentBuyers = records
          // Single pass: guard against short/malformed rows before indexing,
          // then keep only purchase actions ("2") on the requested date.
          .filter(r => r.length > 7 && r(7) == "2" && r(5) == month && r(6) == day)
          .map(r => (r(0), 1))
          // reduceByKey counts with map-side combining; groupByKey would shuffle
          // every grouped value across the cluster just to take its size.
          .reduceByKey(_ + _)
          .filter(_._2 > minPurchases)

        // Emits (userId, purchaseCount) pairs, then the number of qualifying users.
        frequentBuyers.foreach(println)
        println(frequentBuyers.count())
    }
}