package com.lmq

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions.{col, collect_list, udf}
import org.apache.spark.sql.types.{DoubleType, IntegerType, StringType, StructField, StructType}

import java.util
import scala.collection.mutable

/**
 *
 * TODO:   not used, just for testing.
 *        This file generates sessions with a userid column added.
 *        The userid is generated randomly: if a random number is greater
 *        than 0.84, the sessionid is assigned to a new uid; otherwise, the
 *        sessionid is assigned to the last uid used.
 */
object genSessionWithUid {

  // Driver-side counter handed out by AssigneUid as the "current" user id.
  var global_cnt = 1

  /**
   * UDF that assigns a user id to each incoming row.
   *
   * With probability ~0.16 (math.random > 0.84) it advances `global_cnt`,
   * i.e. starts a new user; otherwise the row is attributed to the current
   * user id. The input column value is intentionally ignored.
   *
   * NOTE(review): mutating a driver-side var from a UDF is only workable in
   * local[*] mode (single JVM). On a real cluster each executor would mutate
   * its own deserialized copy of `global_cnt`, and even locally concurrent
   * task threads can race. Acceptable here because this object is test-only
   * (see file-header TODO) — confirm before reusing elsewhere.
   */
  val AssigneUid: UserDefinedFunction = udf((x: String) => {
    if (math.random > 0.84) {
      global_cnt = global_cnt + 1
      global_cnt
    } else global_cnt
  })

  /**
   * Generates a random subset of candidate user ids.
   *
   * The candidate pool size is derived from the dataset line count and the
   * average session length; each candidate id is kept with probability ~0.16.
   * Returns a java.util.ArrayList for interop with the rest of the pipeline.
   */
  def genRandomUserId(): util.ArrayList[Int] = {
    val lines = 1881348 // total interaction lines in the source dataset
    val avglen = 6.6    // average session length
    val userNumber = (1.88 * lines / avglen).toInt
    val ids: util.ArrayList[Int] = new util.ArrayList[Int]()
    // Keep id `i` iff a fresh random draw exceeds 0.84.
    for (i <- 1 to userNumber) {
      if (math.random > 0.84)
        ids.add(i)
    }
    ids
  }

  Logger.getLogger("org.apache.spark")
    .setLevel(Level.WARN)

  // Local-mode session; this object is a standalone test driver.
  val spark: SparkSession = SparkSession.builder()
    .master("local[*]")
    .appName("Test")
    .getOrCreate()

  /**
   * Entry point: reads the per-event CSV, groups events into sessions,
   * orders sessions by their earliest timestamp, attaches a randomly
   * advancing user id to each session, and writes the result as one CSV.
   */
  def main(args: Array[String]): Unit = {
    val utils = new utils()

    // Schema of the input CSV (headerless).
    val schema = StructType(
      Array(
        StructField("SessionId", IntegerType, nullable = true),
        StructField("ItemId", IntegerType, nullable = true),
        StructField("Context", StringType, nullable = true),
        StructField("Time", DoubleType, nullable = false)
      )
    )

    val v: DataFrame = spark.read
      .schema(schema)
      .csv("file:///home/iptv/yoochoose/OneOsixfour_V3/part-00000-241fb18c-41fc-47b0-9909-469c86569cca-c000.csv")

    v.printSchema()
    println("=====================================")
    v.show(false)
    // Registered for ad-hoc SQL debugging; not queried by the live pipeline.
    v.createTempView("v")

    // One row per session: the collected item list and event-time list.
    val w = v.groupBy(col("SessionId"))
      .agg(collect_list("ItemId").as("ItemId"), collect_list("Time").as("Time"))

    // Helper UDFs: join an array column to "a,b,c"; earliest timestamp of a
    // session; earliest timestamp rendered via the project's Tsp2Time helper.
    val toStrFnc = udf((x: mutable.WrappedArray[AnyVal]) => x.mkString(","))
    val mymin = udf((x: mutable.WrappedArray[Double]) => x.min)
    val myminPro = udf((x: mutable.WrappedArray[Double]) => utils.Tsp2Time(x.min))
    w.show(100, truncate = false)

    // Sessions ordered by first event time, each assigned a user id, written
    // out as a single CSV part file (coalesce(1)).
    w.orderBy(mymin(col("Time")))
      .select(
        AssigneUid(col("SessionId")),
        col("SessionId"),
        myminPro(col("Time")),
        toStrFnc(col("ItemId"))
      )
      .coalesce(1).write
      .option("header", "false")
      .csv("file:///home/iptv/yoochoose/OneOsixfour_sessionsRevisev5")

    println("Done.")

    // Removed dead code from earlier experiments: a duplicate write to
    // OneOsixfour_sessionsRevisev3, SQL filtering of items appearing < 5
    // times and sessions of length <= 1, and an unused sort-by-position UDF.
  }

}
