package com.lmq

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.{col, collect_list, udf}
import org.apache.spark.sql.types.{DoubleType, IntegerType, StringType, StructField, StructType}

import java.util
import scala.collection.mutable

/**
 * TODO: not used.
 *    Generates the item-information table. Each output row contains an
 *    ItemId together with its Context, i.e. the category of that item.
 */
object GenitemInfo {

  // Silence Spark's verbose INFO logging; keep warnings and errors.
  Logger.getLogger("org.apache.spark")
    .setLevel(Level.WARN)

  val spark: SparkSession = SparkSession.builder()
    .master("local[*]")
    .appName("Test")
    .getOrCreate()

  def main(args: Array[String]): Unit = {
    // Shared schema for both click/event CSVs (`vc` and `v`). The original
    // declared two byte-identical StructTypes (`schema` and `schema2`);
    // they are consolidated here.
    val eventSchema = StructType(
      Array(
        StructField("SessionId", IntegerType, nullable = true),
        StructField("ItemId", IntegerType, nullable = true),
        StructField("Context", StringType, nullable = true),
        StructField("Time", DoubleType, nullable = false)
      )
    )
    // Schema of the yoochoose-buys file:
    // Session ID, Timestamp, Item ID, Price, Quantity.
    // NOTE(review): ItemId is read as a String here but as an Integer in
    // eventSchema — confirm this is intentional before joining on it.
    val buysSchema = StructType(
      Array(
        StructField("SessionId", IntegerType, nullable = true),
        StructField("Timestamp", StringType, nullable = true),
        StructField("ItemId", StringType, nullable = true),
        StructField("Price", DoubleType, nullable = false),
        StructField("quantity", DoubleType, nullable = false)
      )
    )

    // Click/event data carrying the ItemId -> Context mapping.
    val vc: DataFrame = spark.read
      .schema(eventSchema)
      .csv("file:///home/iptv/yoochoose/OneOsixfour_v2/part-00000-a24d0f25-8b8d-4e48-bacc-e60f0343ed96-c000.csv")
    // Purchase (buys) data.
    val vd: DataFrame = spark.read
      .schema(buysSchema)
      .csv("file:///home/iptv/yoochoose/yoochoose-buys.dat")
    // The set of items we need table rows for.
    val v: DataFrame = spark.read
      .schema(eventSchema)
      .csv("file:///home/iptv/yoochoose/OneOsixfour_v2/ItemTableGen.csv")

    v.printSchema()
    vc.printSchema()
    vd.printSchema()

    println("=====================================")
    v.show(false)

    v.createTempView("v")
    vc.createTempView("vc")
    // BUG FIX: the original registered `vc` a second time under the name
    // "vd", so the buys DataFrame was never visible to SQL. Register `vd`.
    vd.createTempView("vd")

    // For every distinct ItemId in `v`, pick up its Context from `vc`.
    // Items absent from `vc` produce a row with null ItemId/Context
    // (left join selecting only the right side's columns).
    val w: DataFrame = spark.sql(
      """
        |select vc.ItemId,vc.Context
        |from
        |(select distinct(ItemId) as ItemId
        |from v) vp left join
        |vc
        |on vp.ItemId = vc.ItemId
        |""".stripMargin)

    w.createTempView("wta")

    // NOTE(review): the original issued an incomplete statement here
    // ("select wta.ItemId,vc.Context, vd.price" with no FROM clause), which
    // fails to parse at runtime; its result was also discarded. It has been
    // removed — re-add a complete join of `wta` with `vd` if price
    // enrichment is actually required. The unused locals from the original
    // (`utils`, `sortwithPositionElement`) were removed as well.

    // De-duplicate and write the item table as a single CSV part file.
    w.distinct()
      .coalesce(1).write
      .option("header", "false")
      .csv("file:///home/iptv/yoochoose/OneOsixfour_sessionsRevisev4")
    println("Done.")

    // Release the local Spark session's resources before exiting.
    spark.stop()
  }

}
