package edu.csl.study.spark.practise

import org.apache.spark.sql.{Dataset, SparkSession}

/**
 * Spark SQL exercise: counts clicks / orders / payments per category id
 * from a comma-separated input file and prints the top-10 categories
 * ranked by click, order, then pay count.
 */
object Practise1 {

  /**
   * Builds a local SparkSession for this exercise and lowers the log level
   * to WARN so the job output stays readable.
   *
   * @return a SparkSession running on master "local[*]"
   */
  def getSparkSession(): SparkSession = {
    val spark: SparkSession = SparkSession.builder()
      .appName(Practise1.getClass.getName)
      .master("local[*]")
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")
    spark
  }

  // Resolved relative to the process working directory; the job fails at
  // read time if the file is absent.
  val inputFilePath: String = System.getProperty("user.dir") + "/testfile/practise/practise1.txt"

  /**
   * Projection of one input line: the click / order / pay category and
   * product id columns (input fields 7..12).
   * NOTE(review): order_* and pay_* fields appear to hold multiple ids
   * separated by the literal text "^A" (see the split('\\^A') calls below)
   * — confirm against the input format.
   */
  case class Record(click_category_id: String, click_product_id: String,
                    order_category_id: String, order_product_id: String,
                    pay_category_id: String, pay_product_id: String)

  /**
   * Entry point: reads the input file, registers it as a temp view, builds
   * per-category click/order/pay counts via Spark SQL, full-outer-joins them,
   * and prints the top-10 formatted result lines.
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = getSparkSession()
    val ds: Dataset[String] = spark.read.textFile(inputFilePath)
    import spark.implicits._
    // NOTE(review): assumes every line has at least 13 comma-separated fields
    // (indices 7..12 are read); a shorter line throws
    // ArrayIndexOutOfBoundsException — confirm the input is well-formed.
    val ds_Record: Dataset[Record] = ds.map(item => {
      val array = item.split(",")
      Record(array(7), array(8), array(9), array(10), array(11), array(12))
    })
    // createOrReplaceTempView: idempotent, unlike createTempView which throws
    // if the view already exists (e.g. on re-run in a shared session).
    ds_Record.createOrReplaceTempView("RecordTable")

    /**
     * 1. Click counts per category -> view click_Table
     */
    val click_DS = spark.sql("select  click_category_id as category_id, count(1) as count " +
      "from RecordTable " +
      "group by click_category_id  ")
    //click_DS.show()
    click_DS.createOrReplaceTempView("click_Table")

    /**
     * LATERAL VIEW explode is supported by Hive and Spark, but not by Impala.
     * Escaping: Scala "\\\\" yields the two characters \\ in the SQL literal,
     * i.e. a regex-escaped backslash-caret, so split matches the literal "^A".
     */
    /**
     * 2. Order counts per category -> view order_table
     */
    val ds_Record_order = spark.sql("select orderid  as category_id,count(1) as count from" +
      " ( select orderid from   RecordTable  t" +
      " LATERAL VIEW explode(split(t.order_category_id,'\\\\^A')) as orderid ) temp " +
      " group by temp.orderid  ")
    ds_Record_order.createOrReplaceTempView("order_table")

    /**
     * 3. Pay counts per category -> view pay_table
     */
    // Fix: the derived table now carries the alias "temp" — it was unaliased,
    // which Hive and some Spark versions reject — matching the order query.
    val ds_Record_pay = spark.sql("select payid as  category_id,count(1) as count from" +
      " ( select payid from   RecordTable  t" +
      " LATERAL VIEW explode(split(t.pay_category_id,'\\\\^A')) as payid ) temp " +
      "  group by temp.payid")
    ds_Record_pay.createOrReplaceTempView("pay_table")

    /**
     * 4. Full outer join of order_table and pay_table -> view order_pay_table
     *    (nvl keeps whichever side's category_id is non-null).
     */
    val ds_order_pay = spark.sql("select " +
      "nvl(o.category_id,p.category_id)  category_id, o.count  order_count ,p.count  pay_count" +
      " from order_table o " +
      " Full Outer Join  pay_table p  on  o.category_id = p.category_id ")
    ds_order_pay.createOrReplaceTempView("order_pay_table")

    /**
     * 5. Full outer join click_Table with order_pay_table, format each row
     *    as a single result string, rank by click / order / pay counts
     *    (missing counts treated as 0) and keep the top 10.
     */
    val ds_result2 = spark.sql("select " +
      " CONCAT('category_id=',nvl(c.category_id,op.category_id)," +
      " '|click_category_count=', nvl(c.count,0)," +
      " '|order_category_count=', nvl(op.order_count,0)," +
      " '|pay_category_count='  , nvl(op.pay_count,0)) as result" +
      " from click_Table c " +
      " Full Outer Join  order_pay_table op  on  c.category_id = op.category_id " +
      " order by  nvl(c.count,0) desc, nvl(op.order_count,0) desc, nvl(op.pay_count,0)  desc " +
      " limit 10 ")

    // Fix: collect() brings the (at most 10) rows to the driver before
    // printing; rdd.foreach(println) would print on the executors when run
    // on a real cluster, producing no driver-side output.
    ds_result2.collect().foreach(println)

    spark.stop()
  }
}
