package com.haier.spark.project.spark

import java.util.UUID

import com.haier.spark.project.constant.Constants
import com.haier.spark.project.util.{DateUtils, StringUtils}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

import scala.collection.mutable.ListBuffer
import scala.util.Random


object TestMockData {

  /**
   * Generates mock data and registers two temporary views on the given session:
   *
   *   - `user_visit_action`: simulated user behavior events (search / click / order / pay)
   *     for 100 users, 10 sessions per user, 0-99 actions per session.
   *   - `user_info`: 100 user profile rows with user_id 1..100.
   *
   * @param sparkSession active session the DataFrames and temp views are created on
   */
  def mock(sparkSession: SparkSession): Unit = {
    val random = new Random()
    registerUserVisitActions(sparkSession, random)
    registerUserInfo(sparkSession, random)
  }

  /** Builds randomly generated behavior rows and registers the `user_visit_action` temp view. */
  private def registerUserVisitActions(sparkSession: SparkSession, random: Random): Unit = {
    val searchKeywords = Array[String]("火锅", "蛋糕", "重庆辣子鸡", "重庆小面", "呷哺呷哺", "新辣道鱼火锅", "国贸大厦", "太古商场", "日本料理", "温泉")
    val date = DateUtils.getTodayDate
    val actions = Array[String]("search", "click", "order", "pay")

    val rows = new ListBuffer[Row]
    for (_ <- 0 until 100) {
      val userId = random.nextInt(100).toLong
      for (_ <- 0 until 10) {
        val sessionId = UUID.randomUUID().toString.replace("-", "")
        // FIX: nextInt(23) could never produce hour 23 — use nextInt(24).
        // Also zero-pad the hour so action_time matches "HH:mm:ss" for hours 0-9
        // (minutes/seconds were already padded via StringUtils.fulfuill).
        val baseActionTime = date + " " + StringUtils.fulfuill(random.nextInt(24).toString)
        // A session may legitimately contain zero actions (nextInt(100) can be 0).
        for (_ <- 0 until random.nextInt(100)) {
          val pageId = random.nextInt(10).toLong
          // FIX: nextInt(59) excluded minute/second value 59 — use nextInt(60).
          val actionTime = baseActionTime + ":" +
            StringUtils.fulfuill(random.nextInt(60).toString) + ":" +
            StringUtils.fulfuill(random.nextInt(60).toString)
          // Pick the action from the array by its actual length, not a hard-coded 4.
          val action = actions(random.nextInt(actions.length))

          // Only the fields relevant to the chosen action are populated;
          // all others stay null / 0 as in the original row layout.
          var searchKeyword: String = null
          var clickCategoryId: Long = 0
          var clickProductId: Long = 0
          var orderCategoryIds: String = null
          var orderProductIds: String = null
          var payCategoryIds: String = null
          var payProductIds: String = null
          action match {
            case "search" =>
              searchKeyword = searchKeywords(random.nextInt(searchKeywords.length))
            case "click" =>
              clickCategoryId = random.nextInt(100).toLong
              clickProductId = random.nextInt(100).toLong
            case "order" =>
              orderCategoryIds = random.nextInt(100).toString
              orderProductIds = random.nextInt(100).toString
            case "pay" =>
              payCategoryIds = random.nextInt(100).toString
              payProductIds = random.nextInt(100).toString
            case _ => // unreachable: actions only contains the four cases above
          }

          rows.append(Row(date, userId, sessionId, pageId, actionTime, searchKeyword,
            clickCategoryId, clickProductId, orderCategoryIds, orderProductIds,
            payCategoryIds, payProductIds))
        }
      }
    }

    val schema = StructType(Array(
      StructField("date", StringType),
      StructField("user_id", LongType),
      StructField("session_id", StringType),
      StructField("page_id", LongType),
      StructField("action_time", StringType),
      StructField("search_keyword", StringType),
      StructField("click_category_id", LongType),
      StructField("click_product_id", LongType),
      StructField("order_category_ids", StringType),
      StructField("order_product_ids", StringType),
      StructField("pay_category_ids", StringType),
      StructField("pay_product_ids", StringType)))

    val rowsRdd = sparkSession.sparkContext.makeRDD(rows.toList)
    val df: DataFrame = sparkSession.createDataFrame(rowsRdd, schema)
    df.createOrReplaceTempView("user_visit_action")
  }

  /** Builds 100 mock user profiles (user_id 1..100) and registers the `user_info` temp view. */
  private def registerUserInfo(sparkSession: SparkSession, random: Random): Unit = {
    val sexes = Array("male", "female")
    val userRows = new ListBuffer[Row]
    for (i <- 1 until 101) {
      val userId = i.toLong
      val username = "user" + i
      val name = "name" + i
      val age = random.nextInt(60)
      val professional = "professional" + random.nextInt(100)
      val city = "city" + random.nextInt(100)
      val sex = sexes(random.nextInt(sexes.length))
      userRows.append(Row(userId, username, name, age, professional, city, sex))
    }

    val schema2 = StructType(Array(
      StructField("user_id", LongType),
      StructField("username", StringType),
      StructField("name", StringType),
      StructField("age", IntegerType),
      StructField("professional", StringType),
      StructField("city", StringType),
      StructField("sex", StringType)))

    val userRowsRDD = sparkSession.sparkContext.makeRDD(userRows.toList)
    val df2: DataFrame = sparkSession.createDataFrame(userRowsRDD, schema2)
    df2.createOrReplaceTempView("user_info")
  }

  /** Standalone entry point: spins up a local session and populates the mock views. */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder()
      .appName(Constants.SPARK_APP_NAME_SESSION)
      .master("local[1]")
      ./*config("spark.sql.warehouse.dir","file:///D://test").*/getOrCreate()
    mock(sparkSession)
  }
}