package com.qiche.mock

import com.qiche.common.SparkBase
import com.qiche.model.OdsStudentInfo
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ArrayBuffer
import scala.util.Random

object OdsStudentInfoMock extends SparkBase {

  private val random = new Random()
  // Weighted pool: ~5/6 of generated students are online (1), ~1/6 offline (0).
  private val onlineStatusArray = Array(1, 1, 1, 1, 1, 0)

  /**
   * Generates 30,000 mock student rows and appends them to
   * `pingtai.ods_student_info`.
   *
   * Each student id is "2019" followed by a zero-padded 5-digit sequence
   * number (e.g. "201900001" .. "201930000"); the same value is reused as
   * the second constructor field. Class ids are sampled uniformly from the
   * existing `pingtai.ods_class_info` table.
   *
   * @param spark active session; must have access to the `pingtai` database
   */
  def execute(spark: SparkSession): Unit = {
    import spark.implicits._

    // Candidate class ids from the class dimension table.
    val classIds = spark.sql("select class_id from pingtai.ods_class_info")
      .map(_.getString(0))
      .collect()

    // Build the rows locally. Keeping this a method-local immutable Seq
    // (instead of the previous object-level mutable ArrayBuffer) prevents
    // duplicate rows from accumulating if execute() runs more than once
    // in the same JVM.
    val rows = (1 to 30000).map { i =>
      // f-interpolator zero-pads to 5 digits, replacing the manual
      // i<10 / i<100 / ... concat chain.
      val studentId = f"2019$i%05d"
      new OdsStudentInfo(
        studentId,
        studentId, // mock data: name field mirrors the id
        classIds(random.nextInt(classIds.length)),
        onlineStatusArray(random.nextInt(onlineStatusArray.length))
      )
    }

    spark.sparkContext.parallelize(rows).toDF()
      .createTempView("ods_student_info_view")

    spark.sql("insert into table pingtai.ods_student_info select * from ods_student_info_view")
  }

  /** Entry point: runs the mock generation against a local Spark context. */
  def main(args: Array[String]): Unit = {
    // Run Hadoop/Hive operations as the 'hadoop' user.
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    System.setProperty("HIVE_USER_NAME", "hadoop")
    // App name = simple (unqualified) class name of this object.
    val spark = getContextLocal(this.getClass.getName.split('.').last)
    execute(spark)
    spark.stop()
  }
}



