package qiche.mock

import com.qiche.common.SparkBase
import com.qiche.model.OdsTaskInfo
import com.qiche.tools.DateUtils
import org.apache.spark.sql.SparkSession
import scala.collection.mutable.ArrayBuffer
import scala.util.Random

object OdsTaskInfoMock extends SparkBase {

  // Single RNG for the whole object; the original mixed this member with
  // ad-hoc `scala.util.Random.nextInt` calls.
  private val random = new Random()

  /**
   * Generates mock task rows and appends them to `pingtai.ods_task_info`.
   *
   * For each value in 101..136 (used as the third column) and each known
   * curriculum, one [[OdsTaskInfo]] row is built with:
   *   - an id formed from a random curriculum id plus the two loop counters,
   *   - a randomly chosen curriculum id,
   *   - a random timestamp via `DateUtils.getYesterdayHour` (day offset 0-6,
   *     hour 1-23, minute 0-59).
   *
   * NOTE(review): row construction is now local and immutable — the original
   * accumulated into an object-level mutable ArrayBuffer, so calling
   * `execute` twice would re-insert all previously generated rows.
   *
   * @param spark active session; must be able to read
   *              `pingtai.ods_curriculum_info` and write `pingtai.ods_task_info`
   */
  def execute(spark: SparkSession): Unit = {
    import spark.implicits._

    // Pull every known curriculum id once; mock rows reference these.
    val curriculumIds = spark
      .sql("select curriculum_id from pingtai.ods_curriculum_info")
      .map(_.getString(0))
      .collect()

    // Guard: random.nextInt(0) on an empty array would throw
    // IllegalArgumentException with a far less helpful message.
    require(curriculumIds.nonEmpty,
      "pingtai.ods_curriculum_info returned no curriculum ids; cannot mock task rows")

    // Build all rows as an immutable sequence instead of mutating shared state.
    val rows = for {
      i <- 101 to 136
      j <- 1 to curriculumIds.length
    } yield new OdsTaskInfo(
      // Synthetic task id: random curriculum id + both loop counters,
      // matching the original id scheme.
      curriculumIds(random.nextInt(curriculumIds.length)).concat(j.toString).concat(i.toString),
      curriculumIds(random.nextInt(curriculumIds.length)),
      i.toString,
      DateUtils.getYesterdayHour(random.nextInt(7), random.nextInt(23) + 1, random.nextInt(60))
    )

    // Materialise the DataFrame once (the original converted the buffer twice).
    val df = rows.toDF()
    df.createTempView("ods_task_info_view")
    df.show()
    // Plain string: the original's `.stripMargin` was a no-op (no `|` margins).
    spark.sql("insert into pingtai.ods_task_info select * from ods_task_info_view")
  }

  /**
   * Entry point: impersonates the `hadoop` user, builds a local SparkSession
   * named after this class, runs the mock load, and shuts the session down.
   */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    System.setProperty("HIVE_USER_NAME", "hadoop")
    val spark = getContextLocal(this.getClass.getName.split('.').last)
    try execute(spark)
    finally spark.stop() // always release the session, even if execute throws
  }
}



