package qiche.mock

import com.qiche.common.SparkBase
import com.qiche.model.OdsCurriculumInfo
import org.apache.spark.sql.SparkSession
import scala.collection.mutable.ArrayBuffer
import scala.util.Random

object OdsCurriculumInfoMock extends SparkBase {

  private val random = new Random()
  // Weighted pool: five 1s and one 0, so roughly 5/6 of rows get status 1.
  private val openingStatusArray = Array(1, 1, 1, 1, 1, 0)

  /**
    * Generates 300 mock curriculum rows, registers them as a temp view, and
    * inserts them into the Hive table `pingtai.ods_curriculum_info`.
    *
    * Each row gets a zero-padded id of the form c001..c300; the second field
    * mirrors the id, and the opening status is drawn from the weighted pool.
    *
    * @param spark active SparkSession used to build the DataFrame and run SQL
    */
  def execute(spark: SparkSession): Unit = {
    import spark.implicits._

    // Build the rows in a local immutable Seq instead of the old object-level
    // mutable ArrayBuffer: calling execute() twice no longer duplicates data.
    val rows = (1 to 300).map { i =>
      // f-interpolator zero-pads to three digits, replacing the manual
      // if/else-if chain (which also used non-short-circuit `&` on booleans).
      val curriculumId = f"c$i%03d"
      new OdsCurriculumInfo(
        curriculumId,
        curriculumId, // mock name mirrors the id
        openingStatusArray(random.nextInt(openingStatusArray.length))
      )
    }

    // createOrReplaceTempView avoids AnalysisException when the view already
    // exists (e.g. on a re-run within the same session).
    spark.sparkContext.parallelize(rows).toDF()
      .createOrReplaceTempView("ods_curriculum_info_view")

    spark.sql(
      "insert into table pingtai.ods_curriculum_info select * from ods_curriculum_info_view")
  }

  /**
    * Entry point: configures Hadoop/Hive user names, builds a local Spark
    * context named after this object, runs the mock load, and stops Spark.
    */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    System.setProperty("HIVE_USER_NAME", "hadoop")
    val spark = getContextLocal(this.getClass.getName.split('.').last)
    execute(spark)
    spark.stop()
  }
}



