package qiche.mock

import com.qiche.common.SparkBase
import com.qiche.model.OdsLiveBoadcastInfo
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ArrayBuffer
import scala.util.Random

object OdsLiveBoadcastInfoMock extends SparkBase {

  // Number of mock live-broadcast rows to generate (ids l0001 .. l3886).
  private val RecordCount = 3886

  private val random = new Random()

  /**
   * Generates mock rows for `pingtai.ods_live_broadcast_info` by sampling
   * teacher / curriculum / class ids from the existing ODS dimension tables,
   * then inserts them via a temp view.
   *
   * @param spark active session used both to read the id tables and to write the result
   */
  def execute(spark: SparkSession): Unit = {
    import spark.implicits._

    // Collect candidate foreign keys once so each mock row can sample from them.
    val teacherIds = spark.sql("select teacher_id from pingtai.ods_teacher_info").map(_.getString(0)).collect()
    val curriculumIds = spark.sql("select curriculum_id from pingtai.ods_curriculum_info").map(_.getString(0)).collect()
    val classIds = spark.sql("select class_id from pingtai.ods_class_info").map(_.getString(0)).collect()

    // Local value (was an object-level mutable ArrayBuffer): keeps execute
    // idempotent — the old field accumulated rows across repeated calls.
    val rows = (1 to RecordCount).map { i =>
      new OdsLiveBoadcastInfo(
        // f-interpolator zero-pads to 4 digits, replacing the 4-branch if/else chain.
        f"l$i%04d",
        teacherIds(random.nextInt(teacherIds.length)),
        curriculumIds(random.nextInt(curriculumIds.length)),
        classIds(random.nextInt(classIds.length)),
        // NOTE(review): constants carried over from original; semantics (planned vs
        // actual duration?) not visible here — confirm against OdsLiveBoadcastInfo.
        180,
        160
      )
    }

    // createOrReplaceTempView avoids AnalysisException if the view already exists
    // in this session (createTempView would throw on a second run).
    rows.toDF().createOrReplaceTempView("ods_live_broadcast_info_view")

    // stripMargin dropped: it was a no-op on a margin-less single-line string.
    spark.sql("insert into table pingtai.ods_live_broadcast_info select * from ods_live_broadcast_info_view")
  }

  /** Entry point: sets Hadoop/Hive user, runs the mock load locally, stops the session. */
  def main(args: Array[String]): Unit = {
    System.setProperty("HADOOP_USER_NAME", "hadoop")
    System.setProperty("HIVE_USER_NAME", "hadoop")
    val spark = getContextLocal(this.getClass.getName.split('.').last)
    execute(spark)
    spark.stop()
  }
}



