package com.atguigu.qz.service

import com.alibaba.fastjson.JSONObject
import com.atguigu.constan.Constan
import com.atguigu.qz.bean.{DwdQzPaperView, DwdQzPoint, DwdQzQuestion}
import com.atguigu.util.JsonUtil
import org.apache.spark.sql.{SaveMode, SparkSession}

/**
 * description ：etl 做题模块 ods 数据
 * author      ：剧情再美终是戏 
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/3/8 15:00
 * modified By ：
 * version:    : 1.0
 */
object EtlOdsService {
  /**
   * ETL for the quiz website log (QzWebsite.log) into dwd.dwd_qz_website.
   *
   * Each line is parsed exactly once (the original parsed every line twice:
   * once in the filter and again in the map); lines that do not parse to a
   * JSONObject are dropped. The result overwrites dwd.dwd_qz_website,
   * coalesced to a single output file.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzWebsite(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzWebsite.log")

    // Parse once per line, keep only valid JSON objects, and load the table.
    source
      .mapPartitions(_.flatMap { line =>
        JsonUtil.parseObject(line) match {
          case jsonObject: JSONObject =>
            Some((
              jsonObject.getIntValue("siteid"),
              jsonObject.getString("sitename"),
              jsonObject.getString("domain"),
              jsonObject.getString("sequence"),
              jsonObject.getString("multicastserver"),
              jsonObject.getString("templateserver"),
              jsonObject.getString("status"),
              jsonObject.getString("creator"),
              jsonObject.getString("createtime"),
              jsonObject.getString("multicastgateway"),
              jsonObject.getString("multicastport"),
              jsonObject.getString("dt"),
              jsonObject.getString("dn")
            ))
          case _ => None // unparsable or non-object line — skip it
        }
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_website")
  }

  /**
   * ETL for the site-course log (QzSiteCourse.log) into dwd.dwd_qz_site_course.
   *
   * Keeps only lines that parse to a JSON object, projects the site-course
   * fields, and overwrites the target table as a single output file.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is taken from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzSiteCourse(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val sc = spark.sparkContext
    val lines = sc.textFile(Constan.ODS_FILE_PATH + "/QzSiteCourse.log")

    // Drop malformed lines, extract the fields, and load dwd.dwd_qz_site_course.
    lines
      .filter(l => JsonUtil.parseObject(l).isInstanceOf[JSONObject])
      .mapPartitions { partition =>
        partition.map { l =>
          val obj = JsonUtil.parseObject(l)
          (obj.getIntValue("sitecourseid"),
            obj.getIntValue("siteid"),
            obj.getIntValue("courseid"),
            obj.getString("sitecoursename"),
            obj.getString("coursechapter"),
            obj.getString("sequence"),
            obj.getString("status"),
            obj.getString("creator"),
            obj.getString("createtime"),
            obj.getString("helppaperstatus"),
            obj.getString("servertype"),
            obj.getIntValue("boardid"),
            obj.getString("showstatus"),
            obj.getString("dt"),
            obj.getString("dn"))
        }
      }
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_site_course")
  }

  /**
   * ETL for the question-type log (QzQuestionType.log) into
   * dwd.dwd_qz_question_type.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzQuestionType(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzQuestionType.log")

    // ETL and load into dwd.dwd_qz_question_type.
    // Note: each line is parsed twice — once in the filter, once in the map.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObject = JsonUtil.parseObject(line)
          val quesviewtype = jsonObject.getIntValue("quesviewtype")
          val viewtypename = jsonObject.getString("viewtypename")
          // NOTE(review): reads source key "questypeid" into questiontypeid —
          // looks like an intentional source-to-target rename; verify against
          // the upstream log schema.
          val questiontypeid = jsonObject.getIntValue("questypeid")
          val description = jsonObject.getString("description")
          val status = jsonObject.getString("status")
          val creator = jsonObject.getString("creator")
          val createtime = jsonObject.getString("createtime")
          val papertypename = jsonObject.getString("papertypename")
          val sequence = jsonObject.getString("sequence")
          val remark = jsonObject.getString("remark")
          val splitscoretype = jsonObject.getString("splitscoretype")
          val dt = jsonObject.getString("dt")
          val dn = jsonObject.getString("dn")
          (quesviewtype, viewtypename, questiontypeid, description, status, creator, createtime, papertypename, sequence,
            remark, splitscoretype, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_question_type")
  }

  /**
   * ETL for the question log (QzQuestion.log) into dwd.dwd_qz_question,
   * mapped through the DwdQzQuestion case class.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzQuestion(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzQuestion.log")

    // ETL and load into dwd.dwd_qz_question.
    // Note: each line is parsed twice — once in the filter, once in the map.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObject = JsonUtil.parseObject(line)
          val questionid = jsonObject.getIntValue("questionid")
          val parentid = jsonObject.getIntValue("parentid")
          val questypeid = jsonObject.getIntValue("questypeid")
          val quesviewtype = jsonObject.getIntValue("quesviewtype")
          val content = jsonObject.getString("content")
          val answer = jsonObject.getString("answer")
          val analysis = jsonObject.getString("analysis")
          val limitminute = jsonObject.getString("limitminute")
          // Scores are rounded half-up to one decimal place.
          val score = BigDecimal.apply(jsonObject.getDoubleValue("score")).setScale(1, BigDecimal.RoundingMode.HALF_UP)
          val splitscore = BigDecimal.apply(jsonObject.getDoubleValue("splitscore")).setScale(1, BigDecimal.RoundingMode.HALF_UP)
          val status = jsonObject.getString("status")
          val optnum = jsonObject.getIntValue("optnum")
          val lecture = jsonObject.getString("lecture")
          val creator = jsonObject.getString("creator")
          val createtime = jsonObject.getString("createtime")
          val modifystatus = jsonObject.getString("modifystatus")
          val attanswer = jsonObject.getString("attanswer")
          val questag = jsonObject.getString("questag")
          val vanalysisaddr = jsonObject.getString("vanalysisaddr")
          val difficulty = jsonObject.getString("difficulty")
          val quesskill = jsonObject.getString("quesskill")
          val vdeoaddr = jsonObject.getString("vdeoaddr")
          val dt = jsonObject.getString("dt")
          val dn = jsonObject.getString("dn")
          DwdQzQuestion(questionid, parentid, questypeid, quesviewtype, content, answer, analysis, limitminute, score, splitscore,
            status, optnum, lecture, creator, createtime, modifystatus, attanswer, questag, vanalysisaddr, difficulty, quesskill,
            vdeoaddr, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_question")
  }

  /**
   * ETL for the point-question relation log (QzPointQuestion.log) into
   * dwd.dwd_qz_point_question.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is taken from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzPointQuestion(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val sc = spark.sparkContext
    val rawLines = sc.textFile(Constan.ODS_FILE_PATH + "/QzPointQuestion.log")

    // Drop malformed lines, extract the fields, and load dwd.dwd_qz_point_question.
    rawLines
      .filter(l => JsonUtil.parseObject(l).isInstanceOf[JSONObject])
      .mapPartitions { partition =>
        partition.map { l =>
          val record = JsonUtil.parseObject(l)
          (record.getIntValue("pointid"),
            record.getIntValue("questionid"),
            record.getIntValue("questtype"),
            record.getString("creator"),
            record.getString("createtime"),
            record.getString("dt"),
            record.getString("dn"))
        }
      }
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_point_question")
  }

  /**
   * ETL for the knowledge-point log (QzPoint.log) into dwd.dwd_qz_point,
   * mapped through the DwdQzPoint case class.
   *
   * Fix: "score" was read with getDouble, which returns a nullable boxed
   * java.lang.Double — a record without that key would NPE when unboxed into
   * BigDecimal. Now uses getDoubleValue (defaults to 0.0), consistent with
   * every other method in this object.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzPoint(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzPoint.log")

    // ETL and load into dwd.dwd_qz_point.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObject = JsonUtil.parseObject(line)
          val pointid = jsonObject.getIntValue("pointid")
          val courseid = jsonObject.getIntValue("courseid")
          val pointname = jsonObject.getString("pointname")
          val pointyear = jsonObject.getString("pointyear")
          val chapter = jsonObject.getString("chapter")
          val creator = jsonObject.getString("creator")
          val createtime = jsonObject.getString("createtime")
          val status = jsonObject.getString("status")
          val modifystatus = jsonObject.getString("modifystatus")
          val excisenum = jsonObject.getIntValue("excisenum")
          val pointlistid = jsonObject.getIntValue("pointlistid")
          val chapterid = jsonObject.getIntValue("chapterid")
          val sequence = jsonObject.getString("sequence")
          val pointdescribe = jsonObject.getString("pointdescribe")
          val pointlevel = jsonObject.getString("pointlevel")
          // NOTE(review): reads source key "typelist" into typeslist — looks
          // like an intentional rename; verify against the upstream schema.
          val typeslist = jsonObject.getString("typelist")
          // Round half-up to one decimal place; getDoubleValue is null-safe.
          val score = BigDecimal(jsonObject.getDoubleValue("score")).setScale(1, BigDecimal.RoundingMode.HALF_UP)
          val thought = jsonObject.getString("thought")
          val remid = jsonObject.getString("remid")
          val pointnamelist = jsonObject.getString("pointnamelist")
          val typelistids = jsonObject.getString("typelistids")
          val pointlist = jsonObject.getString("pointlist")
          val dt = jsonObject.getString("dt")
          val dn = jsonObject.getString("dn")
          DwdQzPoint(pointid, courseid, pointname, pointyear, chapter, creator, createtime, status, modifystatus, excisenum, pointlistid,
            chapterid, sequence, pointdescribe, pointlevel, typeslist, score, thought, remid, pointnamelist, typelistids,
            pointlist, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_point")
  }

  /**
   * ETL for the paper-view log (QzPaperView.log) into dwd.dwd_qz_paper_view,
   * mapped through the DwdQzPaperView case class.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzPaperView(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzPaperView.log")

    // ETL and load into dwd.dwd_qz_paper_view.
    // Note: each line is parsed twice — once in the filter, once in the map.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObject = JsonUtil.parseObject(line)
          val paperviewid = jsonObject.getIntValue("paperviewid")
          val paperid = jsonObject.getIntValue("paperid")
          val paperviewname = jsonObject.getString("paperviewname")
          val paperparam = jsonObject.getString("paperparam")
          val openstatus = jsonObject.getString("openstatus")
          val explainurl = jsonObject.getString("explainurl")
          val iscontest = jsonObject.getString("iscontest")
          val contesttime = jsonObject.getString("contesttime")
          val conteststarttime = jsonObject.getString("conteststarttime")
          val contestendtime = jsonObject.getString("contestendtime")
          val contesttimelimit = jsonObject.getString("contesttimelimit")
          val dayiid = jsonObject.getIntValue("dayiid")
          val status = jsonObject.getString("status")
          val creator = jsonObject.getString("creator")
          val createtime = jsonObject.getString("createtime")
          val paperviewcatid = jsonObject.getIntValue("paperviewcatid")
          val modifystatus = jsonObject.getString("modifystatus")
          val description = jsonObject.getString("description")
          val papertype = jsonObject.getString("papertype")
          val downurl = jsonObject.getString("downurl")
          val paperuse = jsonObject.getString("paperuse")
          val paperdifficult = jsonObject.getString("paperdifficult")
          val testreport = jsonObject.getString("testreport")
          val paperuseshow = jsonObject.getString("paperuseshow")
          val dt = jsonObject.getString("dt")
          val dn = jsonObject.getString("dn")
          DwdQzPaperView(paperviewid, paperid, paperviewname, paperparam, openstatus, explainurl, iscontest, contesttime,
            conteststarttime, contestendtime, contesttimelimit, dayiid, status, creator, createtime, paperviewcatid, modifystatus,
            description, papertype, downurl, paperuse, paperdifficult, testreport, paperuseshow, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_paper_view")
  }

  /**
   * ETL for the paper log (QzPaper.log) into dwd.dwd_qz_paper.
   *
   * Fix: "totalscore" was read with getString and fed straight into
   * BigDecimal, so a missing key (null) threw NullPointerException and a
   * non-numeric value threw NumberFormatException, failing the whole job.
   * Now uses the null-safe getDoubleValue, consistent with how scores are
   * read elsewhere in this object. Also renames the misspelled local
   * `craetetime` to `createtime`.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzPaper(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzPaper.log")

    // ETL and load into dwd.dwd_qz_paper.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObject = JsonUtil.parseObject(line)
          val paperid = jsonObject.getIntValue("paperid")
          val papercatid = jsonObject.getIntValue("papercatid")
          val courseid = jsonObject.getIntValue("courseid")
          val paperyear = jsonObject.getString("paperyear")
          val chapter = jsonObject.getString("chapter")
          val suitnum = jsonObject.getString("suitnum")
          val papername = jsonObject.getString("papername")
          val status = jsonObject.getString("status")
          val creator = jsonObject.getString("creator")
          val createtime = jsonObject.getString("createtime")
          // Null-safe read (missing key -> 0.0), rounded half-up to 1 decimal.
          val totalscore = BigDecimal(jsonObject.getDoubleValue("totalscore")).setScale(1, BigDecimal.RoundingMode.HALF_UP)
          val chapterid = jsonObject.getIntValue("chapterid")
          val chapterlistid = jsonObject.getIntValue("chapterlistid")
          val dt = jsonObject.getString("dt")
          val dn = jsonObject.getString("dn")
          (paperid, papercatid, courseid, paperyear, chapter, suitnum, papername, status, creator, createtime, totalscore, chapterid,
            chapterlistid, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_paper")
  }

  /**
   * ETL for the member answer-detail log (QzMemberPaperQuestion.log) into
   * dwd.dwd_qz_member_paper_question.
   *
   * Fix: "score" was read with getString and fed straight into BigDecimal,
   * so a missing key (null) threw NullPointerException and a non-numeric
   * value threw NumberFormatException, failing the whole job. Now uses the
   * null-safe getDoubleValue, consistent with how scores are read elsewhere
   * in this object.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzMemberPaperQuestion(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzMemberPaperQuestion.log")

    // ETL and load into dwd.dwd_qz_member_paper_question.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObject = JsonUtil.parseObject(line)
          val userid = jsonObject.getIntValue("userid")
          val paperviewid = jsonObject.getIntValue("paperviewid")
          val chapterid = jsonObject.getIntValue("chapterid")
          val sitecourseid = jsonObject.getIntValue("sitecourseid")
          val questionid = jsonObject.getIntValue("questionid")
          val majorid = jsonObject.getIntValue("majorid")
          val useranswer = jsonObject.getString("useranswer")
          val istrue = jsonObject.getString("istrue")
          val lasttime = jsonObject.getString("lasttime")
          val opertype = jsonObject.getString("opertype")
          val paperid = jsonObject.getIntValue("paperid")
          val spendtime = jsonObject.getIntValue("spendtime")
          // Null-safe read (missing key -> 0.0), rounded half-up to 1 decimal.
          val score = BigDecimal(jsonObject.getDoubleValue("score")).setScale(1, BigDecimal.RoundingMode.HALF_UP)
          val question_answer = jsonObject.getIntValue("question_answer")
          val dt = jsonObject.getString("dt")
          val dn = jsonObject.getString("dn")
          (userid, paperviewid, chapterid, sitecourseid, questionid, majorid, useranswer, istrue, lasttime, opertype, paperid, spendtime, score, question_answer, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_member_paper_question")
  }

  /**
   * ETL for the major log (QzMajor.log) into dwd.dwd_qz_major.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is taken from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzMajor(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val sc = spark.sparkContext
    val lines = sc.textFile(Constan.ODS_FILE_PATH + "/QzMajor.log")

    // Drop malformed lines, extract the fields, and load dwd.dwd_qz_major.
    lines
      .filter(l => JsonUtil.parseObject(l).isInstanceOf[JSONObject])
      .mapPartitions { partition =>
        partition.map { l =>
          val obj = JsonUtil.parseObject(l)
          (obj.getIntValue("majorid"),
            obj.getIntValue("businessid"),
            obj.getIntValue("siteid"),
            obj.getString("majorname"),
            obj.getString("shortname"),
            obj.getString("status"),
            obj.getString("sequence"),
            obj.getString("creator"),
            obj.getString("createtime"),
            obj.getString("columm_sitetype"),
            obj.getString("dt"),
            obj.getString("dn"))
        }
      }
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_major")
  }

  /**
   * ETL for the course/education-subject relation log (QzCourseEduSubject.log)
   * into dwd.dwd_qz_course_edusubject.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is taken from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzCourseEduSubject(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val sc = spark.sparkContext
    val rawLines = sc.textFile(Constan.ODS_FILE_PATH + "/QzCourseEduSubject.log")

    // Drop malformed lines, extract the fields, and load the target table.
    rawLines
      .filter(l => JsonUtil.parseObject(l).isInstanceOf[JSONObject])
      .mapPartitions { partition =>
        partition.map { l =>
          val record = JsonUtil.parseObject(l)
          (record.getIntValue("courseeduid"),
            record.getIntValue("edusubjectid"),
            record.getIntValue("courseid"),
            record.getString("creator"),
            record.getString("createtime"),
            record.getIntValue("majorid"),
            record.getString("dt"),
            record.getString("dn"))
        }
      }
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_course_edusubject")
  }

  /**
   * ETL for the course log (QzCourse.log) into dwd.dwd_qz_course.
   *
   * Fix: unlike every other ETL in this object, the output row omitted the
   * dt/dn partition fields even though the source records carry them. They
   * are now read and appended, matching the sibling methods.
   * NOTE(review): confirm dwd.dwd_qz_course is partitioned by dt/dn like the
   * other dwd tables before deploying.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzCourse(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzCourse.log")

    // ETL and load into dwd.dwd_qz_course.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObj = JsonUtil.parseObject(line)
          val courseid = jsonObj.getIntValue("courseid")
          val majorid = jsonObj.getIntValue("majorid")
          val coursename = jsonObj.getString("coursename")
          val coursechapter = jsonObj.getString("coursechapter")
          val sequence = jsonObj.getString("sequence")
          val isadvc = jsonObj.getString("isadvc")
          val creator = jsonObj.getString("creator")
          val createtime = jsonObj.getString("createtime")
          val status = jsonObj.getString("status")
          val chapterlistid = jsonObj.getIntValue("chapterlistid")
          val pointlistid = jsonObj.getIntValue("pointlistid")
          // Partition columns, consistent with the other dwd_qz_* loads.
          val dt = jsonObj.getString("dt")
          val dn = jsonObj.getString("dn")
          (courseid, majorid, coursename, coursechapter, sequence, isadvc, creator, createtime, status, chapterlistid, pointlistid, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_course")
  }

  /**
   * ETL for the chapter-list log (QzChapterList.log) into
   * dwd.dwd_qz_chapter_list.
   *
   * Fix: unlike every other ETL in this object, the output row omitted the
   * dt/dn partition fields even though the source records carry them. They
   * are now read and appended, matching the sibling methods.
   * NOTE(review): confirm dwd.dwd_qz_chapter_list is partitioned by dt/dn
   * like the other dwd tables before deploying.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzChapterList(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzChapterList.log")

    // ETL and load into dwd.dwd_qz_chapter_list.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObj = JsonUtil.parseObject(line)
          val chapterlistid = jsonObj.getIntValue("chapterlistid")
          val chapterlistname = jsonObj.getString("chapterlistname")
          val courseid = jsonObj.getIntValue("courseid")
          val chapterallnum = jsonObj.getIntValue("chapterallnum")
          val sequence = jsonObj.getString("sequence")
          val status = jsonObj.getString("status")
          val creator = jsonObj.getString("creator")
          val createtime = jsonObj.getString("createtime")
          // Partition columns, consistent with the other dwd_qz_* loads.
          val dt = jsonObj.getString("dt")
          val dn = jsonObj.getString("dn")
          (chapterlistid, chapterlistname, courseid, chapterallnum, sequence, status, creator, createtime, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_chapter_list")
  }

  /**
   * ETL for the chapter log (QzChapter.log) into dwd.dwd_qz_chapter.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is taken from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzChapter(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val sc = spark.sparkContext
    val lines = sc.textFile(Constan.ODS_FILE_PATH + "/QzChapter.log")

    // Drop malformed lines, extract the fields, and load dwd.dwd_qz_chapter.
    lines
      .filter(l => JsonUtil.parseObject(l).isInstanceOf[JSONObject])
      .mapPartitions { partition =>
        partition.map { l =>
          val obj = JsonUtil.parseObject(l)
          (obj.getIntValue("chapterid"),
            obj.getIntValue("chapterlistid"),
            obj.getString("chaptername"),
            obj.getString("sequence"),
            obj.getString("showstatus"),
            obj.getString("creator"),
            obj.getString("createtime"),
            obj.getIntValue("courseid"),
            obj.getIntValue("chapternum"),
            obj.getIntValue("outchapterid"),
            obj.getString("dt"),
            obj.getString("dn"))
        }
      }
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_chapter")
  }

  /**
   * ETL for the center-paper relation log (QzCenterPaper.log) into
   * dwd.dwd_qz_center_paper.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is taken from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzCenterPaper(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val sc = spark.sparkContext
    val rawLines = sc.textFile(Constan.ODS_FILE_PATH + "/QzCenterPaper.log")

    // Drop malformed lines, extract the fields, and load dwd.dwd_qz_center_paper.
    rawLines
      .filter(l => JsonUtil.parseObject(l).isInstanceOf[JSONObject])
      .mapPartitions { partition =>
        partition.map { l =>
          val record = JsonUtil.parseObject(l)
          (record.getIntValue("paperviewid"),
            record.getIntValue("centerid"),
            record.getString("openstatus"),
            record.getString("sequence"),
            record.getString("creator"),
            record.getString("createtime"),
            record.getString("dt"),
            record.getString("dn"))
        }
      }
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_center_paper")
  }

  /**
   * ETL for the center log (QzCenter.log) into dwd.dwd_qz_center.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzCenter(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzCenter.log")

    // ETL and load into dwd.dwd_qz_center.
    // Note: each line is parsed twice — once in the filter, once in the map.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObj = JsonUtil.parseObject(line)
          val centerid = jsonObj.getIntValue("centerid")
          val centername = jsonObj.getString("centername")
          val centeryear = jsonObj.getString("centeryear")
          val centertype = jsonObj.getString("centertype")
          val openstatus = jsonObj.getString("openstatus")
          val centerparam = jsonObj.getString("centerparam")
          val description = jsonObj.getString("description")
          val creator = jsonObj.getString("creator")
          val createtime = jsonObj.getString("createtime")
          val sequence = jsonObj.getString("sequence")
          val provideuser = jsonObj.getString("provideuser")
          val centerviewtype = jsonObj.getString("centerviewtype")
          val stage = jsonObj.getString("stage")
          val dt = jsonObj.getString("dt")
          val dn = jsonObj.getString("dn")
          (centerid, centername, centeryear, centertype, openstatus, centerparam, description,
            creator, createtime, sequence, provideuser, centerviewtype, stage, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_center")
  }

  /**
   * ETL for the business log (QzBusiness.log) into dwd.dwd_qz_business.
   *
   * Fix: this was the only method in the object missing
   * `import spark.implicits._`, so the `.toDF` call on the RDD had no
   * implicit encoder in scope and would not compile. The import is added,
   * matching every sibling method.
   *
   * @param spark active SparkSession
   * @param dt    date partition (NOTE(review): unused — dt is read from each
   *              JSON record instead; confirm this is intended)
   * @param dn    region partition (unused for the same reason)
   */
  def EtlQzBusiness(spark: SparkSession, dt: String, dn: String) = {
    import spark.implicits._ // required for .toDF on the RDD below
    // Read the raw ODS log.
    val ssc = spark.sparkContext
    val source = ssc.textFile(Constan.ODS_FILE_PATH + "/QzBusiness.log")

    // ETL and load into dwd.dwd_qz_business.
    source
      .filter(line => JsonUtil.parseObject(line).isInstanceOf[JSONObject])
      .mapPartitions(it => {
        it.map(line => {
          val jsonObj = JsonUtil.parseObject(line)
          val businessid = jsonObj.getIntValue("businessid")
          val businessname = jsonObj.getString("businessname")
          val sequence = jsonObj.getString("sequence")
          val status = jsonObj.getString("status")
          val creator = jsonObj.getString("creator")
          val createtime = jsonObj.getString("createtime")
          val siteid = jsonObj.getIntValue("siteid")
          val dt = jsonObj.getString("dt")
          val dn = jsonObj.getString("dn")
          (businessid, businessname, sequence, status, creator, createtime, siteid, dt, dn)
        })
      })
      .toDF.coalesce(1).write.mode(SaveMode.Overwrite).insertInto("dwd.dwd_qz_business")
  }

}
