package com.o2o.cleaning.month.platform.ebusiness_plat.zaixianjy_yl.jiaoyu

import java.util.Properties

import com.alibaba.fastjson.JSON
import com.o2o.utils.TimeStamp
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.collection.mutable

object yuanfudao {

  // Platform identifier and reporting period. Kept as `var` to preserve the
  // original external interface (they could be reassigned before `main` runs).
  var platform = "yuanfudao" // platform name
  var year = "2022" // year of the current reporting month
  var month = "9"
  var timestamp = TimeStamp.TIME202209

  // Cleaned-goods source path, e.g. dws-data/split/split_data/2021/tengxunketang/7/
  var resultPath = s"s3a://dws-data/split/split_data/${year}/${platform}/${month}/"

  /**
   * Entry point: reads the cleaned platform data, extracts distinct teacher ids
   * into a mid table, then writes one aggregated platform row, both via JDBC.
   */
  def main(args: Array[String]): Unit = {
    // Local-mode Spark session with Kryo serialization.
    val spark = SparkSession.builder()
      .master("local[*]")
      .config("spark.debug.maxToStringFields", "10000")
      .appName("MongoSparkConnectorIntro")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // OBS (S3A-compatible) storage configuration.
    // SECURITY NOTE(review): the access/secret keys and the DB password below
    // are hard-coded in source; move them to environment variables or a
    // secrets store and rotate the exposed credentials.
    val sc: SparkContext = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    // DWS (PostgreSQL-compatible) JDBC connection properties:
    // user name, password and driver class.
    val connectionProperties = new Properties()
    connectionProperties.put("user", "sysadmin")
    connectionProperties.put("password", "Bigdata@123")
    connectionProperties.put("driver", "org.postgresql.Driver")

    // Register the enriched data as a temp view. `createOrReplaceTempView`
    // replaces the deprecated `registerTempTable`; the original `val fram`
    // only captured Unit and has been dropped.
    zaixianjiaoyuCaculate(spark, resultPath).createOrReplaceTempView("teacher_id")

    // Explode the pipe-separated `teachers` column into one distinct,
    // non-empty teacher id per row.
    val teacher_data = spark.sql(
      """
        |select distinct teacher_id from (select teachernew teacher_id from teacher_id lateral view explode( split(teachers,'\\|') ) views as teachernew) where teacher_id != ''
        |""".stripMargin)

    // Append the teacher ids (tagged with period and platform) to the mid table.
    teacher_data.withColumn("timestamp", lit(s"${timestamp}"))
      .withColumn("platformname", lit("猿辅导"))
      .write.mode("append")
      .jdbc("jdbc:postgresql://114.115.219.204:8000/postgres", "test_zyf.o2o_zaixianjiaoyu_mid_2022", connectionProperties)

    println("--->  教师 id 插入完成 ")

    // Platform-level aggregation: distinct goods count, total sales count and
    // sales amount. Year-over-year (_yoy) and other columns are left empty
    // here and presumably filled by a downstream step — TODO confirm.
    val data = spark.sql("select " +
      "'猿辅导' as platformname " +
      ",count(distinct good_id) good_idcount" +
      ",'' as good_idcount_yoy" +
      ",sum(sellcount) sellcount" +
      ",'' as sellcount_yoy" +
      ",sum(salesamount) salesamount" +
      ",'' as salesamount_yoy" +
      ",'' as servicesstucount" +
      ",'' as servicesstucount_yoy" +
      ",'' teachercount " +
      ",'' as teachercount_yoy" +
      ",'1' servicescount " +
      ",'' as servicescount_yoy" +
      s",'${timestamp}' as timestamp " +
      "from teacher_id " +
      "where sellcount > 0")

    // Append the single aggregated row to the yearly result table.
    data.write.mode("append")
      .jdbc("jdbc:postgresql://114.115.219.204:8000/postgres", "test_zyf.o2o_zaixianjiaoyu_2022", connectionProperties)
  }

  /**
   * Reads cleaned ORC data from `sourcePath` and returns it as a DataFrame
   * with an added `teachers` column: the pipe-separated teacher ids extracted
   * from each record's `teacherInfoModule` array (empty string when absent).
   *
   * Fixes vs. the original:
   *  - `sourcePath` is now actually used (the original read the global
   *    `resultPath`, silently ignoring the argument);
   *  - the "|" separator is appended based on accumulated content rather than
   *    the loop index, so a skipped first entry no longer produces a leading
   *    "|" (which split into a spurious empty teacher id downstream).
   */
  def zaixianjiaoyuCaculate(spark: SparkSession, sourcePath: String): DataFrame = {
    val enriched = spark.read.orc(sourcePath).toJSON.rdd.map(line => {
      val record = JSON.parseObject(line)
      // teacherInfoModule may be absent; treat that as "no teachers".
      val teacherArray =
        if (record.containsKey("teacherInfoModule"))
          record.getJSONArray("teacherInfoModule").toArray()
        else
          null

      // Accumulates teacher ids separated by "|".
      var teachers = ""
      try {
        if (teacherArray != null) {
          for (i <- teacherArray.indices) {
            val teacherObject = JSON.parseObject(teacherArray(i).toString)
            var teacherId =
              if (teacherObject.containsKey("teacherId"))
                teacherObject.getOrDefault("teacherId", null).toString
              else
                null
            if (teacherId != null && teacherId != "") {
              // Unwrap Mongo extended-JSON longs: {"$numberLong":"123"} -> 123
              if (teacherId.contains("$numberLong")) {
                teacherId = teacherId.replace("{\"$numberLong\":\"", "")
                teacherId = teacherId.replace("\"}", "")
              }
              if (teachers.nonEmpty) {
                teachers = teachers + "|"
              }
              teachers = teachers + teacherId
            }
          }
        }
      } catch {
        // Best-effort: log the record's good_id and keep whatever was
        // accumulated so far instead of failing the whole job.
        case e: Exception =>
          println(e)
          println(record.getString("good_id") + "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
      }
      record.put("teachers", teachers)
      record.toString
    })
    spark.read.json(enriched)
  }

}
