package com.o2o.cleaning.month.platform.ebusiness_plat.kuaishou

import java.text.SimpleDateFormat
import java.util.{Calendar, Date, Properties}

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

object CheckShopDataDetail {

  /**
   * One-off data check: reads the exported shop file d:\shop.csv, scrubs
   * embedded carriage-return / newline artifacts from each row's JSON form,
   * re-normalizes the `shopName` field (strips stray quote characters), and
   * writes a 4-column extract to d:\test1 as a single CSV part file.
   *
   * Side effects: reads d:\shop.csv; overwrites d:\test1.
   */
  def main(args: Array[String]): Unit = {

    // NOTE(security): Elasticsearch and OBS credentials are hard-coded below.
    // Move them to configuration / environment variables before this code is
    // shared or committed further.
    val spark = SparkSession.builder()
      .appName("CheckDataDetail")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Each CSV row is rendered as one JSON string. The export may contain
    // \r / \n remnants at several escaping depths (literal backslash
    // sequences as well as real control characters), so every level is
    // scrubbed before fastjson parses the row. The chain is intentionally
    // kept verbatim: each replaceAll targets a distinct regex escaping level.
    val cleanedRows: RDD[String] = spark.read.option("header", true).csv("d:\\shop.csv")
      .toJSON.rdd.map { line =>
        val scrubbed = line
          .replaceAll("\\\\\\\\r\\\\\\\\n", "")
          .replaceAll("\\\\\\\\r", "")
          .replaceAll("\\\\\\\\n", "")
          .replaceAll("\\\\r\\\\n", "")
          .replaceAll("\\\\r", "")
          .replaceAll("\\\\n", "")
          .replaceAll("\\r\\n", "")
          .replaceAll("\\n", "")
          .replaceAll("\\r", "")
          .replaceAll("\r\n", "")
          .replaceAll("\r", "")
          .replaceAll("\n", "")
        val row: JSONObject = JSON.parseObject(scrubbed)
        // Strip stray quote characters from shopName and re-insert it so the
        // downstream table sees a clean value ("-1" when the field is absent).
        val shopName = row.getOrDefault("shopName", "-1").toString
          .replaceAll("\"", "").replaceAll("\\\"", "")
        row.remove("shopName")
        row.put("shopName", shopName)
        row.toString
      }

    // registerTempTable has been deprecated since Spark 2.0;
    // createOrReplaceTempView is the supported equivalent.
    spark.read.json(cleanedRows).createOrReplaceTempView("t1")

    // NOTE(review): spark.sql.caseSensitive is set to "true", yet this query
    // selects `shopname` while the map stage above writes `shopName`. If the
    // source CSV header is not literally "shopname", this select will fail
    // with an unresolved-column error — confirm the header spelling.
    spark.sql(
      """
        |select
        |platform_id,
        |shopid,
        |shopname,
        |company_name
        |from t1
        |""".stripMargin)
      .repartition(1).write.mode("overwrite").option("header", true).csv("d:\\test1")

    // Release local Spark resources; the original leaked the session.
    spark.stop()
  }
}
