
import java.text.SimpleDateFormat
import java.util.Date

import a_aa_amainpackage.a_o2odata_deal.config.config.{months, years}
import com.alibaba.fastjson.{JSON, JSONArray}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/9/5 11:21
  * @ Param:  ${PARAM}
  * @ Description: Scratch driver: reads the Tmall anchor_shop ORC snapshot from
  *                OBS (S3A) and bulk-indexes it into Elasticsearch.
  */
object test_tm {
  def main(args: Array[String]): Unit = {
    // --- Spark configuration ------------------------------------------------
    // Local-mode driver (local[*]) using Kryo serialization and case-sensitive
    // SQL; the es.* settings point the elasticsearch-spark connector at the
    // O2OElastic cluster on 192.168.1.157:9200.
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // SECURITY NOTE(review): live-looking S3A access/secret keys for Huawei
    // OBS are hard-coded here and committed to source control. They should be
    // revoked and supplied via a credentials provider / environment config,
    // not embedded in code.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")


    /*val data = sqlContext.read.orc("s3a://dws-data/g_shop/2019/12/tmall/")

    data.printSchema()*/
    // --- Live pipeline ------------------------------------------------------
    // Reads the anchor_shop_v3 ORC snapshot for the configured ${years}/${months}
    // from OBS, re-parses each row's JSON form into a fastjson object, and bulk
    // indexes into the "anchor_shop" ES index, using "major_key" as the doc id.
    import org.elasticsearch.spark._
    sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${years}/${months}/anchor_shop_v3").toJSON.rdd.map(line =>{
      JSON.parseObject(line)
    }).saveToEs(s"anchor_shop/anchor_shop", Map("es.mapping.id" -> "major_key"))

    // --- Everything below is commented-out scratch code kept for reference. --
   /*val data = sqlContext.read.option("header","true").csv("D:\\京东品牌结果\\hema.csv")

    data.createOrReplaceTempView("data")

    sqlContext.sql(
      s"""
         |select
         |tm_brandValueId,
         |tm_brandName,
         |collect_list(score),
         |collect_list(category),
         |--concat_ws(':',collect_list(score)) as mids
         |from data
         |group by tm_brandValueId,tm_brandName
       """.stripMargin).show(false)*/

  /*  val tm = "1575120851"

    val a = tranTimeToString(tm)
    println(a)*/

    /*val seq = Seq(1,2,3)
    for (i <- 0 to seq.length-1){
      println(seq(i))
    }
    */
    /*import com.alibaba.fastjson.JSONObject
    val json = "{\"school\":\"商职\",\"sex\":\"男\",\"name\":\"wjw\",\"age\":22}"

    val jsonObject: JSONObject = JSON.parseObject(json)
    println(jsonObject)
    println(jsonObject.getString("school"))
    println(jsonObject.getString("sex"))
*/

    //sqlContext.udf.register("handle_array",handle_array _)

   /* for (months <- 2 to 2){
      val result = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${months}/zhibo_finally/")


      val value = result.toJSON.rdd.map(line => {
        val lines = JSON.parseObject(line)
        val evaluates = lines.getOrDefault("evaluates", "-1").toString
        val Base_Info = lines.getOrDefault("Base_Info", "-1").toString
        var promotion_price = lines.getOrDefault("promotion_price","-1").toString
        var original_cost = lines.getOrDefault("original_cost","-1").toString
        val priceText = lines.get("priceText").toString
        if (promotion_price.contains("?") | promotion_price.contains("？")){
          promotion_price = priceText
        }
        if (original_cost.contains("?") | original_cost.contains("？")){
          original_cost = priceText
        }
        var ev = "-1"
        var str = new JSONObject
        if (evaluates.equals("-1")) {
          ev = "{\"fuyi\":\"-1\"}"
        } else {
          str = JSON.parseObject(evaluates)
        }
        if (!ev.contains("fuyi")) {
          lines.put("evaluates", str)
        } else {
          val evs = JSON.parseObject(ev)
          lines.put("evaluates", evs)
        }
        lines.put("Base_Info",Base_Info)
        lines.put("promotion_price",promotion_price)
        lines.put("original_cost",original_cost)
        lines
      })
      //original code
      value.saveToEs(s"2019_tmall/tmall_2019_${months}", Map("es.mapping.id" -> "good_id"))

    }*/



    /*import org.apache.spark.sql.functions._
    data.agg(count("*"),sum("sellCount"),sum("salesAmount")).show(false)

    data.printSchema()*/

   /* sqlContext.udf.register("tranTimeToString",tranTimeToString _)
    println(data.count()+"********")
    data.selectExpr("good_id","viewcount","liveTime","add_to_field").where("add_to_field not rlike '2019-11'")
        .where("cast(viewcount as bigint)>5000").show(500,false)
*/

   /* println(data.where("add_to_field is null").count()+"********")
    data.createOrReplaceTempView("data")*/
   /* sqlContext.sql(
      s"""
         |select
         |*
         |from
         |(select
         |tranTimeToString(liveTime) as liveTime
         |from data where liveTime is not null)
         |group by liveTime
       """.stripMargin).show(false)*/

    /*data.selectExpr("viewcount","roomTypeName","roomType","nick","liveTitle","liveTime","liveId","anchorId","commodityCount")
      .show()*/
   /* sqlContext.udf.register("func",func _)

    data.createOrReplaceTempView("data")

    data.printSchema()

    sqlContext.sql(
      s"""
         |select
         |good_id,
         |add_to_field
         |from data
         |order by add_to_field desc
       """.stripMargin).show(200,false)*/

    //data.limit(100).write.json("D:\\zhibo_test_v1")

    /*data.createOrReplaceTempView("data")

    sqlContext.sql(
      s"""
         |select
         |*
         |from data
         |order by add_to_field desc
       """.stripMargin).limit(100).repartition(1).write.json("D:\\zhibo_test_v1")*/
  }

  /**
    * Converts a Unix timestamp given in seconds (as a string) into a
    * "yyyy-MM" month string, rendered in the JVM's default time zone.
    *
    * @param timestamp epoch seconds as text, e.g. "1575120851"; leading and
    *                  trailing whitespace is tolerated
    * @return the instant's month formatted as "yyyy-MM"
    * @throws NumberFormatException if `timestamp` does not parse as a Long
    */
  def tranTimeToString(timestamp: String): String = {
    // Convert seconds -> milliseconds numerically instead of appending the
    // text "000": clearer intent, and not dependent on string formatting.
    val millis = timestamp.trim.toLong * 1000L
    // SimpleDateFormat is not thread-safe, so a fresh instance is created per
    // call rather than shared.
    val fm = new SimpleDateFormat("yyyy-MM")
    fm.format(new Date(millis))
  }

  /**
    * Zips three parallel sequences (anchor nickname, viewer count, live time)
    * into a JSON array string of the form
    * [{"nick":...,"viewCount":...,"liveTime":...}, ...].
    *
    * Assumes the three sequences are index-aligned and at least as long as
    * `nick_collect`; a shorter companion sequence throws
    * IndexOutOfBoundsException, as in the original contract.
    *
    * @param nick_collect        anchor nicknames (drives the iteration)
    * @param viewerCount_collect viewer counts, aligned with nick_collect
    * @param liveTime_collect    live times, aligned with nick_collect
    * @return the JSON array serialized to a string
    */
  def handle_array(nick_collect: Seq[String], viewerCount_collect: Seq[String], liveTime_collect: Seq[String]): String = {
    import com.alibaba.fastjson.JSONObject
    val array = new JSONArray()
    for (i <- nick_collect.indices) {
      // Build each element with JSONObject.put instead of hand-concatenated
      // JSON text: values containing quotes or backslashes are now escaped
      // correctly instead of yielding malformed JSON that fails to parse.
      // `new JSONObject(true)` keeps insertion order, matching the original
      // nick/viewCount/liveTime key order in the serialized output.
      val obj = new JSONObject(true)
      obj.put("nick", nick_collect(i))
      obj.put("viewCount", viewerCount_collect(i))
      obj.put("liveTime", liveTime_collect(i))
      array.add(obj)
    }
    array.toString
  }


  /** Returns the number of characters in `s`, widened to a Long. */
  def func(s: String): Long = s.length.toLong
}
