import java.text.SimpleDateFormat
import java.util.Date

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
/**
  * Exports Tmall live-stream metrics (commodity count, viewer count, live
  * date) for a single anchor from object storage to a local CSV file.
  *
  * @author o2o-rd-0008
  * @since  2019/10/8 11:25
  */
object test_price {

  /**
    * Spark entry point: reads Tmall live-stream JSON records from object
    * storage, keeps the rows for one hard-coded anchor, converts the
    * epoch-second `liveTime` field to a `yyyy-MM-dd` date string via a UDF,
    * and writes the result as a single CSV file with a header row.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    // Elasticsearch connection settings — configured but not referenced by
    // the job below; presumably shared boilerplate. TODO confirm before removing.
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")

    // SECURITY: hard-coded object-storage credentials committed to source.
    // Move these to environment variables or a Hadoop credential provider
    // (hadoop.security.credential.provider.path) and rotate the keys.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    try {
      // Expose the timestamp formatter to Spark SQL.
      sqlContext.udf.register("tranTimeToString", tranTimeToString _)

      val data_id = sqlContext.read.json(s"${a_aa_amainpackage.a_o2odata_deal.config.config.tmall_live_id_path}")
      data_id.createOrReplaceTempView("data_id")

      sqlContext.sql(
        s"""
           |select
           |commodityCount,
           |viewerCount,
           |tranTimeToString(liveTime) as liveTime
           |from data_id where anchorId='69226163'
         """.stripMargin)
        .repartition(1) // one partition => a single output CSV part-file
        .write.option("header", "true")
        .csv("D:\\zhobo_weiya")

      //println(tranTimeToString("1575043200"))
    } finally {
      // Always release the SparkContext, even when the job fails
      // (the original version leaked it on every run).
      sc.stop()
    }
  }

  /**
    * Converts an epoch timestamp in seconds (given as a string) to a
    * `yyyy-MM-dd` date string in the JVM's default time zone.
    *
    * @param timestamp epoch seconds, e.g. "1575043200"
    * @return formatted date, e.g. "2019-11-30" in UTC+8 (zone-dependent)
    * @throws NumberFormatException if `timestamp` is not a parseable long
    */
  def tranTimeToString(timestamp: String): String = {
    // Seconds -> milliseconds by arithmetic rather than appending "000";
    // identical result for valid input and tolerant of surrounding whitespace.
    val millis = timestamp.trim.toLong * 1000L
    // SimpleDateFormat is not thread-safe; a fresh local instance per call
    // keeps the UDF safe under Spark's task parallelism.
    val fm = new SimpleDateFormat("yyyy-MM-dd")
    fm.format(new Date(millis))
  }
}
