package cn.lecosa.spark.hive

import org.apache.spark.SparkConf
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.SparkContext
import java.util.logging.Level
import java.util.logging.Logger

/**
 * Demonstration job for Hive date/time SQL functions via a Spark
 * [[org.apache.spark.sql.hive.HiveContext]].
 *
 * Runs locally (`local[*]`) against the warehouse at
 * `hdfs://park01:9000/user/hive/warehouse`, switches to the `lecosa`
 * database, and prints the result of several date/time queries.
 */
object HiveTime {

  /**
   * Entry point: wires up Spark, runs the demo queries, then stops the context.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Must be set before the SparkContext/HiveContext is created so the
    // Hive warehouse location is picked up.
    System.setProperty("spark.sql.warehouse.dir", "hdfs://park01:9000/user/hive/warehouse")

    // NOTE(review): Spark 1.x/2.x logs through log4j, not java.util.logging,
    // so these two calls most likely do NOT quiet Spark's console output.
    // Consider org.apache.log4j.{Logger, Level} (bundled with Spark) instead
    // — verify against the Spark version in use.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARNING)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    val conf = new SparkConf().setAppName("firtHive").setMaster("local[*]")
    val sc = new SparkContext(conf)
    val hiveContext = new HiveContext(sc)

    hiveContext.sql("show databases").show()
    hiveContext.sql("use lecosa").show()

    // Kept for reference; note '2009-30-07' is not a valid yyyy-MM-dd date.
    //    hiveContext.sql("SELECT datediff('2009-30-07', '2009-31-07')   ").show()

    // Seconds since the Unix epoch for a fixed literal timestamp.
    hiveContext.sql("select unix_timestamp('2018-03-03 20:30:34') ").show()
    // Current timestamp truncated to its first 20 characters.
    hiveContext.sql("select substring(now(), 1, 20)").show()
    // Crossing a day boundary: date_sub with a negative offset adds one day.
    hiveContext.sql("select date_sub(now(), -1)").show()
    // Crossing a month boundary: one month ahead of now.
    hiveContext.sql("select add_months(now(), 1)").show()

    sc.stop()
  }

  /* Hive date/time conversion cheat sheet:
     // BIGINT (epoch millis) to TIMESTAMP:
     from_unixtime(time/1000)
     // TIMESTAMP to BIGINT (epoch millis):
     unix_timestamp(time)*1000
     // BIGINT (epoch millis) to STRING (yyyy-MM-dd):
     from_unixtime(time/1000,'yyyy-MM-dd')
     // STRING (yyyyMMdd) to BIGINT (epoch millis):
     unix_timestamp(date_id,'yyyyMMdd')*1000
     // STRING (yyyy/MM/dd) to BIGINT (epoch millis):
     unix_timestamp(date_id,'yyyy/MM/dd')*1000
     // STRING (yyyyMMdd) to TIMESTAMP:
     from_unixtime(unix_timestamp(date_id,'yyyyMMdd'))
     // TIMESTAMP to STRING (yyyyMMdd):
     from_unixtime(unix_timestamp(regist_time),'yyyyMMdd')
     // INT (DAY) to STRING:
     CAST(DAY AS STRING)
     // TIMESTAMP to STRING (yyyy-MM-dd):
     TO_DATE(created_time) */
}