package com.xiaoliangkou.data.spark

import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.hive.HiveContext
import org.slf4j.LoggerFactory

/**
  * Author: denghp
  * Date: 2016-02-01
  * Time: 4:47 PM
  * Description: Runs smoke-test Spark SQL queries against Hive when executing on YARN.
  * <p>
  * </p>
  * Version: $version$
  */
/** Smoke-test driver: runs two aggregate queries against the Hive table
  * `nginx_log` through a [[HiveContext]] and logs the results, then keeps
  * the driver alive briefly so the logs can be inspected in the YARN web UI.
  */
object SparkSQLHiveOnYarn {

  val logger = LoggerFactory.getLogger(SparkSQLHiveOnYarn.getClass)

  // How long to keep the driver alive after the queries finish so the
  // logs remain visible in the YARN web UI (5 minutes, matching the
  // original 300000 ms — the old comment incorrectly said 10 minutes).
  private val KeepAliveMillis = 5 * 60 * 1000L

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setAppName("SparkSQLHiveOnYarn")
    val sc = new SparkContext(sparkConf)

    try {
      val hiveContext = new HiveContext(sc)
      import hiveContext.sql

      // Total row count of the target table nginx_log.
      logger.info("Result of 'select count(1) from nginx_log': ")
      val count = sql("SELECT COUNT(1) FROM nginx_log").collect().head.getLong(0)
      logger.info("nginx_log all rows : {}", count)

      // Per-client request counts. HiveQL/Spark SQL require a GROUP BY for
      // the non-aggregated column `remote_addr`; without it the original
      // query failed analysis.
      logger.info("Result of 'select remote_addr, count(1) from nginx_log group by remote_addr': ")
      sql("SELECT remote_addr, COUNT(1) FROM nginx_log GROUP BY remote_addr").collect().foreach(println)

      logger.info("exec spark sql successfully..............")
      // Sleep so the application stays listed in the web UI for inspection.
      Thread.sleep(KeepAliveMillis)
    } finally {
      // Always release cluster resources, even if a query fails.
      sc.stop()
    }
  }

}
