package com.scala.learn.sparkSql_hive

import org.apache.spark.sql.SparkSession

/**
  * @Copyright: Shanghai Definesys Company.All rights reserved.
  * @Description:
  *
  * 1. Add the spark-hive dependency to the pom.
  * 2. Place the hive-site.xml with the metastore database settings under the resource directory.
  * 3. Supports Hive user-defined functions (UDFs).
  * @author: chuhaitao
  * @since: 2019/3/17 21:41
  * @history:
  *          1.2019/3/17 created by chuhaitao
  */
object SparkSqlHive {

  /**
    * Entry point: demonstrates Hive-backed Spark SQL queries and registering
    * a Hive UDF implemented in Java.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("hive")
      .master("local")
      // Enable Hive support (requires the spark-hive dependency and
      // hive-site.xml on the classpath — see the notes in the file header).
      .enableHiveSupport()
      // Point Spark SQL at the Hive warehouse directory on HDFS.
      .config("spark.sql.warehouse.dir", "hdfs://hadoop102:9000/user/hive/warehouse")
      .getOrCreate()

    // Hive SQL syntax is available once Hive support is enabled.
    spark.sql("show databases").show()

    // Register a custom Hive function (UDF) implemented in Java.
    spark.sql("create temporary function ip2Long as 'com.java.learn.hive.IpToLong' ")
    // Query using the registered UDF. An action such as .show() is required:
    // Spark SQL is lazy, so without it the query would never actually run.
    spark.sql("select id,ip, ip2Long(ip) from access_log").show()

    // Release the session and underlying SparkContext.
    spark.stop()
  }
}
