package chapter10

import org.apache.spark.sql.SparkSession
/**
 * author: 余辉 (Yu Hui)
 * blog: https://blog.csdn.net/silentwolfyh
 * descriptions:
 * For the detailed setup steps see: BookData/input/09sparkSql连接hive的全部过程.txt
 *
 * date: 2024-09-02 2:15 PM
 */

object HIVEToDF {

  /**
   * Entry point: opens a Hive-enabled SparkSession and prints the contents
   * of the `doit13user` table from Hive's `default` database.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    // Impersonate the HDFS user so a local run has permission to read the
    // Hive warehouse directory.
    // NOTE(review): usernames are case-sensitive on Linux — confirm the
    // cluster user really is "ROOT" and not "root".
    System.setProperty("HADOOP_USER_NAME", "ROOT")

    val spark = SparkSession.builder()
      .appName("DF02_Hive")
      .master("local[*]")       // run locally on all available cores
      .enableHiveSupport()      // requires hive-site.xml on the classpath
      .getOrCreate()

    try {
      // Query the `default` database; `show()` prints the first rows to stdout.
      spark.sql(
        """
          |select * from doit13user
          |""".stripMargin).show()

      /***
       * Expected output:
       * +---+-------+
       * | id|   name|
       * +---+-------+
       * |  1|     ls|
       * |  2|     ww|
       * |  3|     zs|
       * |  4|xiaohui|
       * |  5|     tg|
       * +---+-------+
       */
    } finally {
      // Always release the session so the JVM exits cleanly
      // (the original leaked the SparkSession).
      spark.stop()
    }
  }
}
