package com.czk.boot.spark.controller.hive;

import org.apache.spark.sql.SparkSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.io.File;

/**
 * REST endpoint for running a smoke-test query against the Hive catalog via Spark.
 *
 * @author ChenZhangKun
 * @since 2021/12/16
 */
@RestController
@RequestMapping("/spark")
public class SparkHiveController {

    /** Shared SparkSession supplied by the Spring context (configured elsewhere). */
    private final SparkSession spark;

    /**
     * Constructor injection (preferred over field {@code @Autowired}): keeps the
     * dependency {@code final} and makes the controller unit-testable.
     *
     * @param spark the application-wide SparkSession bean
     */
    public SparkHiveController(SparkSession spark) {
        this.spark = spark;
    }

    /**
     * Runs {@code show tables} against the session's Hive catalog and prints the
     * result via {@link org.apache.spark.sql.Dataset#show()}.
     *
     * <p>NOTE(review): {@code show()} only writes to the server's stdout — the HTTP
     * response body is empty. If callers need the table list, return
     * {@code spark.sql("show tables").collectAsList()} instead.
     */
    @GetMapping("/hive")
    public void testHive() {
        // Identify as the "hadoop" user for HDFS/Hive access. NOTE(review): this
        // mutates global JVM state on every request; it should be set once at
        // application startup (e.g. in a config class or launch script).
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        spark.sql("show tables").show();
    }

}
