package com.zhaosc.spark.sql.hive

import org.apache.spark.sql.SparkSession

/**
 * Minimal Spark-on-Hive example: opens a Hive-enabled SparkSession and
 * drops the `student_infos` table from the Hive metastore if it exists.
 *
 * Run locally via `spark.master=local`; in production the master is
 * normally supplied by `spark-submit` instead of hard-coded here.
 */
object HiveDataSource {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      // App name now matches this object (was a copy-paste leftover
      // "RDD2DataFrameByReflection" from an unrelated example).
      .appName("HiveDataSource")
      .config("spark.master", "local")
      // Required so spark.sql(...) targets the Hive metastore catalog;
      // without it, DROP TABLE runs against the in-memory catalog only.
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("DROP TABLE IF EXISTS student_infos")

    // spark.stop() shuts down the session (and its SparkContext) cleanly;
    // stopping only the SparkContext leaves session state behind.
    spark.stop()
  }
}