package demo.spark.sql

import demo.spark.utils.{SparkCore, SparkSql}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.storage.StorageLevel

import scala.collection.mutable


/**
 * The Spark SQL module creates DataFrames from in-application memory, files, or
 * external data sources (Hive / HDFS / HBase). A DataFrame maps the distributed
 * data onto a structured template that satisfies the JDBC / ODBC query model, so
 * users can operate on resilient distributed datasets as if writing SQL. The SQL
 * is actually parsed into transform and action operators (analogous to those in
 * the SparkCore module) that are computed and merged on the cluster's worker nodes.
 */
object SqlDemoDataSource {

  // Case-class templates used to map rows onto DataFrame schemas.
  // NOTE(review): `hobby: Array[String]` breaks case-class structural
  // equality/hashCode (arrays compare by reference in the generated
  // equals) — prefer Seq[String] if these instances are ever compared,
  // deduplicated, or used as map keys.
  case class UserInfo(name: String, id: Long)
  case class UserInfoFull(name: String, id: Long, age: Long, hobby: Array[String])
  case class UserInfoFull2(name: String, id: Long, age: Long, hobby: Array[String], week: scala.collection.Map[String, String])

  // Sub-directories for demo data files, appended to FILE_ROOT_PATH.
  val SPARK_SQL: String = "spark-sql/"
  val SPARK_CORE: String = "spark-core/"
  // NOTE(review): machine-specific absolute path — consider moving it to
  // configuration or a command-line argument so the demo runs elsewhere.
  val FILE_ROOT_PATH: String = "/Users/icasue/Desktop/lbs-server-plugins/icasue-plugins-demos/icasue-demo-spark/local_text/"

  // Lazily initialize the Spark handles so they are created on first use
  // rather than eagerly during object initialization. An eager `val` that
  // fails here (e.g. no cluster available) would surface as an opaque
  // ExceptionInInitializerError before main() even runs; `lazy val` keeps
  // the same public accessor signature, so callers are unaffected.
  lazy val sparkContext: SparkContext = SparkCore.getContext("SqlDemoDataSource")
  lazy val sparkSession: SparkSession = SparkSql.getSession("SqlDemoDataSource")
  lazy val sqlContext: SQLContext = sparkSession.sqlContext

  /**
   * Application entry point — currently an empty scaffold; data-source demo
   * code is expected to be added here.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

  }

}
