package com.loong.spark.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, Row, SQLContext}

import scala.collection.mutable.ArrayBuffer
import scala.util.parsing.json.{JSON, JSONArray}

/**
  * Created by 梁浩峰 on 2016/9/24 21:35.
  */
object SparkSqlUtil {

  // Case classes used for schema examples. They must live at object level:
  // Spark's reflection-based schema inference (and closure serialization)
  // does not work reliably with case classes declared inside a method body.
  case class Person(name: String, age: Int)
  case class t_user(id: String, name: String, age: String)

  /**
    * Demo entry point: builds sample RDDs of case-class records and runs a
    * Hive `use` + `insert` pair against the `bonc` database.
    *
    * NOTE(review): the original code executed `use bonc` and an insert into a
    * Hive table through a plain SQLContext, which has no Hive metastore
    * access and fails on those statements. HiveContext (already imported) is
    * the context that supports Hive databases/tables in Spark 1.x.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("SparkSqlUtil").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Sample RDD of Person records (demonstration only; not used further).
      val test = sc.parallelize[Person](Seq(Person("hello", 12), Person("wprd", 20)))

      // HiveContext is required for `use <db>` and inserts into Hive tables.
      val sqlContext = new HiveContext(sc)
      sqlContext.sql("use bonc")
      val querysql = "insert into table bonc.test_table_lzf1 select * ,'201611' from table1"
      sqlContext.sql(querysql)

      // Second sample RDD built from a local List of t_user records.
      val data = List(t_user("13", "hello", "5"), t_user("14", "word", "6"))
      val rawData: RDD[t_user] = sc.parallelize(data)
    } finally {
      // Always release the local executor, even if a SQL statement throws.
      sc.stop()
    }
  }
}
