package streaming

import com.alibaba.fastjson.JSON
import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils
import streaming.ReceiverTest.order

object Demo {
  /** Reads a 10-row sample from Hive table `default.orders` and writes it
    * back to two Hive tables, demonstrating both `saveAsTable` (managed
    * table, Overwrite) and `insertInto` (positional append).
    *
    * Fixes over the previous version: removed an unused, never-started
    * `StreamingContext` (dead resource), removed the unused
    * `import spark.sql`, and added a try/finally so the SparkSession is
    * always stopped and cluster resources are released.
    */
  def main(args: Array[String]): Unit = {
    // Hive-enabled session; dynamic partitioning is enabled so insertInto
    // can target partitioned tables without static partition specs.
    val spark = SparkSession.builder().appName("rdd2DF")
      .config("spark.sql.warehouse.dir", "hdfs://master:9000/user/hive/warehouse")
      .config("hive.exec.dynamic.partition", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport().getOrCreate()

    try {
      val df = spark.sql("select * from default.orders limit 10")
      df.show()
      // Overwrite (re)creates the managed table from the DataFrame's schema.
      df.write.mode(SaveMode.Overwrite).saveAsTable("default.new1")
      // insertInto matches columns by POSITION against the existing
      // schema of default.or2 — the table must already exist.
      df.write.mode(SaveMode.Append).insertInto("default.or2")
    } finally {
      spark.stop() // release executors even if a query above fails
    }
  }
}
