package com.guchenbo.spark.sql

import org.apache.spark.sql.SparkSession

/**
 * Demonstrates two ways to copy the rows of one Hive table into another:
 * 1. Directly with SQL: `INSERT INTO ... SELECT ... FROM ...`
 * 2. With the DataFrame API: query the source into a DataFrame, then write it out.
 */
object HiveExchange {

  // TODO(review): table names are placeholders — fill in real Hive table names
  // (e.g. "db.source_table" / "db.target_table") before running.
  val table1 = ""
  val table2 = ""

  def main(args: Array[String]): Unit = {
    val spark = SparkUtils.sparkSession("HiveExchange")
    try {
      fun1(spark)
      fun2(spark)
    } finally {
      // Release the session (and its Hive connections) even if a copy fails.
      spark.stop()
    }
  }

  /**
   * Approach 1: copy `table1` into `table2` with a single SQL statement,
   * letting Hive/Spark SQL do the whole transfer server-side.
   *
   * @param spark active session with Hive support enabled
   */
  def fun1(spark: SparkSession): Unit = {
    val sql = s"INSERT INTO $table2 SELECT * FROM $table1"
    spark.sql(sql)
  }

  /**
   * Approach 2: read `table1` into a DataFrame, then append it to `table2`
   * through the DataFrameWriter API.
   *
   * NOTE(review): `insertInto` matches columns by position, not by name —
   * the two tables must share the same column order.
   *
   * @param spark active session with Hive support enabled
   */
  def fun2(spark: SparkSession): Unit = {
    spark.table(table1)
      .write
      .mode("append")
      .insertInto(table2)
  }
}
