import cn.tecnova.utils.ConfigHandler
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions.broadcast
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Description: demonstrates broadcasting a small DataFrame when joining two JDBC-backed tables.
  *
  * @author Rabcheng
  * @since 2019/4/25 10:41
  **/
object TestBro {

  /**
    * Reads the `user` and `user2` tables from a local MySQL database and
    * joins them on `id`, marking the smaller side (`user`) as a broadcast
    * candidate so the optimizer can avoid a shuffle.
    *
    * NOTE(review): assumes a MySQL instance on localhost:3306 with a `hello`
    * database and that `ConfigHandler.props` carries the JDBC credentials —
    * confirm against the project's config.
    */
  def main(args: Array[String]): Unit = {

    // Local-mode Spark using all available cores.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("gg")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Single JDBC URL shared by both table reads.
    val jdbcUrl = "jdbc:mysql://localhost:3306/hello?characterEncoding=utf-8"
    val userDF = sqlContext.read.jdbc(jdbcUrl, "user", ConfigHandler.props)
    val user2DF = sqlContext.read.jdbc(jdbcUrl, "user2", ConfigHandler.props)

    userDF.show()
    user2DF.show()

    // BUG FIX: the original used sc.broadcast(userDF) and joined on
    // userBro.value. SparkContext.broadcast only serializes the DataFrame
    // *reference* (its logical plan), not its data, so the join was still a
    // regular shuffle join. The correct way to request a broadcast join is
    // the broadcast() hint from org.apache.spark.sql.functions, which tells
    // Catalyst to ship the whole table to every executor.
    val res = user2DF.join(broadcast(userDF), "id")

    res.show()

    sc.stop()
  }

}
