package com.zhaosc.spark.sql.df

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.SparkSession

object DataFrameOpsFromJsonRdd {

  /**
   * Builds two small DataFrames from in-memory JSON strings and joins them
   * on the shared "name" column, printing the joined rows to stdout.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // A single SparkSession is sufficient: it owns the SparkContext, so
    // building a separate SparkConf/SparkContext (SQLContext-era style) is
    // redundant and can leave the two configured inconsistently.
    val spark = SparkSession
      .builder()
      .appName("DataFrameOpsFromJsonRdd")
      .config("spark.master", "local")
      .getOrCreate()
    // Needed for createDataset on a List[String].
    import spark.implicits._

    val nameList = List(
      "{'name':'zhangsan', 'age':55}",
      "{'name':'lisi', 'age':30}",
      "{'name':'lisisi', 'age':30}",
      "{'name':'wangwu', 'age':19}")

    val scoreList = List(
      "{'name':'zhangsan','score':100}",
      "{'name':'lisi','score':99}")

    // spark.read.json(RDD[String]) is deprecated since Spark 2.2;
    // the supported overload takes a Dataset[String].
    val nameDf = spark.read.json(spark.createDataset(nameList))
    val scoreDf = spark.read.json(spark.createDataset(scoreList))

    /**
     * Approach 1: DataFrame API join.
     * SELECT nameTable.name, nameTable.age, scoreTable.score
     *   FROM nameTable JOIN scoreTable ON (nameTable.name = scoreTable.name)
     */
    //    nameDf.join(scoreDf).where(nameDf.col("name")===scoreDf.col("name"))
    //    .select(nameDf.col("name"), nameDf.col("age"),scoreDf.col("score"))
    //    .show();
    //

    /**
     * Approach 2: register temporary views and run plain SQL.
     */
    nameDf.createOrReplaceTempView("name")
    scoreDf.createOrReplaceTempView("score")
    val sql = "SELECT name.name,name.age,score.score FROM name join score ON (name.name = score.name)"

    spark.sql(sql).show()

    // Stopping the session also stops the underlying SparkContext.
    spark.stop()
  }
}