package com.doit.day06

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: Learn big data at Duoyi Education
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description: Demo: convert an RDD of case-class records into a DataFrame
 *               and analyze it with Spark SQL (word count).
 */
/**
 * Loads a whitespace-separated words file, wraps each token in a `WordBean`,
 * converts the resulting RDD to a DataFrame via case-class reflection, and
 * runs a word-count aggregation with Spark SQL.
 */
object Demo06_DF_RDD {
  def main(args: Array[String]): Unit = {
    // Local SparkSession for this demo; `local[*]` uses all available cores.
    val session = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .getOrCreate()
    try {
      val sc = session.sparkContext

      // Load the data: split each line on runs of whitespace and wrap each
      // token in a WordBean so the RDD carries a case-class schema that
      // createDataFrame can infer columns from.
      val rdd: RDD[WordBean] = sc.textFile("data/words")
        .flatMap { line =>
          line.split("\\s+").map(WordBean(_))
        }

      // Convert RDD[case class] -> DataFrame so we can analyze it with SQL.
      val df = session.createDataFrame(rdd)
      // createOrReplaceTempView instead of createTempView: the latter throws
      // an AnalysisException if the view name already exists in this session.
      df.createOrReplaceTempView("tb_w")

      // Word count: group by the `w` column of WordBean.
      session.sql(
        """
          |select
          |w ,
          |count(1)  cnt
          |from
          |tb_w
          |group by w
          |""".stripMargin)
        .show()
    } finally {
      // Always release Spark resources, even if the job fails — the original
      // code leaked the session by never calling stop().
      session.stop()
    }
  }
}
