package com.galeno.sparksql02

import org.apache.commons.codec.Encoder
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Encoders, Row, SparkSession}
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}

/**
 * @Title: Df转Rdd — DataFrame-to-RDD conversion demo
 * @Description: Shows how to map over a Dataset[Row] (which requires an
 *               Encoder for the result type) and how to drop down to the
 *               underlying RDD, where no Encoder is needed.
 * @author galeno
 * @date 2021/9/6 15:37
 */
/**
 * Demonstrates converting a DataFrame back to an RDD, and the two ways of
 * supplying an `Encoder` when calling `map` on a Dataset of Rows:
 * implicitly (via `spark.implicits._`) or explicitly (via `Encoders.product`).
 */
object Df转Rdd {
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging so the demo output is readable.
    Logger.getLogger("org").setLevel(Level.ERROR)

    val spark = SparkSession.builder()
      .appName("aa")
      .master("local")
      .config("spark.sql.crossJoin.enabled", "true")
      .getOrCreate()
    import spark.implicits._

    // Explicit schema for the CSV source; the header row itself is consumed
    // by the "header" option rather than parsed as data.
    // (val, not var — the schema is never reassigned.)
    val schema = StructType(Seq(
      StructField("id", DataTypes.IntegerType),
      StructField("name", DataTypes.StringType),
      StructField("role", DataTypes.StringType),
      StructField("energy", DataTypes.DoubleType)
    ))
    val df = spark.read.option("header", "true").schema(schema).csv("data/battel2.txt")

    // Register the DataFrame so it can be queried with SQL: per-role maximum energy.
    df.createTempView("df")
    val df2 = spark.sql(
      """
        |
        |select
        | role,
        | max(energy) as max_energy
        |from
        |df
        |group by
        |role
        |""".stripMargin)
    // df2.show()

    /**
     * Calling `map` directly on a Dataset yields another Dataset, so Spark
     * needs an Encoder[U] for the result element type U. There are two ways
     * to provide one.
     *
     * Way 1: import the implicit context (spark.implicits._ above) and let
     * implicit resolution supply the Encoder for (String, Double).
     * Bind the result so the demonstration value is actually usable
     * (the original discarded it).
     */
    val viaImplicits = df2.map(row => {
      val role = row.getAs[String]("role")
      val maxEnergy = row.getAs[Double]("max_energy")
      (role, maxEnergy)
    })

    // Way 2: pass the Encoder explicitly. The type parameter is spelled out
    // rather than left to inference, so the intent is unambiguous.
    val viaExplicitEncoder = df2.map(row => {
      val role = row.getAs[String]("role")
      val maxEnergy = row.getAs[Double]("max_energy")
      (role, maxEnergy)
    })(Encoders.product[(String, Double)])

    /**
     * Way 3: drop down to the underlying RDD first — RDD.map needs no
     * Encoder at all, only ordinary Scala serialization.
     */
    val rdd2: RDD[(String, Double)] = df2.rdd.map(row => {
      val role = row.getAs[String]("role")
      val maxEnergy = row.getAs[Double]("max_energy")
      (role, maxEnergy)
    })

    // Release the local Spark context (the original leaked the session).
    spark.stop()
  }

}
