package com.atguigu.sparksql.day01

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SparkSession}

/**
 * Author atguigu
 * Date 2020/11/3 14:03
 */
/**
 * Demonstrates converting an RDD of case-class instances into a typed
 * Dataset via `rdd.toDS()` (enabled by the SparkSession implicits),
 * then prints the result. Runs locally with 2 cores.
 */
object Rdd2DS {
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession.builder()
            .master("local[2]")
            .appName("Rdd2DS")
            .getOrCreate()
        // Required for the toDS() conversions used below.
        import spark.implicits._

        // --- Earlier experiments, kept for reference ---
        // Dataset -> RDD direction:
        //val list1 = List(30, 50, 70, 60, 10, 20)
        /*val ds: Dataset[Int] = list1.toDS
        val rdd =  ds.rdd*/

        // RDD[Int] -> Dataset[Int] direction:
        /*val rdd: RDD[Int] = spark.sparkContext.parallelize(list1)
        val ds: Dataset[Int] = rdd.toDS()
        ds.show*/

        // Build an RDD of User case-class instances and convert it to a
        // typed Dataset (the User encoder is derived by spark.implicits).
        val list = User(10, "zs") :: User(20, "zhiling") :: Nil
        val rdd: RDD[User] = spark.sparkContext.parallelize(list)
        val ds: Dataset[User] = rdd.toDS()
        // Fix: `ds` was previously built but never used, so the example
        // demonstrated nothing observable. Display it, as the commented
        // experiments above did.
        ds.show()

        //val r =  ds.map(user => user.age).reduce(_ + _)
        // SQL equivalent: select _1, count(*) from ... group by _1
        // ds.map(user => (user.name, 1)).groupBy("_1").sum("_2").show

        spark.stop()
    }
}
