package spark.work

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.StructType

import scala.collection.mutable.ArrayBuffer

/**
  * Created by liuwei on 2017/11/8.
  */
object AggTest2 {

  /**
    * Entry point: starts a local SparkSession, builds a single-row,
    * 18-column DataFrame made of three repeated column groups
    * (name, gender, subject, score, rank, proctor), and prints it.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Pass the conf straight to the builder; creating a separate
    // SparkContext first is redundant (getOrCreate creates/reuses one)
    // and leaks the context because it is never stopped.
    val sparkConf = new SparkConf().setAppName("AggTest2").setMaster("local[8]")
    val ss = SparkSession.builder.config(sparkConf).getOrCreate()

    try {
      // One wide row: three repeated groups of
      // (姓名 name, 性别 gender, 科目 subject, 成绩 score, 排名 rank, 监考老师 proctor).
      val df = ss.createDataFrame(Seq(
        ("王二", "男", "历史", 50, 22, "李老师",
          "王二", "男", "历史", 50, 22, "李老师",
          "王二", "男", "历史", 50, 22, "李老师")
      )).toDF(
        "姓名", "性别", "科目", "成绩", "排名", "监考老师",
        "姓名2", "性别2", "科目2", "成绩2", "排名2", "监考老师2",
        "姓名3", "性别3", "科目3", "成绩3", "排名3", "监考老师3")

      df.show()
    } finally {
      // Always release the session (and its underlying SparkContext),
      // even if DataFrame construction fails.
      ss.stop()
    }
  }
}
