package spark.work

//import breeze.linalg.{max, sum}
//import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.HashMap
//import org.apache.spark.sql.functions._

/**
  * Created by liuwei on 2017/11/8.
  */
object GroupBy {

  /**
    * Demo of exact stratified sampling on a DataFrame.
    *
    * Builds a small scores DataFrame, keys every row by the "姓名" column via
    * [[generateRddKey]], draws a stratified sample with `sampleByKeyExact`
    * (both fractions are 0.0 here, so the sampled frame shows empty), and then
    * prints each keyed row for inspection.
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("RowToColumnTest").setMaster("local[8]")
    // Configure the session from the conf directly; SparkSession owns its own
    // SparkContext, so there is no need to instantiate one manually first.
    val ss = SparkSession.builder.config(sparkConf).getOrCreate()

    try {
      val df = ss.createDataFrame(Seq(
        ("张三", "女", "数学", 60, 15, "张老师"),
        ("张三", "女", "语文", 70, 12, "李老师"), ("张三", "女", "语文", 70, 12, "李老师"),
        ("张三", "女", "物理", 50, 22, "张老师"),
        ("王二", "男", "数学", 50, 16, "赵老师"),
        ("王二", "男", "语文", 60, 20, "孙老师"),
        ("王二", "男", "历史", 50, 22, "李老师")
      )).toDF("姓名", "性别", "科目", "成绩", "排名", "监考老师")
      val selectColumns = Seq("姓名")

      // Per-stratum sampling fractions, keyed by the exact string produced by
      // generateRddKey. 0.0 keeps no rows from either stratum.
      val fractions = Map(
        "姓名:王二-" -> 0.0,
        "姓名:张三-" -> 0.0
      )

      val sampled = df.rdd
        .map(row => generateRddKey(selectColumns, row) -> row)
        .sampleByKeyExact(withReplacement = false, fractions, 47)
        .map(_._2)
      ss.createDataFrame(sampled, df.schema).show()

      println(Math.ceil(0.1 * 32561))

      val initRdd = df.rdd.map(row => generateRddKey(selectColumns, row) -> row)
      println(initRdd.count())
      // NOTE: foreach runs on the executors; with local[*] the output still
      // reaches this console, but on a cluster it would go to executor logs.
      initRdd.foreach { case (key, row) =>
        println(key)
        println(row.toString)
        println("==========")
      }
      println("")
    } finally {
      // Release the session (and its underlying SparkContext) even on failure.
      ss.stop()
    }
  }

  /**
    * Builds the stratification key for `row` from the requested columns.
    *
    * For each name in `selectColumns` the key contains "name:value" when the
    * column exists in the row's schema (an empty string when it does not, and
    * an empty value for a null cell), always followed by a "-" separator.
    * E.g. selecting "姓名" on a row with 姓名=张三 yields "姓名:张三-".
    */
  private def generateRddKey(selectColumns: Seq[String], row: Row): String = {
    val titleNames = row.schema.fieldNames.toList
    selectColumns.map { name =>
      val part = titleNames.indexOf(name) match {
        case -1 => ""
        case i  => s"$name:" + (if (row.isNullAt(i)) "" else row.get(i).toString)
      }
      part + "-"
    }.mkString
  }
}
