import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive._
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import  org.json4s.JsonAST._
object test {


  /** Concatenates the first element of every pair in `line` into one string.
    *
    * Bug fixed: the original body was `for {s <- line} s._1` followed by `""` —
    * the for-comprehension evaluated `s._1` (a pure field access) and discarded
    * every value, so the method was a dead no-op that always returned the empty
    * string. The loop over `_._1` plus a String return type make the intent
    * clear: join the first components. Empty input still yields `""`, so the
    * empty-array behavior is unchanged.
    *
    * @param line pairs whose first components are joined
    * @return the concatenation of `_._1` over `line` ("" when `line` is empty)
    */
  def show(line: Array[Tuple2[String, String]]): String =
    line.map(_._1).mkString


  /** Entry point: sets up a Spark job that reads `report.testdevice` via Hive
    * and (in the currently commented-out sections) aggregates per-product-key
    * counts into nested JSON written to HDFS. Most pipeline variants below are
    * disabled; only the context setup, a small even-number RDD demo, and the
    * Hive `select *` actually execute.
    *
    * NOTE(review): nothing in this method is ever collected or saved while the
    * pipelines are commented out, so the job currently has no observable output.
    */
  def main(args: Array[String]) {

    // Output path on HDFS; deleted below so reruns don't fail on "path exists".
    val filePath="hdfs:///test/res"
    // NOTE(review): app name "workCount" looks like a typo for "wordCount" —
    // it is a runtime string, so left untouched here.
    val sparkConf=new SparkConf().setAppName("workCount")
    val sc= new SparkContext(sparkConf)
    // Uses the default Hadoop Configuration from the classpath (core-site.xml etc.).
    val hdfs=FileSystem.get(new Configuration())
    // Recursive delete (second arg `true`) of any previous run's output.
    if(hdfs.exists(new Path(filePath)))hdfs.delete(new Path(filePath),true)

    // Demo RDD: wraps the single Range 1..100 in one partition element, then
    // keeps only even numbers greater than 50 and flattens them out.
    // NOTE(review): `r` is never used afterwards — dead computation (lazy RDD,
    // so it never even executes without an action).
     val p=1 to 100
     val r =sc.makeRDD(Seq(p)).map(line=> line.toArray).map(line=>line.filter(_ % 2 ==0).filter(_ >50)).flatMap(_.toList)

    // NOTE(review): `data` is never read, and `var` is unnecessary.
    var data = sc.parallelize(List((1,3),(1,2),(1, 4),(2,3)))
    // NOTE(review): bare `sc` expression — evaluates to the context and
    // discards it; a no-op left over from experimentation.
       sc




    //
    // HiveContext gives SQL access to Hive tables (Spark 1.x API).
    val sqlContext = new org.apache.spark.sql.hive.HiveContext(sc)

    // Brings HiveContext implicits (e.g. DataFrame conversions) into scope.
    import sqlContext._

    // Full scan of the Hive table; only materialized when an action runs.
    val  df=sqlContext.sql("select *  from report.testdevice")

    //  df.groupBy("product_key").count().show()
    // df.printSchema()
    // df.select("product_key").groupBy("product_key").count().show()

    //df.groupBy("ip").count().show()
    // val a=df.select("product_key").toJSON.map(line=>(line,1)).reduceByKey(_ + _).map(line=>pretty(line))
    //  val a=df.select("product_key").map(line=>(line,1)).reduceByKey(_ + _).collect.toList


    /*val a=df.select("product_key").map(line=>(line,1)).reduceByKey(_ + _).map(
    t=> ("product_key"->t._1.get(0).toString)
      ~ ("count" ->t._2.toInt)
    ).map(json=>pretty(render(json))).coalesce(1, shuffle = true).saveAsTextFile("hdfs:///test/res")*/

    /* val a=df.select("product_key").map(line=>(line,1)).reduceByKey(_ + _).map(
       t=> ("product_key"->t._1.get(0).toString)
         ~ ("count" ->t._2.toInt)
     ).map(json=>pretty(render(json))).*/

    // df.select("product_key","group_id").join()
    // Local data-model sketches for the intended aggregation output.
    // NOTE(review): neither case class is used by any live code below.
    case class  Location(country:String,region:String,city:String)
    // NOTE(review): "Devive" looks like a typo for "Device"; renaming would be
    // safe only if no other file references it — left as-is.
    case class  DeviveActive(product_key:String,count:BigInt,group_id:BigInt,loc:Location)


   /* val res2=  df.map(
      line=>line.get(0).toString
        +","+line.get(0)+"%"+line.get(1).toString
        +","+line.get(0)+"%"+line.get(1).toString+"%"+line.get(4).toString
        +","+line.get(0)+"%"+line.get(1).toString+"%"+line.get(4).toString+"%"+line.get(5).toString
        +","+line.get(0)+"%"+line.get(1).toString+"%"+line.get(4).toString+"%"+line.get(5).toString+"%"+line.get(6).toString
    ).flatMap(line =>line.split(",")).map(line=>(line,1)).reduceByKey(_ + _).map(t=>check(t)).reduce(_ merge _ ).coalesce(1, shuffle = true).saveAsTextFile("hdfs:///test/res3")
*/
    // val s=res.map(line=>(line._1,line._2)).cache()
    //.map(t=>check(t)).reduce(_ merge _ )


    //  .reduce(_ merge _ )
  /*  val res3=  df.map(
      line=>line.get(0).toString
        +","+line.get(0)+"-"+line.get(1).toString
        +","+line.get(0)+"-"+line.get(1).toString+"-"+line.get(4).toString
        +","+line.get(0)+"-"+line.get(1).toString+"-"+line.get(4).toString+"-"+line.get(5).toString
        +","+line.get(0)+"-"+line.get(1).toString+"-"+line.get(4).toString+"-"+line.get(5).toString+"-"+line.get(6).toString
    ).flatMap(line =>line.split(",")).map(line=>(line,1)).reduceByKey(_ + _).map(line=>(line._1.split("-")(0),line)).map(t=>(t._1,check(t._2))).reduceByKey((x,y)=>x merge y).map(line=>compact(render(line._2))).coalesce(1, shuffle = true).saveAsTextFile("hdfs:///test/res4")
*/
    //compact


    //coalesce(1, shuffle = true).saveAsTextFile("hdfs:///test/res3")

    /** Turns a ("%"-delimited key, count) pair into a nested json4s JObject.
      *
      * The key encodes a path: product_key, then optionally group_id or
      * "locations", then up to three further levels; the count is attached at
      * the innermost level. Segment 1 equal to "locations" (case-insensitive)
      * switches the second field from "group_id" to "locations".
      *
      * NOTE(review): the match is non-exhaustive — a key with 0 or more than 5
      * "%" segments throws a MatchError at runtime. Worth guarding if input
      * isn't fully controlled.
      * NOTE(review): only referenced from the commented-out pipelines above, so
      * it is currently dead code.
      */
    def  check(t:(String,Int))={
      val pk=t._1
      val num=t._2
      val pk2= pk.split("%")
      pk2.length match {
        case 1 => {
          // Bare product key: {"product_key": ..., "count": n}
          ("product_key"->pk2(0).toString) ~ ("count" -> num)
        }
        case 2 =>{
          // product_key % group_id
          ("product_key"->pk2(0).toString) ~ ("group_id" ->Map(pk2(1).toString ->Map("count"->num)))
        }
        case 3 =>{
          if (pk2(1).toString.equalsIgnoreCase("locations"))
            ("product_key"->pk2(0).toString) ~ ("locations" ->Map(pk2(2).toString ->Map("count"->num) ))
          else
            ("product_key"->pk2(0).toString) ~ ("group_id" ->Map(pk2(1).toString ->Map(pk2(2).toString->Map("count"->num))))
        }
        case 4 =>{
          if (pk2(1).toString.equalsIgnoreCase("locations"))
            ("product_key"->pk2(0).toString) ~ ("locations" ->Map(pk2(2).toString ->Map(pk2(3).toString->Map("count"->num) )))
          else
            ("product_key"->pk2(0).toString) ~ ("group_id" ->Map(pk2(1).toString ->Map(pk2(2).toString->Map( pk2(3).toString->Map("count"->num) ))))
        }
        case 5 =>{
          // Deepest level: the innermost segment maps directly to the count
          // (no wrapping "count" key, unlike the shallower cases).
          if (pk2(1).toString.equalsIgnoreCase("locations"))
            ("product_key"->pk2(0).toString) ~ ("locations" ->Map(pk2(2).toString ->Map(pk2(3).toString->Map( pk2(4).toString->num) )))
          else
            ("product_key"->pk2(0).toString) ~ ("group_id" ->Map(pk2(1).toString ->Map(pk2(2).toString->Map( pk2(3).toString->Map(pk2(4).toString->num) ))))
        }

      }
    }




    // a.saveAsTextFile("hdfs:///test/res")
    /* val  json=(a.map(
        t=> ("product_key"->t._1.get(0).toString)
          ~ ("count" ->t._2.toInt)
     ))*/
    //  ~ ("count" ->t._2)
    //val p=a.toJSON.map(line=>parse(line)).map(line=>pretty(line))

    //  val r=pretty(render(json))
    //sc.parallelize(compact(render(json))).saveAsTextFile("hdfs:///test/res")
    //println(pretty(render(json)))











  }


}
