package cn.xiaoniu.dmp.report

import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

object Report {

  /**
   * Counts records per (province, city) pair in a parquet dataset and writes
   * the aggregated counts to disk as text files.
   *
   * Optional command-line overrides (defaults preserve the original paths):
   *   args(0) = input parquet path  (default: "D:/dmp/parquet")
   *   args(1) = output directory    (default: "D:\\rdd")
   */
  def main(args: Array[String]): Unit = {

    val inputPath  = if (args.length > 0) args(0) else "D:/dmp/parquet"
    val outputPath = if (args.length > 1) args(1) else "D:\\rdd"

    // Column positions of provincename / cityname in the parquet rows.
    // NOTE(review): assumes a fixed column layout (province at 24, city at 25)
    // — confirm against the job that produced the parquet files.
    val provinceIdx = 24
    val cityIdx = 25

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("统计省市分布") // "province/city distribution statistics"
    val sc = new SparkContext(conf)
    val ssc = new SQLContext(sc)

    try {
      val frame: DataFrame = ssc.read.parquet(inputPath)

      // RDD-based aggregation: key every row by (province, city) and count.
      val res: RDD[((String, String), Int)] = frame.rdd
        .map(row => ((row.getString(provinceIdx), row.getString(cityIdx)), 1))
        .reduceByKey(_ + _)
      res.saveAsTextFile(outputPath)

      // Alternative 1: SQL-based aggregation producing a DataFrame.
      // frame.registerTempTable("t_report")
      // val res: DataFrame = ssc.sql(
      //   "select count(*)ct,provincename,cityname  from t_report group by provincename,cityname")
      // res.show()
      // Write the result to disk as JSON:
      // res.write.json("D:\\json")

      // Alternative 2: persist the aggregated DataFrame to MySQL via JDBC
      // (this is why java.util.Properties is imported above).
      // val  table:String = "t_report"
      // val url = "jdbc:mysql://localhost:3306/mysql?characterEncoding=utf8"
      // val prop = new  Properties
      // prop.setProperty("user","root")
      // prop.setProperty("password","root")
      // prop.setProperty("driver","com.mysql.jdbc.Driver")
      // res.write.jdbc(url,table,prop)
    } finally {
      // Always release the SparkContext, even when the job fails,
      // so the local executor threads and UI port are freed.
      sc.stop()
    }
  }
}
