package report

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Wang Kang (王康) on 2018/6/29.
  */
object Data_distribution {

  /**
    * Entry point: reads DMP parquet data from the local filesystem, registers it
    * as the temp table `dmp_data_distribution`, and bulk-loads every row into the
    * MySQL table `dmp_data` over JDBC.
    *
    * Runs Spark in local mode with 4 threads.
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("Data_distribution").setMaster("local[4]")
    val sc: SparkContext = new SparkContext(conf)
    try {
      val sqlContext: SQLContext = new SQLContext(sc)

      // Source data: parquet files written by Spark 1.6.3.
      val parquet: DataFrame = sqlContext.read.parquet("E:\\大数据资料\\project2\\parquet_1.6.3")
      parquet.registerTempTable("dmp_data_distribution")

      // Per-province/city record counts — kept for the commented-out export below.
      val distribution: DataFrame = sqlContext.sql("select provincename,cityname,count(1) as ct from dmp_data_distribution group by provincename,cityname")
      val allRows: DataFrame = sqlContext.sql("select * from dmp_data_distribution ")
      /*distribution.write.json("E:\\大数据资料\\project2\\result_1.63_json")*/

      // JDBC credentials.
      // NOTE(review): hard-coded credentials — move to configuration/environment.
      val prop: Properties = new Properties()
      prop.setProperty("user", "root")
      prop.setProperty("password", "root")

      // Write all rows into MySQL.
      allRows.write.jdbc("jdbc:mysql://localhost:3306/baby?characterEncoding=utf-8", "dmp_data", prop)
      //distribution.write.jdbc("jdbc:mysql://localhost:3306/baby?characterEncoding=utf-8","dmp_data_distribution",prop)
    } finally {
      // Release the SparkContext even if a read or write above throws.
      sc.stop()
    }
  }
}
