package cn.dmp.tools

import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2018/4/21.
  */
object ProvinceDD {

  /**
    * Batch job: reads a parquet dataset of ad-log rows, counts records per
    * (provincename, cityname) pair, and overwrites the result into the
    * MySQL table `proviDD`.
    *
    * @param args exactly one element: the input parquet path
    */
  def main(args: Array[String]): Unit = {
    // Validate the argument list: this job takes exactly one path.
    // (The original message advertised two paths, but only one is consumed.)
    if (args.length != 1) {
      println(
        """
          |usage: ProvinceDD <inputDataPath>
          |参数路径不合法
        """.stripMargin)
      // Exit with a non-zero status so schedulers can detect the failure.
      sys.exit(1)
    }

    // Destructure the single input path argument.
    val Array(inputDataPath) = args

    // Create the SparkContext; Kryo serialization keeps shuffle payloads small.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("ProvinceDD")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)

    try {
      // SparkContext cannot read parquet directly; SQLContext can.
      val sqlContext = new SQLContext(sc)
      val dataFrame: DataFrame = sqlContext.read.parquet(inputDataPath)

      // Count occurrences per (province, city) pair.
      val counts: RDD[((String, String), Int)] = dataFrame.map(row => {
        val pname = row.getAs[String]("provincename")
        val cname = row.getAs[String]("cityname")
        ((pname, cname), 1)
      }).reduceByKey(_ + _)

      import sqlContext.implicits._
      val df: DataFrame = counts.map(t => Ppro(t._1._1, t._1._2, t._2)).toDF()
      // Alternative output kept for reference:
      // df.write.json(outputDataPath)

      // Write the aggregate into MySQL, replacing any previous contents.
      // NOTE(review): credentials are hard-coded in source — move them to
      // configuration (env vars / properties file) before production use.
      val url = "jdbc:mysql://192.168.20.13:3306/spark?characterEncoding=utf-8"
      val tname = "proviDD"
      val prop = new Properties()
      prop.setProperty("user", "root")
      prop.setProperty("password", "06141ybw")
      prop.setProperty("driver", "com.mysql.jdbc.Driver")
      df.write.mode(SaveMode.Overwrite).jdbc(url, tname, prop)
    } finally {
      // Always release cluster resources, even if the job fails mid-way.
      sc.stop()
    }
  }
}
/** One aggregated output row: record count `ct` for a province/city pair. */
final case class Ppro(provincename: String, cityname: String, ct: Int)
