package cn.doit.report

import cn.doit.common.DolphinAppComm
import cn.doit.config.DolphinConfig
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}


/**
 * Reads a parquet dataset, counts records per (province, city) pair and
 * writes the aggregated result both to a JDBC table and to a JSON output.
 *
 * Optional program arguments (each falls back to the original hard-coded
 * value, so existing zero-argument invocations behave exactly as before):
 *   args(0) - input parquet path      (default: "abc")
 *   args(1) - target JDBC table name  (default: "my_table2")
 *   args(2) - JSON output path        (default: "cba1")
 */
object ReadFile4Core {

  // Row positions of the province / city columns in the parquet schema.
  // NOTE(review): carried over from the original code (24 / 25) — confirm
  // against the upstream ETL schema.
  private val ProvinceIdx = 24
  private val CityIdx = 25

  def main(args: Array[String]): Unit = {

    // Generalized: paths/table may be supplied on the command line,
    // defaulting to the previously hard-coded values.
    val inputPath = if (args.length > 0) args(0) else "abc"
    val jdbcTable = if (args.length > 1) args(1) else "my_table2"
    val jsonPath  = if (args.length > 2) args(2) else "cba1"

    val sc = DolphinAppComm.creatSparkContext("cn.doit.etl.ReadFile4Core")

    // try/finally guarantees the SparkContext is released even when any
    // stage of the pipeline throws (the original leaked it on failure).
    try {
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._

      val frame: DataFrame = sqlContext.read.parquet(inputPath)

      // Map each row to ((province, city), 1) so reduceByKey can count pairs.
      // Spark 1.x: DataFrame.map yields an RDD.
      val pairs: RDD[((String, String), Int)] = frame.map(row => {
        val province = row.get(ProvinceIdx).toString
        val city = row.get(CityIdx).toString
        ((province, city), 1)
      })

      // Sum the counts per key, then flatten the nested key tuple into the
      // three-column shape expected by toDF.
      val result = pairs.reduceByKey(_ + _).map {
        case ((province, city), count) => (province, city, count)
      }

      val dF = result.toDF("province", "city", "count")

      // jdbc(url: String, table: String, connectionProperties: java.util.Properties)
      dF.write.jdbc(DolphinConfig._url, jdbcTable, DolphinConfig.props)
      dF.write.json(jsonPath)
    } finally {
      sc.stop()
    }
  }
}
