package xubo.wangcaifeng.love.method

import java.util.Properties

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SQLContext}

/**
 * Batch job: reads a Parquet log dataset, aggregates record counts by
 * province/city via Spark SQL, and writes the result as a single JSON file.
 *
 * Input : data/parquet/dmt.parquet
 * Output: data/json (one JSON part file, due to repartition(1))
 */
object DataFrameLogDeal {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("log")
      .setMaster("local[*]") // local mode; override via spark-submit for cluster runs
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.sql.parquet.compression.codec", "Snappy")
    val sc = new SparkContext(conf)
    // Ensure the context is always stopped, even if the job fails mid-way.
    try {
      val sqlc = new SQLContext(sc)
      val plines: DataFrame = sqlc.read.parquet("data/parquet/dmt.parquet")
      // NOTE(review): registerTempTable is deprecated since Spark 2.0 in
      // favor of createOrReplaceTempView — switch once the Spark version
      // in use is confirmed to be >= 2.0.
      plines.registerTempTable("dmt")
      // Count records per (provincename, cityname) pair.
      val sql: DataFrame = sqlc.sql("select count(*) ct,provincename,cityname  from dmt group by provincename,cityname")
      // Alternative sink: write the aggregation into MySQL.
      // NOTE(review): credentials are hard-coded below — move them to
      // configuration/environment before re-enabling this path.
      /*val url = "jdbc:mysql://localhost:3306/dmt?characterEncoding=utf8"
      val tname = "dmtcount2"
      val p = new Properties()
      p.setProperty("user","root")
      p.setProperty("password","217410")
      sql.write.jdbc(url,tname,p)*/
      // Write locally; repartition(1) collapses output to a single part file.
      sql.repartition(1).write.json("data/json")
    } finally {
      sc.stop()
    }
  }

}
