import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Column, DataFrame, Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by yangqiyuan on 2018/3/28.
  */
/**
  * Batch job: reads a Parquet dataset, computes the distinct
  * (provincename, cityname) pairs via Spark SQL, prints a sample,
  * and writes the result to the MySQL table `test.log1` over JDBC.
  */
object ReadLog {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName(ReadLog.getClass.getSimpleName)
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val spark: SparkContext = new SparkContext(conf)
    try {
      val sqlContext: SQLContext = new SQLContext(spark)

      // Load the Parquet data (path "parquet" relative to the working dir)
      // and expose it to SQL as a temp table.
      val parquet: DataFrame = sqlContext.read.parquet("parquet")
      parquet.registerTempTable("logs")

      // GROUP BY with no aggregate function == distinct (province, city) pairs.
      val sql: DataFrame = sqlContext.sql(
        "select provincename,cityname from logs group by provincename,cityname")
      sql.show(50)

      // JDBC connection properties.
      // FIX: the MySQL driver expects the key "user", not "username" —
      // with "username" the property is silently ignored and the connection
      // is attempted without credentials.
      val properties: Properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "")

      // FIX: removed the misleading .format("json") — jdbc() selects its own
      // data source, so a format setting is ignored here.
      sql.write.jdbc(
        "jdbc:mysql://localhost:3306/test?characterEncoding=utf-8",
        "log1",
        properties)

      // sql.write.json("bb")  // alternative sink kept from original, disabled
    } finally {
      // Always release the SparkContext, even if the job fails mid-way.
      spark.stop()
    }
  }
}
