import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SQLContext}
import scalikejdbc.{DB, SQL}
import scalikejdbc.config.DBs


/**
  * Created by yangqiyuan on 2018/3/28.
  */
/**
  * Reads a parquet log file, groups rows by (provincename, cityname), and
  * REPLACEs each resulting pair into the MySQL `log` table via scalikejdbc.
  *
  * Created by yangqiyuan on 2018/3/28.
  */
object ReadLogToMysql {

  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      // Bug fix: the original named the app after the unrelated `ReadLog` object.
      .setAppName(ReadLogToMysql.getClass.getSimpleName)
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc: SparkContext = new SparkContext(conf)
    try {
      val sqlContext: SQLContext = new SQLContext(sc)
      val parquet: DataFrame = sqlContext.read.parquet("parquet")
      parquet.registerTempTable("logs")
      val grouped: DataFrame = sqlContext.sql(
        "select provincename,cityname from logs group by provincename,cityname")

      // Write per PARTITION, not per row:
      //  - `DBs.setup()` must run inside the executor JVM; the original called it
      //    only during driver-side object construction, so executors had no
      //    configured connection pool.
      //  - one transaction per partition instead of one per row avoids
      //    O(rows) connection/transaction overhead.
      grouped.foreachPartition { rows =>
        DBs.setup()
        DB.localTx { implicit session =>
          rows.foreach { row =>
            SQL("replace into log values(?,?)")
              // Explicit type parameter: bare `getAs` infers Nothing.
              .bind(row.getAs[String]("provincename"), row.getAs[String]("cityname"))
              .update().apply()
          }
        }
      }
    } finally {
      // Always release the SparkContext, even when the job throws.
      sc.stop()
    }
  }
}
