package xubo.wangcaifeng.love.method
import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}
import scalikejdbc.{DB, SQL}
import scalikejdbc.config.DBs
import xubo.wangcaifeng.love.Utils.SaveData

object Need1 {

  /**
   * Regional-distribution report: aggregates per-row metric vectors by
   * (province, city) and persists one row per region into MySQL.
   *
   * Input : parquet files under `data/parquet/`
   * Output: rows inserted into the `localcount` MySQL table
   *         (config for scalikejdbc comes from `DBs.setupAll()`,
   *         i.e. application.conf on the classpath)
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("按照地区纬度查询")
      .setMaster("local[*]")
      // Kryo: faster and more compact than default Java serialization.
      .set("spark.serializer","org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)
    val sqlc = new SQLContext(sc)

    // Read the source data and key each row by (province, city); SaveData
    // turns a Row into its metric vector (a List[Double]).
    val frame: DataFrame = sqlc.read.parquet("data/parquet/")
    val basedata: RDD[((String, String), List[Double])] = frame.map { t =>
      val provincename = t.getAs[String]("provincename")
      val cityname = t.getAs[String]("cityname")
      ((provincename, cityname), SaveData(t))
    }

    // Element-wise sum of the metric vectors for each (province, city) key.
    // NOTE(review): zip truncates to the shorter list — assumes SaveData
    // always returns vectors of the same length; confirm.
    // Cache `result` (not `basedata`): `basedata` is consumed exactly once,
    // while `result` feeds both the JDBC write below and toDF(), so caching
    // here avoids recomputing the whole aggregation.
    val result: RDD[((String, String), List[Double])] = basedata.reduceByKey { (list1, list2) =>
      list1.zip(list2).map { case (a, b) => a + b }
    }.cache()

    // Debug alternative: dump to local files instead of MySQL.
    //result.saveAsTextFile("data/result")

    // Write to MySQL: the scalikejdbc connection pool is initialised once
    // per partition, and each partition is committed as one local
    // transaction (all-or-nothing per partition).
    result.foreachPartition { it =>
      DBs.setupAll()
      DB.localTx { implicit session =>
        it.foreach { case ((province, city), m) =>
          // 2 key columns + 9 metric columns = 11 placeholders.
          // Parameterized via bind — safe against SQL injection.
          // NOTE(review): m(0)..m(8) requires the metric vector to hold at
          // least 9 elements — confirm against SaveData.
          SQL("insert into localcount values(?,?,?,?,?,?,?,?,?,?,?)")
            .bind(province, city, m(0), m(1), m(2), m(3), m(4), m(5), m(6), m(7), m(8))
            .update()
            .apply()
        }
      }
    }

    import sqlc.implicits._
    val dataFrame: DataFrame = result.toDF()
//    dataFrame.registerTempTable("tmp")
//    sqlc.sql("select * from tmp").show()
    // Alternative write path via Spark's own JDBC writer (kept for reference):
    /*sqlc.sql(
      """
        |select
      """.stripMargin)
    val url = "jdbc:mysql://localhost:3306/dmt?characterEncoding=utf8"
    val tname = "localcount1"
    val p = new Properties()
    p.setProperty("user","root")
    p.setProperty("password","217410")
    dataFrame.write.jdbc(url,tname,p)*/

    sc.stop()
  }

}
