package project

import java.sql.DriverManager

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}
/**
  * Created by Administrator on 2018/03/28.
  */
object ProjectOne {

  /**
    * Requirement one: count ad-log records grouped by province and city,
    * then persist the aggregation into a MySQL table named `json`.
    *
    * Usage: ProjectOne &lt;inputpath&gt; &lt;outputpath&gt;
    *
    * @param args args(0) = parquet input path, args(1) = output path
    *             (output path is currently unused; results go to MySQL).
    */
  def main(args: Array[String]): Unit = {
    // BUG FIX: original checked `args.length != 3` but destructured exactly
    // two elements below, so a correct two-argument invocation crashed with
    // a MatchError and a three-argument one slipped past the guard.
    if (args.length != 2) {

      println(
        """
          |
          |dmpetl.ProjectOne
          |
          |inputpath <输入路径>
          |
          |outputpath <输出路径>
          |
        """.stripMargin)

      sys.exit()
    }
    val Array(inputpath, outputpath) = args

    val conf = new SparkConf()
    conf.setAppName("需求一")
    conf.setMaster("local[*]")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc: SparkContext = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    // BUG FIX: read from the CLI-supplied input path instead of the
    // hard-coded local Windows path the original ignored `inputpath` for.
    val parquet: DataFrame = sQLContext.read.parquet(inputpath)

    // Aggregate: record count per (province, city).
    parquet.registerTempTable("t_Project1")
    val sql: DataFrame = sQLContext.sql("select count(*) ct,provincename,cityname from t_Project1 group by provincename,cityname")

    // Write each partition to MySQL. One connection, one DDL execution and
    // one prepared INSERT per partition (the original re-prepared the DDL
    // and the INSERT for every row and leaked the DDL statement).
    sql.foreachPartition(partition => {
      val cnnt = DriverManager.getConnection("jdbc:mysql://localhost:3306/big29?characterEncoding=utf-8", "root", "123456")
      try {
        // Ensure the target table exists; execute once per partition.
        val ddl = cnnt.prepareStatement("create table IF NOT EXISTS json(count int,provincename varchar(225),cityname varchar(225))")
        try ddl.execute() finally ddl.close()

        val insert = cnnt.prepareStatement("insert into json values(?,?,?)")
        try {
          partition.foreach(row => {
            insert.setLong(1, row.getAs[Long]("ct"))
            insert.setString(2, row.getAs[String]("provincename"))
            insert.setString(3, row.getAs[String]("cityname"))
            insert.executeUpdate()
          })
        } finally insert.close()
      } finally cnnt.close() // close connection even if a row insert fails
    })
    sc.stop()

  }

}
