package com.qdu.publicBicycle

import jdk.nashorn.internal.objects.NativeString.{substr, substring}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

import java.util.Properties

object PUNumber { // Count shared-bicycle trips grouped by a field extracted from the start date
  def main(args: Array[String]): Unit = {

    if (args.length != 1) {
      println("请将file输入位置传入")
      // Usage error: exit nonzero so callers/schedulers can detect the failure
      // (original exited with 0, signalling success).
      System.exit(1)
    }

    // One SparkSession carries both the SQL entry point and the SparkContext;
    // building a separate SparkContext with a duplicate SparkConf is redundant.
    val spark = SparkSession.builder()
      .appName("PUNumber")
      .master("local[*]")
      .config("spark.testing.memory", "512000000")
      .getOrCreate()
    val sc = spark.sparkContext

    // Input: CSV lines; column layout (0-based) assumed from the schema below:
    // x(1)=start_date, x(2)=start_station_code, x(3)=end_date,
    // x(4)=end_station_code, x(5)=duration_sec, x(6)=is_member.
    val lines = sc.textFile(args(0))

    import org.apache.spark.sql.Row

    // Unlike reflection-based DataFrame inference, the programmatic-schema path
    // requires constructing Row instances explicitly.
    // x(1).substring(5, 7) replaces the original Nashorn-internal
    // substr(x(1), 5, 2) (start=5, length=2): it takes the two characters at
    // positions 5-6 of the date string (the "MM" field of "yyyy-MM-dd ...").
    // NOTE(review): the group-by column is named start_date but holds this
    // two-digit slice as an Int — confirm against the input data which time
    // bucket (month vs. hour) is intended.
    val rows = lines.map(_.split(","))
      .map(x =>
        Row(
          x(1).substring(5, 7).toInt,
          x(2).trim.toInt,
          x(3),
          x(4).trim.toInt,
          x(5).trim.toInt,
          x(6).trim.toInt
        )
      )

    // Explicit schema matching the Row layout built above.
    val fields = Array(
      StructField("start_date", IntegerType, false),
      StructField("start_station_code", IntegerType, false),
      StructField("end_date", StringType, false),
      StructField("end_station_code", IntegerType, false),
      StructField("duration_sec", IntegerType, false),
      StructField("is_member", IntegerType, false)
    )
    val schema = StructType(fields)

    // Combine the schema (header) with the Row RDD into a DataFrame.
    val df1 = spark.createDataFrame(rows, schema)

    df1.createOrReplaceTempView("people")
    val res1 = spark.sql(
      "select start_date , count(start_date) as countnum from people group by start_date ORDER by start_date ASC"
    )
    res1.show()

    // Persist the aggregation to MySQL, replacing any previous contents.
    val props = new Properties()
    // Fixed key: the original wrote "dirver", so the JDBC driver class was
    // never actually registered with the connection properties.
    props.put("driver", "com.mysql.jdbc.Driver")
    props.put("user", "root")
    props.put("password", "root")
    // res1 is already a DataFrame; the original .toDF() call was a no-op.
    res1.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.56.1:3306/scala?useSSL=false", "unumber", props)
  }

}

