package com.wei.violet.app

import java.util.Properties

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.types._

object DMPAPP {
  Logger.getLogger("org").setLevel(Level.WARN)

  /**
   * Entry point: converts raw comma-separated log files into
   * snappy-compressed parquet files.
   *
   * args(0) = dataInputPath  : path of the raw input files
   * args(1) = dataOutputPath : output path for the parquet files
   *
   * Exits with status 1 when the argument count is wrong.
   */
  def main(args: Array[String]): Unit = {
    // Validate arguments before touching Spark.
    if (args.length != 2) {
      println(
        """
          |Usage:
          |com.wei.violet.app.DMPAPP
          |args：
          |dataInputPath:原始文件输入路径
          |dataOutputPath:parquet文件输出路径
          |
      """.stripMargin)

      sys.exit(1)
    }

    // Destructure the two expected arguments.
    val Array(dataInputPath, dataOutputPath) = args

    val conf: SparkConf = new SparkConf()
      // NOTE(review): master is hard-coded; fine for local runs, but should be
      // supplied via spark-submit for cluster deployment.
      .setMaster("local[*]")
      .setAppName("原始日志文件转换成parquet文件")
      // FIX: the key was misspelled as "spark.serizlizer", so Kryo was never
      // actually enabled. The correct Spark property is "spark.serializer".
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new SparkContext(conf)

    // Read raw lines, split with limit -1 to keep trailing empty fields,
    // and drop malformed records that have fewer than 85 columns.
    val rowRdd: RDD[Row] = sc.textFile(dataInputPath)
      .map(_.split(",", -1))
      .filter(_.length >= 85)
      .map(toRow)

    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    // Compress the parquet output with snappy.
    sqlContext.setConf("spark.sql.parquet.compression.codec", "snappy")
    val dataFrame: DataFrame = sqlContext.createDataFrame(rowRdd, schema)

    // Delete a pre-existing output directory so the parquet write cannot
    // fail with "path already exists".
    val fs: FileSystem = FileSystem.get(sc.hadoopConfiguration)
    val outputPath = new Path(dataOutputPath)
    if (fs.exists(outputPath)) {
      fs.delete(outputPath, true)
    }

    // Save the converted data.
    //.partitionBy("provincename", "cityname")
    dataFrame.write.parquet(dataOutputPath)
    sc.stop()
  }

  /**
   * Maps one split log line (>= 85 columns guaranteed by the caller's filter)
   * onto a [[Row]] whose column order/types match [[schema]].
   *
   * toIntPlus / toDoublePlus come from HandleString — presumably lenient
   * parsers that default on unparseable input (TODO: confirm in utils).
   */
  private def toRow(fields: Array[String]): Row = {
    import com.wei.violet.utils.HandleString._
    Row(
      fields(0),
      fields(1).toIntPlus,
      fields(2).toIntPlus,
      fields(3).toIntPlus,
      fields(4).toIntPlus,
      fields(5),
      fields(6),
      fields(7).toIntPlus,
      fields(8).toIntPlus,
      fields(9).toDoublePlus,
      fields(10).toDoublePlus,
      fields(11),
      fields(12),
      fields(13),
      fields(14),
      fields(15),
      fields(16),
      fields(17).toIntPlus,
      fields(18),
      fields(19),
      fields(20).toIntPlus,
      fields(21).toIntPlus,
      fields(22),
      fields(23),
      fields(24),
      fields(25),
      fields(26).toIntPlus,
      fields(27),
      fields(28).toIntPlus,
      fields(29),
      fields(30).toIntPlus,
      fields(31).toIntPlus,
      fields(32).toIntPlus,
      fields(33),
      fields(34).toIntPlus,
      fields(35).toIntPlus,
      fields(36).toIntPlus,
      fields(37),
      fields(38).toIntPlus,
      fields(39).toIntPlus,
      fields(40).toDoublePlus,
      fields(41).toDoublePlus,
      fields(42).toIntPlus,
      fields(43),
      fields(44).toDoublePlus,
      fields(45).toDoublePlus,
      fields(46),
      fields(47),
      fields(48),
      fields(49),
      fields(50),
      fields(51),
      fields(52),
      fields(53),
      fields(54),
      fields(55),
      fields(56),
      fields(57).toIntPlus,
      fields(58).toDoublePlus,
      fields(59).toIntPlus,
      fields(60).toIntPlus,
      fields(61),
      fields(62),
      fields(63),
      fields(64),
      fields(65),
      fields(66),
      fields(67),
      fields(68),
      fields(69),
      fields(70),
      fields(71),
      fields(72),
      fields(73).toIntPlus,
      fields(74).toDoublePlus,
      fields(75).toDoublePlus,
      fields(76).toDoublePlus,
      fields(77).toDoublePlus,
      fields(78).toDoublePlus,
      fields(79),
      fields(80),
      fields(81),
      fields(82),
      fields(83),
      fields(84).toIntPlus
    )
  }

  /** Schema of one ad-log record; column order/types must match [[toRow]]. */
  private val schema: StructType = StructType(
    Seq(
      StructField("sessionid", StringType),
      StructField("advertisersid", IntegerType),
      StructField("adorderid", IntegerType),
      StructField("adcreativeid", IntegerType),
      StructField("adplatformproviderid", IntegerType),
      StructField("sdkversion", StringType),
      StructField("adplatformkey", StringType),
      StructField("putinmodeltype", IntegerType),
      StructField("requestmode", IntegerType),
      StructField("adprice", DoubleType),
      StructField("adppprice", DoubleType),
      StructField("requestdate", StringType),
      StructField("ip", StringType),
      StructField("appid", StringType),
      StructField("appname", StringType),
      StructField("uuid", StringType),
      StructField("device", StringType),
      StructField("client", IntegerType),
      StructField("osversion", StringType),
      StructField("density", StringType),
      StructField("pw", IntegerType),
      StructField("ph", IntegerType),
      StructField("long", StringType),
      StructField("lat", StringType),
      StructField("provincename", StringType),
      StructField("cityname", StringType),
      StructField("ispid", IntegerType),
      StructField("ispname", StringType),
      StructField("networkmannerid", IntegerType),
      StructField("networkmannername", StringType),
      StructField("iseffective", IntegerType),
      StructField("isbilling", IntegerType),
      StructField("adspacetype", IntegerType),
      StructField("adspacetypename", StringType),
      StructField("devicetype", IntegerType),
      StructField("processnode", IntegerType),
      StructField("apptype", IntegerType),
      StructField("district", StringType),
      StructField("paymode", IntegerType),
      StructField("isbid", IntegerType),
      StructField("bidprice", DoubleType),
      StructField("winprice", DoubleType),
      StructField("iswin", IntegerType),
      StructField("cur", StringType),
      StructField("rate", DoubleType),
      StructField("cnywinprice", DoubleType),
      StructField("imei", StringType),
      StructField("mac", StringType),
      StructField("idfa", StringType),
      StructField("openudid", StringType),
      StructField("androidid", StringType),
      StructField("rtbprovince", StringType),
      StructField("rtbcity", StringType),
      StructField("rtbdistrict", StringType),
      StructField("rtbstreet", StringType),
      StructField("storeurl", StringType),
      StructField("realip", StringType),
      StructField("isqualityapp", IntegerType),
      StructField("bidfloor", DoubleType),
      StructField("aw", IntegerType),
      StructField("ah", IntegerType),
      StructField("imeimd5", StringType),
      StructField("macmd5", StringType),
      StructField("idfamd5", StringType),
      StructField("openudidmd5", StringType),
      StructField("androididmd5", StringType),
      StructField("imeisha1", StringType),
      StructField("macsha1", StringType),
      StructField("idfasha1", StringType),
      StructField("openudidsha1", StringType),
      StructField("androididsha1", StringType),
      StructField("uuidunknow", StringType),
      StructField("userid", StringType),
      StructField("iptype", IntegerType),
      StructField("initbidprice", DoubleType),
      StructField("adpayment", DoubleType),
      StructField("agentrate", DoubleType),
      StructField("lrate", DoubleType),
      StructField("adxrate", DoubleType),
      StructField("title", StringType),
      StructField("keywords", StringType),
      StructField("tagid", StringType),
      StructField("callbackdate", StringType),
      StructField("channelid", StringType),
      StructField("mediatype", IntegerType)
    )
  )

}
