package cn.xiaoniu.dmp


import cn.xiaoniu.dmp.bean.LogSchema
import cn.xiaoniu.dmp.helper.ConfigHelper
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
import utils.NumParse

/**
  * Converts bz2-compressed log files into Parquet files.
  *
  * author: sheep.Old
  * qq: 64341393
  * Created 2018/5/29
  */
object Bz2Parquet {

    // Column indices (0-based) of the 85-column CSV log that must be parsed as Int.
    // Derived from the original per-column Row(...) construction.
    private val intIndices: Set[Int] = Set(
        1, 2, 3, 4, 7, 8, 17, 20, 21, 26, 28, 30, 31, 32, 34, 35, 36,
        38, 39, 42, 57, 59, 60, 73, 84
    )

    // Column indices that must be parsed as Double.
    private val doubleIndices: Set[Int] = Set(
        9, 10, 40, 41, 44, 45, 58, 74, 75, 76, 77, 78
    )

    /**
      * Builds a [[Row]] from the first 85 fields of a split log line,
      * converting int/double columns via NumParse and keeping the rest as String.
      * Replaces the original hand-written 85-argument Row(...) call.
      *
      * @param arr split log line; caller guarantees arr.length >= 85
      */
    private def toRow(arr: Array[String]): Row = {
        val values: Seq[Any] = (0 until 85).map { i =>
            if (intIndices(i)) NumParse.toInt(arr(i))
            else if (doubleIndices(i)) NumParse.toDouble(arr(i))
            else arr(i)
        }
        Row.fromSeq(values)
    }

    /**
      * Entry point: reads bz2 log lines, converts them to a DataFrame using
      * LogSchema.schema, and writes the result as snappy-compressed Parquet.
      *
      * @param args optional overrides: args(0) = input path, args(1) = output path.
      *             Defaults preserve the original hard-coded local paths.
      */
    def main(args: Array[String]): Unit = {

        // Generalized: paths can now be supplied on the command line;
        // defaults keep the original behavior.
        val inputPath  = if (args.length > 0) args(0)
                         else "D:\\2016-10-01_06_p1_invalid.1475274133118.log.FINISH.bz2"
        val outputPath = if (args.length > 1) args(1) else "D:/dmp/parquet"

        val sparkConf = new SparkConf()
          .setAppName("将bz2日志转换成parquet文件")
          .setMaster("local[*]")
          .set("spark.serializer", ConfigHelper.serializer)

        val sc = new SparkContext(sparkConf)
        val sqlContext = new SQLContext(sc)
        sqlContext.setConf("spark.sql.parquet.compression.codec", "snappy")

        // try/finally guarantees the SparkContext is released even if the job fails.
        try {
            // Read raw log lines; split with limit -1 so trailing empty fields are kept,
            // then drop malformed lines that have fewer than the 85 expected columns.
            val filtered: RDD[Array[String]] = sc.textFile(inputPath)
              .map(_.split(",", -1))
              .filter(_.length >= 85)

            val rowRDD: RDD[Row] = filtered.map(toRow)

            // (A case-class based toDF() alternative was removed: Scala 2.10 case
            // classes cannot exceed 22 fields, hence the Row + schema approach.)
            val logDataFrame = sqlContext.createDataFrame(rowRDD, LogSchema.schema)

            // BUG FIX: the original deleted "f:/dmp/parquet" but wrote to
            // "D:/dmp/parquet" — the delete targeted the wrong directory.
            // Both now use the same outputPath. (SaveMode.Overwrite would also
            // replace existing output; the explicit delete is kept for parity.)
            val fs = FileSystem.get(sc.hadoopConfiguration)
            val outDir = new Path(outputPath)
            if (fs.exists(outDir)) {
                fs.delete(outDir, true)
            }

            // Write the result as Parquet (snappy codec set on the SQLContext above).
            logDataFrame.write.mode(SaveMode.Overwrite).parquet(outputPath)
        } finally {
            // Release resources.
            sc.stop()
        }
    }

}
