package com.chenjj.etl.datatrans.job

import java.util
import java.util.List

import com.chenjj.etl.datatrans.converter.{DataMetaInfo, FlagInfo}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.matching.Regex

/**
  * Process gz files via Spark:
  *
  * 1. Decompress the file.
  * 2. Read line by line, converting fixed-width records into delimited records in a new file.
  */
object GzFileProcessBySpark {

  // Path to the .flg metadata file describing the fixed-width layout.
  // NOTE(review): hard-coded Windows path with backslashes inside a "file:" URI —
  // only works for local Windows development runs; parameterize before deploying.
  val flagFilePath = "file:\\D:\\Data\\WorkData\\长亮科技\\解决方案\\POC\\兴业银行SAS数据分析平台二期项目\\POC方案以及相关资料\\测试文件\\MA_SUBJ_LKP.20150331.000000.0000.flg"
  // Data-file path is not wired up yet (step 2 of the job is unimplemented).
  val dataFilePath = ""

  def main(args: Array[String]): Unit = {

    // Spark configuration: local mode sized for development/testing.
    val conf = new SparkConf()
      .setAppName("Dingchang2Dingjie")
      .setMaster("local")
      .set("spark.testing.memory", "1024000000")

    // Spark context
    val sc = new SparkContext(conf)
    try {
      // 1. Parse the flag file into a FlagInfo describing the data layout.
      val fileRDD: RDD[String] = sc.textFile(flagFilePath).cache()
      val flagInfo = parseFlagFile(sc, fileRDD)
    } finally {
      // Fix: the original never stopped the context, leaking Spark resources.
      sc.stop()
    }
  }

  /**
    * Parse the .flg metadata file into a [[FlagInfo]].
    *
    * The flag file contains "KEY=value" lines (ROWCOUNT, ROWLENGTH) plus one
    * field-descriptor line per column, recognizable by a "$$" marker.
    *
    * @param sc      active Spark context (used to broadcast the parsed result)
    * @param fileRDD one line of the flag file per RDD element
    * @return the populated FlagInfo
    */
  def parseFlagFile(sc: SparkContext, fileRDD: RDD[String]): FlagInfo = {
    val flagInfo: FlagInfo = new FlagInfo()

    // Extract the numeric value of the first line containing "KEY=".
    // NOTE(review): contains() also matches keys with a prefix (e.g. "X_ROWCOUNT=");
    // assumes the flag file has no such collisions — confirm against the file spec.
    def longValueOf(key: String): Long =
      fileRDD
        .filter(line => line.contains(key))
        .map(line => line.split("\\=")(1).toLong)
        .first()

    // 1.1 Parse ROWCOUNT.
    flagInfo.setRowCount(longValueOf("ROWCOUNT="))

    // 1.2 Parse field metadata: each "$$" line describes one column.
    val dataMetaInfos = fileRDD
      .filter(line => line.contains("$$"))
      .map(line => new DataMetaInfo(line))
      .collect()
    // Fix: build the Java list directly instead of an index-based copy loop.
    val mlist = new util.ArrayList[DataMetaInfo](util.Arrays.asList(dataMetaInfos: _*))
    flagInfo.setDataMetaInfos(mlist)

    // 1.3 Parse ROWLENGTH (the fixed record length).
    //val rowlength = flagInfo.getDataMetaInfos.get(flagInfo.getDataMetaInfos.size()-1).getEndIndex()
    flagInfo.setRowLength(longValueOf("ROWLENGTH="))

    // 1.4 Broadcast for use by the (future) data-file processing stage.
    val broadCastFlagInfo = sc.broadcast(flagInfo)

    println("broadCastFlagInfo is: " + broadCastFlagInfo.value)

    // 2. Data-file RDD processing — not implemented yet; debug output only.

    println(fileRDD.count)
    println(flagInfo.getRowLength)
    println(flagInfo.getFileSize)
    println(flagInfo.getRowCount)
    println(flagInfo.getDataMetaInfos)
    //  val fs = FileSystem.get(sc.hadoopConfiguration);
    //    fs.globStatus(new Path(""));
    flagInfo
  }
}
