package com.jnpc.spark

import com.jnpc.spark.project.domain.{FamenGuzhangCount, FamenGuzhangItems}
import com.jnpc.spark.project.dao.FaMenGuzhangCountDAO
import com.jnpc.spark.project.utils.{FamenUtil, NLPUtil}
import org.apache.spark.sql.{Row, SaveMode, SparkSession}

import scala.collection.mutable.ListBuffer

/**
  * Created by yangqiang on 2018/3/28 0028.
  */
/**
  * Batch job: reads '@'-delimited CR (condition report) records from HDFS,
  * keeps valve-related ("阀门") reports with a valid date, classifies each
  * subject via NLP word segmentation + FamenUtil, then persists per
  * (yyyyMMdd_faultType) key both the occurrence count and the list of CR ids.
  *
  * Usage: BatchProcessDataOnYARN <crfilepath>
  */
object BatchProcessDataOnYARN {
  def main(args: Array[String]): Unit = {

    if (args.length != 1) {
      println("Usage: BatchProcessDataOnYARN <crfilepath>  ")
      System.exit(1)
    }
    val Array(inputPath) = args
    val spark = SparkSession.builder().appName("BatchProcessDataOnYARNApp").getOrCreate()
    val crlistcnnpRDD = spark.sparkContext.textFile(inputPath)

    val crrdd = crlistcnnpRDD.map(_.split("@"))
      // Guard against malformed lines: fewer than 9 fields would throw
      // ArrayIndexOutOfBoundsException in the CRReport constructor below.
      .filter(_.length >= 9)
      .map(line =>
        CRReport(line(0), line(1), line(2), line(3), line(4), line(5),
          line(6), line(7), line(8))
      )
      .filter(_.cr_subject.contains("阀门"))
      .filter(!_.cr_date.equals("null"))

    println(crrdd.count())

    // Classify each report: segment the subject text, then map the tokens to a
    // fault type. NLPUtil is instantiated once per partition (not per record)
    // because construction is presumably expensive — NOTE(review): assumes one
    // NLPUtil instance can be reused across records within a partition; confirm.
    val finalrdd = crrdd.mapPartitions(iter => {
      val nlp = new NLPUtil()
      iter.map(x => {
        val fc = nlp.getFenciResult(x.cr_subject)
        val fctype = FamenUtil.getType(fc)
        (x, fctype)
      })
    }).filter(!_._2.equals(""))

    // Mark the RDD cached BEFORE the first action so the count below actually
    // populates the cache; it is reused by the two pipelines that follow.
    finalrdd.cache()
    println(finalrdd.count())

    // Pipeline 1: count occurrences per "yyyyMMdd_faultType" key and persist.
    finalrdd
      .map(x => (x._1.cr_date.replace("-", "") + "_" + x._2, 1))
      .reduceByKey(_ + _)
      .foreachPartition(partitionOfRecords => {
        val list = new ListBuffer[FamenGuzhangCount]
        partitionOfRecords.foreach(pair => list.append(FamenGuzhangCount(pair._1, pair._2)))
        FaMenGuzhangCountDAO.save(list)
      })

    // Pipeline 2: collect the CR ids per key as a comma-separated string.
    // The empty zero value is special-cased in both seqOp and combOp so the
    // result has no spurious leading comma (the original produced ",id1,id2").
    finalrdd
      .map(x => (x._1.cr_date.replace("-", "") + "_" + x._2, x._1.id_key_cr))
      .aggregateByKey("")(
        (acc, v) => if (acc.isEmpty) v else acc + "," + v,
        (a, b) => if (a.isEmpty) b else if (b.isEmpty) a else a + "," + b
      )
      .foreachPartition(partitionOfRecords => {
        val list = new ListBuffer[FamenGuzhangItems]
        partitionOfRecords.foreach(pair => list.append(FamenGuzhangItems(pair._1, pair._2)))
        FaMenGuzhangCountDAO.saveItems(list)
      })

    // Side-effecting 0-arity call keeps its parentheses.
    spark.stop()
  }

  /** One '@'-delimited condition-report record as read from the input file. */
  case class CRReport(id_key_cr: String, cr_no: String, cr_equipment: String,
                      cr_unit: String, cr_date: String, cr_unit_stat: String,
                      cr_subject: String, cr_desc: String, cr_cause: String)
}
