package lhb.spark.sparkresult

import lhb.spark.sparkreduce.alarm
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/** A single alarm record parsed from the comma-separated HDFS export.
  *
  * All fields are kept as raw strings exactly as read from the file.
  *
  * @param timein    raw "time in" column value — presumably the activation timestamp; TODO confirm format
  * @param timeout   raw "time out" column value — presumably the clearing timestamp; TODO confirm format
  * @param tagname   identifier of the alarm tag (used as the grouping key downstream)
  * @param almvalue  raw alarm value column
  * @param almstatus raw alarm status column
  */
case class alarm(
    timein: String,
    timeout: String,
    tagname: String,
    almvalue: String,
    almstatus: String)

object sparkresult {

  /** Entry point: reads the alarm CSV export from HDFS, drops records with an
    * empty tag name, keys each record by its tag name, and prints the result.
    *
    * Fixes over the previous revision:
    *  - the keyed RDD was built but never acted upon; because RDD
    *    transformations are lazy, the job previously did no work at all —
    *    an action (`collect`) now materializes it;
    *  - `spark.stop()` is now always called so cluster resources are released;
    *  - malformed/short CSV lines no longer crash the parse (length guard);
    *  - `getAs` is given an explicit `[String]` type parameter instead of
    *    relying on erasure with an inferred `Nothing`.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder().appName("sparkresult").getOrCreate()

    import spark.implicits._

    try {
      // Parse the raw CSV export into the alarm case class.
      // Columns used: 0=timein, 1=timeout, 3=tagname, 5=almvalue, 9=almstatus.
      val frame: DataFrame = spark.sparkContext
        .textFile("hdfs://192.168.0.201:8020/spark/data/part-m-0000[0-3]")
        .map(_.split(","))
        // Index 9 is the highest column read; skip rows too short to hold it
        // instead of failing the whole job with ArrayIndexOutOfBoundsException.
        .filter(_.length > 9)
        // `trim` already yields a String — no `.toString` needed.
        .map(attr => alarm(attr(0).trim, attr(1).trim,
          attr(3).trim, attr(5).trim, attr(9).trim))
        .toDF()

      // Records without a tag name cannot be keyed; drop them.
      val nonull = frame.where("tagname <> ''")

      val rddnonull: RDD[Row] = nonull.rdd

      // Key each record by tagname; the value tuple carries the other fields.
      val byTag: RDD[(String, (String, String, String, String))] = rddnonull
        .map { row =>
          (row.getAs[String]("tagname"),
            (row.getAs[String]("timein"),
              row.getAs[String]("timeout"),
              row.getAs[String]("almvalue"),
              row.getAs[String]("almstatus")))
        }

      // BUG FIX: without an action the lazy pipeline above never executed.
      // Collect to the driver and print each keyed record.
      // NOTE(review): if the dataset is large, replace with saveAsTextFile —
      // confirm the intended output sink with the job owner.
      byTag.collect().foreach(println)
    } finally {
      // Always release the SparkSession, even if the job fails.
      spark.stop()
    }
  }
}
