package lhb.spark.sparkselect

import java.sql.Timestamp


import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
 * One alarm record parsed from the exported CSV rows
 * (columns 0, 1, 3, 5 and 9 of the raw file).
 *
 * @param timein    time the alarm was raised (raw string)
 * @param timeout   time the alarm was cleared (raw string)
 * @param tagname   point/tag identifier the alarm belongs to
 * @param almvalue  value recorded when the alarm fired
 * @param almstatus alarm status field
 */
case class alarm(
    timein: String,
    timeout: String,
    tagname: String,
    almvalue: String,
    almstatus: String)

object sparkselect {

  /**
   * Batch job: reads alarm CSV exports from HDFS, keeps rows for a single
   * tag, and overwrites the result into a SQL Server table over JDBC.
   *
   * @param args optional; args(0) overrides the tag to filter for
   *             (defaults to "ZYDL_ZG3_IA", preserving old behavior).
   */
  def main(args: Array[String]): Unit = {

    // Tag to keep; can now be supplied on the command line.
    val keytag = args.headOption.getOrElse("ZYDL_ZG3_IA")

    val spark = SparkSession.builder().appName("sparkselect ").getOrCreate()

    try {
      import spark.implicits._

      // Parse raw comma-separated lines into typed alarm records.
      // Only columns 0, 1, 3, 5, 9 are used; assumes every row has at
      // least 10 fields — TODO confirm against the source export.
      val frame: DataFrame = spark.sparkContext
        .textFile("hdfs://192.168.0.201:8020/spark/data/part-m-0000[0-3]")
        .map(_.split(","))
        .map(attr => alarm(attr(0).trim, attr(1).trim,
          attr(3).trim, attr(5).trim, attr(9).trim))
        .toDF()

      // Drop rows whose tag name is empty before keying by it.
      val nonull = frame.where("tagname <> ''")

      val rddnonull: RDD[Row] = nonull.rdd

      // Key each row by tagname; keep the remaining columns as the value.
      val originRdd: RDD[(String, (String, String, String, String))] =
        rddnonull.map { t =>
          val key = t.getAs[String]("tagname")
          val value = (t.getAs[String]("timein"),
            t.getAs[String]("timeout"),
            t.getAs[String]("almvalue"),
            t.getAs[String]("almstatus"))
          (key, value)
        }

      // Keep only the requested tag and flatten to a 5-column DataFrame.
      val result: DataFrame = originRdd
        .filter { case (tag, _) => tag == keytag }
        .map { case (tag, (timein, timeout, almvalue, almstatus)) =>
          (tag, timein, timeout, almvalue, almstatus)
        }
        .toDF()

      // NOTE(review): credentials are hard-coded; move them to a config
      // file or secret store before this runs anywhere shared.
      result.write.format("JDBC")
        .option("url", "jdbc:sqlserver://192.168.0.200:1433;DatabaseName=sjk")
        .option("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver")
        .option("dbtable", "dbo.ALARMFILTER")
        .option("user", "sa")
        .option("password", "123456")
        .option("truncate", "True")
        .mode("overwrite")
        .save()
    } finally {
      // Release cluster resources even if the job fails partway through.
      spark.stop()
    }
  }

}
