package lhb.spark.sparkclean

import java.sql.Timestamp
import java.text.SimpleDateFormat

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

/** One cleaned alarm record parsed from the raw CSV dump.
  *
  * @param timein    timestamp when the alarm became active
  * @param timeout   timestamp when the alarm cleared
  * @param tagname   alarm point/tag identifier (column 3 of the raw file)
  * @param almvalue  recorded alarm value (column 5 of the raw file)
  * @param almstatus alarm status text (column 9 of the raw file)
  */
final case class alarm(timein: Timestamp, timeout: Timestamp,
                       tagname: String, almvalue: String,
                       almstatus: String)

object sparkclean {

  /** Entry point: reads raw alarm CSV part-files from HDFS, parses them into
    * typed `alarm` rows, drops rows with blank/null tag names or timestamps
    * outside the target window, and bulk-writes the result to a SQL Server
    * table over JDBC.
    */
  def main(args: Array[String]): Unit = {

    // Time window bounding timein/timeout for the rows we keep.
    val startTime = "2018-12-22 00:00:00"
    val stopTime  = "2018-12-30 23:59:59"
    val windowStart = Timestamp.valueOf(startTime)
    val windowStop  = Timestamp.valueOf(stopTime)

    val spark = SparkSession.builder().appName("sparkclean").getOrCreate()

    import spark.implicits._

    // Parse the raw CSV parts into typed alarm rows.
    // split(",", -1) keeps trailing empty fields, so attr(9) stays addressable
    // even when the last columns are blank — plain split(",") silently drops
    // trailing empties and would throw ArrayIndexOutOfBoundsException here.
    val frame = spark.sparkContext
        .textFile("hdfs://192.168.0.201:8020/spark/data/part-m-0000[0-3]")
        .map(_.split(",", -1))
        .map(attr => alarm(Timestamp.valueOf(attr(0)), Timestamp.valueOf(attr(1)),
                            attr(3).trim, attr(5).trim,   // trim already returns String; .toString was redundant
                            attr(9).trim))
        .toDF()

    // 去除nan值,划分数据 — drop empty/null tag names and keep only rows
    // falling inside the [windowStart, windowStop] time window.
    val dataFrame = frame
      .where("tagname <> ''")
      .filter($"timein" > windowStart && $"timeout" < windowStop
              && frame.col("tagname").isNotNull
      )

    // NOTE(review): credentials are hard-coded in source; move them to a
    // config file or secret store before this goes anywhere shared.
    // "truncate" = True makes overwrite TRUNCATE the existing table instead
    // of dropping and recreating it, preserving its schema and indexes.
    dataFrame.write.format("jdbc")  // built-in source names are lowercase by convention
      .option("url", "jdbc:sqlserver://192.168.0.200:1433;DatabaseName=sjk")
      .option("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver")
      .option("dbtable", "dbo.ALARMNONULL")
      .option("user", "sa")
      .option("password", "123456")
      .option("truncate", "True")
      .mode("overwrite")
      .save()

    // Release cluster resources before the driver JVM exits.
    spark.stop()
  }

}
