package com.comtop.db

import com.comtop.db.fastar.FastarUtils
import com.comtop.db.fastar.JdbcConnection
import org.apache.spark.rdd.JdbcRDD

/**
 * Ad-hoc driver that loads a time-bounded slice of DATA_TABLE through a
 * Spark [[org.apache.spark.rdd.JdbcRDD]] and reports how many distinct
 * hour buckets the rows fall into.
 *
 * For pre-built hour/day/week/month aggregations see
 * [[com.comtop.db.fastar.FastarUtils]] (getHourEQ / getDayEQ / getWeekEQ / getMonthEQ).
 */
object mainManager {
  def main(args: Array[String]): Unit = {
    println("***************************Just do it...***************************")

    // NOTE(review): the TIME_MARK literals use ':' before the millis
    // ('00:00:00:000') — nonstandard; presumably required by the target DB's
    // timestamp format. Confirm against the schema.
    //
    // JdbcRDD requires the query to contain EXACTLY two '?' placeholders,
    // which it binds to each partition's lower/upper bound via setLong.
    // The trailing clause is always true for bounds (1, 100), so the result
    // set is unchanged while satisfying that contract. Without placeholders
    // the statement fails at runtime with a SQLException.
    val sqlHour =
      "SELECT  *  FROM DATA_TABLE WHERE NAME='FOR-TEST-POINT-NAME-00000000'" +
        "  AND TIME_MARK >= '2016-06-01 00:00:00:000'" +
        " AND TIME_MARK < '2016-06-04 00:00:00:000'" +
        " AND ? <= 1 AND ? >= 1"

    // Single partition (numPartitions = 1) over the dummy bound range [1, 100];
    // each row is mapped to (name, value, timestamp).
    val rdd = new JdbcRDD(
      JdbcConnection.sc,
      JdbcConnection.getConnection,
      sqlHour,
      1, 100, 1,
      r => (r.getString("NAME"), r.getDouble("VALUE"), r.getTimestamp("TIME_MARK")))

    // java.sql.Timestamp.toString renders "yyyy-mm-dd hh:mm:ss.fffffffff";
    // the first 13 characters ("yyyy-mm-dd hh") identify the hour bucket.
    // substring (not subSequence) so the groupBy key is a plain String with
    // well-defined equals/hashCode.
    val hourBucketCount = rdd
      .groupBy(row => row._3.toString.substring(0, 13))
      .collect()
      .length

    // Previously this count was computed and silently discarded.
    println(s"Distinct hour buckets: $hourBucketCount")
  }
}