import java.io.File
import java.util.Date

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel

object BalanceQuery {

  /**
    * Entry point. Runs one of the balance-query variants; the alternatives are
    * kept commented out for manual switching during benchmarking.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    // Record the start timestamp; the original code computed this but never
    // used it — complete the evident intent and report elapsed wall time.
    val start = new Date().getTime
//    queryFromHdfs()
//    queryFromJDBC()
//    queryFromHdfsByHive()
    queryFromAlluxio()
    println(s"query finished in ${new Date().getTime - start} ms")
  }


  /**
    * Reads account info/water data as parquet directly from HDFS, registers
    * temp views and runs the balance-reconciliation query, printing the result.
    *
    * NOTE(review): in the "info" branch, t1 and t2 are both drawn from the same
    * table with identical filters, so for matched rows the balance difference
    * is always 0 — confirm whether t2 was meant to read a different snapshot.
    */
  def queryFromHdfs(): Unit = {
    val sparkSession = SparkSession
      .builder()
      .appName("BalanceQuery_HDFS") // fixed typo: "Banlance" -> "Balance", consistent with the other methods
      .getOrCreate()

    // Checkpoint directory for any lineage truncation done downstream.
    sparkSession.sparkContext.setCheckpointDir("hdfs://yun0:9000/tentest/persist/")

    // Parquet carries its own schema; a CSV source would need explicit column
    // names via toDF(...) or Spark generates _c0, _c1, ...
    val infodf = sparkSession.read.parquet("hdfs://yun0:9000/test/info_test")
    val waterdf = sparkSession.read.parquet("hdfs://yun0:9000/test/water_test")
    // createOrReplaceTempView is idempotent; plain createTempView throws if the
    // view already exists in this session, so use the same call for both views.
    infodf.createOrReplaceTempView("qpoint_info")
    waterdf.createOrReplaceTempView("qpoint_water")

    val qpointInfoTable = "qpoint_info"
    val qpointWaterTable = "qpoint_water"
    // The statistics date was hard-coded in ~14 places inside the SQL; keep it
    // in one local so a date change cannot be applied inconsistently.
    val statisDate = 20170813

    // Reconciliation query: "info" rows compare balances between two snapshots;
    // "water" rows aggregate out-flows (fio_flag = 1) and in-flows (fio_flag = 2)
    // within the (beg_time, end_time] window and keep non-zero differences.
    val qpointSql =
      s"""SELECT * FROM ( SELECT t1.statis_date AS statis_date ,t1.fuin AS acct_id ,t1.facct_type AS acct_type ,'qqpoint' AS channel ,'' AS subchannel ,'info' AS datatype ,
         |CASE WHEN t2.fbalance IS NULL THEN - CAST(t1.fbalance AS int) ELSE t2.fbalance - CAST(t1.fbalance AS int) END AS amt ,t1.fopen_time AS open_time ,t1.fchg_time AS chg_time ,t1.flast_tran_time AS last_tran_time ,t1.fstatus AS status ,'' as remark
         |FROM ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate AND facct_type = '3' ) t1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate AND facct_type = '3' ) t2 ON ( t1.fuin = t2.fuin AND t1.facct_type = t2.facct_type )
         |WHERE CASE WHEN t2.fbalance IS NULL THEN - CAST(t1.fbalance AS int) ELSE t2.fbalance - CAST(t1.fbalance AS int) END != 0
         |UNION ALL SELECT $statisDate AS statis_date ,NVL(t3.fuin, t4.fuin) AS facct_id ,NVL(t3.facct_type, t4.facct_type) AS facct_type ,'qqpoint' ,'' ,'water' ,
         |CASE WHEN t3.out_amt IS NULL THEN - t4.in_amt WHEN t4.in_amt IS NULL THEN t3.out_amt ELSE t3.out_amt - t4.in_amt END AS amt ,t3.out_amt ,t4.in_amt ,'' ,
         |CASE WHEN out_num IS null THEN in_num WHEN in_num IS null THEN out_num ELSE out_num + in_num END ,''
         |FROM ( SELECT fuin ,facct_type ,SUM(ftran_amt) AS out_amt ,COUNT(ftran_amt) AS out_num FROM ( SELECT ta3.* ,ta4.fchg_time AS beg_time
         |FROM ( SELECT ta1.* ,ta2.fchg_time AS end_time FROM ( SELECT * FROM $qpointWaterTable WHERE statis_day = $statisDate AND facct_type = '3' ) ta1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta2 ON ( ta1.fuin = ta2.fuin AND ta1.facct_type = ta2.facct_type ) ) ta3
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta4 ON ( ta3.fuin = ta4.fuin AND ta3.facct_type = ta4.facct_type ) )
         |WHERE fio_flag = 1 AND end_time IS NOT NULL AND ftran_type <> 101 AND ftran_type <> 102 AND UNIX_TIMESTAMP(end_time) >= UNIX_TIMESTAMP(fchg_time)
         |AND ( beg_time IS null OR UNIX_TIMESTAMP(beg_time) < UNIX_TIMESTAMP(fchg_time) ) GROUP BY fuin ,facct_type ) t3 FULL JOIN ( SELECT fuin ,facct_type ,SUM(ftran_amt) AS in_amt ,
         |COUNT(ftran_amt) AS in_num FROM ( SELECT ta3.* ,ta4.fchg_time AS beg_time FROM
         |( SELECT ta1.* ,ta2.fchg_time AS end_time FROM ( SELECT * FROM $qpointWaterTable WHERE statis_day = $statisDate AND facct_type = '3' ) ta1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta2 ON ( ta1.fuin = ta2.fuin AND ta1.facct_type = ta2.facct_type ) ) ta3
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta4 ON ( ta3.fuin = ta4.fuin AND ta3.facct_type = ta4.facct_type ) )
         |WHERE fio_flag = 2 AND end_time IS NOT NULL AND ftran_type <> 101 AND ftran_type <> 102 AND UNIX_TIMESTAMP(end_time) >= UNIX_TIMESTAMP(fchg_time) AND ( beg_time IS null OR UNIX_TIMESTAMP(beg_time) < UNIX_TIMESTAMP(fchg_time) )
         |GROUP BY fuin ,facct_type ) t4 ON ( t3.fuin = t4.fuin AND t3.facct_type = t4.facct_type )
         |WHERE CASE WHEN t3.out_amt IS NULL THEN - t4.in_amt WHEN t4.in_amt IS NULL THEN t3.out_amt ELSE t3.out_amt - t4.in_amt END != 0 )""".stripMargin

    val qpointResult = sparkSession.sql(qpointSql)
    qpointResult.show()
  }

  /**
    * Runs the balance-reconciliation query against Hive-managed tables
    * (tendb.t_int_qpoint_acct_info / _water) via Spark's Hive support.
    */
  def queryFromHdfsByHive() : Unit = {
    // BUG FIX: the original wrapped the URI in `new File(...).getAbsolutePath`,
    // which resolves "hdfs://..." as a *relative local path* against the CWD
    // and collapses the "//", mangling the warehouse location. Pass the HDFS
    // URI through unchanged.
    val warehouseLocation = "hdfs://yun0:9000/hive/warehouse/"
    val sparkSession = SparkSession
      .builder()
      .appName("BalanceQuery_HIVE")
      .config("spark.sql.warehouse.dir", warehouseLocation)
      .enableHiveSupport()
      .getOrCreate()

    val qpointInfoTable = "tendb.t_int_qpoint_acct_info"
    val qpointWaterTable = "tendb.t_int_qpoint_acct_water"
    // Single definition of the statistics date instead of ~14 inline copies.
    val statisDate = 20170813

    // Same reconciliation query as queryFromHdfs, but over the Hive tables:
    // "info" rows diff balances between snapshots, "water" rows aggregate
    // out-flows (fio_flag = 1) vs in-flows (fio_flag = 2) per account.
    val qpointSql =
      s"""SELECT * FROM ( SELECT t1.statis_date AS statis_date ,t1.fuin AS acct_id ,t1.facct_type AS acct_type ,'qqpoint' AS channel ,'' AS subchannel ,'info' AS datatype ,
         |CASE WHEN t2.fbalance IS NULL THEN - CAST(t1.fbalance AS int) ELSE t2.fbalance - CAST(t1.fbalance AS int) END AS amt ,t1.fopen_time AS open_time ,t1.fchg_time AS chg_time ,t1.flast_tran_time AS last_tran_time ,t1.fstatus AS status ,'' as remark
         |FROM ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate AND facct_type = '3' ) t1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate AND facct_type = '3' ) t2 ON ( t1.fuin = t2.fuin AND t1.facct_type = t2.facct_type )
         |WHERE CASE WHEN t2.fbalance IS NULL THEN - CAST(t1.fbalance AS int) ELSE t2.fbalance - CAST(t1.fbalance AS int) END != 0
         |UNION ALL SELECT $statisDate AS statis_date ,NVL(t3.fuin, t4.fuin) AS facct_id ,NVL(t3.facct_type, t4.facct_type) AS facct_type ,'qqpoint' ,'' ,'water' ,
         |CASE WHEN t3.out_amt IS NULL THEN - t4.in_amt WHEN t4.in_amt IS NULL THEN t3.out_amt ELSE t3.out_amt - t4.in_amt END AS amt ,t3.out_amt ,t4.in_amt ,'' ,
         |CASE WHEN out_num IS null THEN in_num WHEN in_num IS null THEN out_num ELSE out_num + in_num END ,''
         |FROM ( SELECT fuin ,facct_type ,SUM(ftran_amt) AS out_amt ,COUNT(ftran_amt) AS out_num FROM ( SELECT ta3.* ,ta4.fchg_time AS beg_time
         |FROM ( SELECT ta1.* ,ta2.fchg_time AS end_time FROM ( SELECT * FROM $qpointWaterTable WHERE statis_day = $statisDate AND facct_type = '3' ) ta1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta2 ON ( ta1.fuin = ta2.fuin AND ta1.facct_type = ta2.facct_type ) ) ta3
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta4 ON ( ta3.fuin = ta4.fuin AND ta3.facct_type = ta4.facct_type ) )
         |WHERE fio_flag = 1 AND end_time IS NOT NULL AND ftran_type <> 101 AND ftran_type <> 102 AND UNIX_TIMESTAMP(end_time) >= UNIX_TIMESTAMP(fchg_time)
         |AND ( beg_time IS null OR UNIX_TIMESTAMP(beg_time) < UNIX_TIMESTAMP(fchg_time) ) GROUP BY fuin ,facct_type ) t3 FULL JOIN ( SELECT fuin ,facct_type ,SUM(ftran_amt) AS in_amt ,
         |COUNT(ftran_amt) AS in_num FROM ( SELECT ta3.* ,ta4.fchg_time AS beg_time FROM
         |( SELECT ta1.* ,ta2.fchg_time AS end_time FROM ( SELECT * FROM $qpointWaterTable WHERE statis_day = $statisDate AND facct_type = '3' ) ta1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta2 ON ( ta1.fuin = ta2.fuin AND ta1.facct_type = ta2.facct_type ) ) ta3
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta4 ON ( ta3.fuin = ta4.fuin AND ta3.facct_type = ta4.facct_type ) )
         |WHERE fio_flag = 2 AND end_time IS NOT NULL AND ftran_type <> 101 AND ftran_type <> 102 AND UNIX_TIMESTAMP(end_time) >= UNIX_TIMESTAMP(fchg_time) AND ( beg_time IS null OR UNIX_TIMESTAMP(beg_time) < UNIX_TIMESTAMP(fchg_time) )
         |GROUP BY fuin ,facct_type ) t4 ON ( t3.fuin = t4.fuin AND t3.facct_type = t4.facct_type )
         |WHERE CASE WHEN t3.out_amt IS NULL THEN - t4.in_amt WHEN t4.in_amt IS NULL THEN t3.out_amt ELSE t3.out_amt - t4.in_amt END != 0 )""".stripMargin

    sparkSession.sql(qpointSql).show()
  }

  /**
    * Smoke-tests reading a CSV data source through the Alluxio distributed
    * cache layer by counting the rows of one sample file.
    */
  def queryFromAlluxio() : Unit = {
    val sparkSession = SparkSession
      .builder()
      .appName("BalanceQuery_Alluxio") // fixed typo: "Banlance" -> "Balance", consistent with the other methods
      .getOrCreate()

    // CSV read without explicit column names produces auto-generated columns
    // (_c0, _c1, ...); the original zero-argument toDF() was a no-op and is
    // dropped. Pass column names to toDF(...) if real names are needed.
    val infodf = sparkSession.read
      .csv("alluxio://yun0:19998/tentest/info_uniacct_day_20170815_100.csv")
    infodf.createOrReplaceTempView("testTable")

    // Row-count smoke test against the Alluxio-backed view. (The original also
    // built an unused "select * ... limit 10" string; removed as dead code.)
    val countSql = "select count(*) from testTable"
    sparkSession.sql(countSql).show()
  }





  /**
    * Loads the info/water tables over JDBC (MySQL/TDSQL), registers temp views
    * and runs the balance-reconciliation query, printing the result.
    */
  def queryFromJDBC() : Unit = {
    val spark = SparkSession
      .builder()
      .appName("BalanceQuery_JDBC")
      .getOrCreate()

    // SECURITY: the JDBC credentials are hard-coded below (unchanged from the
    // original). Move them to spark-submit --conf / environment configuration
    // before any production use.
    // Local helper: the two table loads differed only in `dbtable`.
    def readTable(table: String) =
      spark.read
        .format("jdbc")
        .option("url", "jdbc:mysql://202.112.113.68:28801/tenms")
        .option("dbtable", table)
        .option("driver", "com.mysql.jdbc.Driver")
        .option("user", "tdsql")
        .option("password", "RqN6gy&wjk1SKD3OD")
        .load()

    val infodf = readTable("qpoint_info")
    val waterdf = readTable("qpoint_water")

    infodf.printSchema()
    waterdf.printSchema()

    // createOrReplaceTempView is idempotent; plain createTempView throws if
    // the view already exists in this session, so use the same call for both.
    infodf.createOrReplaceTempView("qpoint_info")
    waterdf.createOrReplaceTempView("qpoint_water")

    val qpointInfoTable = "qpoint_info"
    val qpointWaterTable = "qpoint_water"
    // Single definition of the statistics date instead of ~14 inline copies.
    val statisDate = 20170813

    // Reconciliation query (JDBC variant additionally CASTs out_amt/in_amt to
    // STRING in the UNION ALL projection): "info" rows diff balances between
    // snapshots; "water" rows aggregate out-flows (fio_flag = 1) vs in-flows
    // (fio_flag = 2) per account and keep non-zero differences.
    val qpointSql =
      s"""SELECT * FROM ( SELECT t1.statis_date AS statis_date ,t1.fuin AS acct_id ,t1.facct_type AS acct_type ,'qqpoint' AS channel ,'' AS subchannel ,'info' AS datatype ,
         |CASE WHEN t2.fbalance IS NULL THEN - CAST(t1.fbalance AS int) ELSE t2.fbalance - CAST(t1.fbalance AS int) END AS amt ,t1.fopen_time AS open_time ,t1.fchg_time AS chg_time ,t1.flast_tran_time AS last_tran_time ,t1.fstatus AS status ,'' as remark
         |FROM ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate AND facct_type = '3' ) t1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate AND facct_type = '3' ) t2 ON ( t1.fuin = t2.fuin AND t1.facct_type = t2.facct_type )
         |WHERE CASE WHEN t2.fbalance IS NULL THEN - CAST(t1.fbalance AS int) ELSE t2.fbalance - CAST(t1.fbalance AS int) END != 0
         |UNION ALL SELECT $statisDate AS statis_date ,NVL(t3.fuin, t4.fuin) AS facct_id ,NVL(t3.facct_type, t4.facct_type) AS facct_type ,'qqpoint' ,'' ,'water' ,
         |CASE WHEN t3.out_amt IS NULL THEN - t4.in_amt WHEN t4.in_amt IS NULL THEN t3.out_amt ELSE t3.out_amt - t4.in_amt END AS amt ,CAST(t3.out_amt AS STRING) ,CAST(t4.in_amt AS STRING) ,'' ,
         |CASE WHEN out_num IS null THEN in_num WHEN in_num IS null THEN out_num ELSE out_num + in_num END ,''
         |FROM ( SELECT fuin ,facct_type ,SUM(ftran_amt) AS out_amt ,COUNT(ftran_amt) AS out_num FROM ( SELECT ta3.* ,ta4.fchg_time AS beg_time
         |FROM ( SELECT ta1.* ,ta2.fchg_time AS end_time FROM ( SELECT * FROM $qpointWaterTable WHERE statis_day = $statisDate AND facct_type = '3' ) ta1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta2 ON ( ta1.fuin = ta2.fuin AND ta1.facct_type = ta2.facct_type ) ) ta3
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta4 ON ( ta3.fuin = ta4.fuin AND ta3.facct_type = ta4.facct_type ) )
         |WHERE fio_flag = 1 AND end_time IS NOT NULL AND ftran_type <> 101 AND ftran_type <> 102 AND UNIX_TIMESTAMP(end_time) >= UNIX_TIMESTAMP(fchg_time)
         |AND ( beg_time IS null OR UNIX_TIMESTAMP(beg_time) < UNIX_TIMESTAMP(fchg_time) ) GROUP BY fuin ,facct_type ) t3 FULL JOIN ( SELECT fuin ,facct_type ,SUM(ftran_amt) AS in_amt ,
         |COUNT(ftran_amt) AS in_num FROM ( SELECT ta3.* ,ta4.fchg_time AS beg_time FROM
         |( SELECT ta1.* ,ta2.fchg_time AS end_time FROM ( SELECT * FROM $qpointWaterTable WHERE statis_day = $statisDate AND facct_type = '3' ) ta1
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta2 ON ( ta1.fuin = ta2.fuin AND ta1.facct_type = ta2.facct_type ) ) ta3
         |LEFT JOIN ( SELECT * FROM $qpointInfoTable WHERE statis_date = $statisDate ) ta4 ON ( ta3.fuin = ta4.fuin AND ta3.facct_type = ta4.facct_type ) )
         |WHERE fio_flag = 2 AND end_time IS NOT NULL AND ftran_type <> 101 AND ftran_type <> 102 AND UNIX_TIMESTAMP(end_time) >= UNIX_TIMESTAMP(fchg_time) AND ( beg_time IS null OR UNIX_TIMESTAMP(beg_time) < UNIX_TIMESTAMP(fchg_time) )
         |GROUP BY fuin ,facct_type ) t4 ON ( t3.fuin = t4.fuin AND t3.facct_type = t4.facct_type )
         |WHERE CASE WHEN t3.out_amt IS NULL THEN - t4.in_amt WHEN t4.in_amt IS NULL THEN t3.out_amt ELSE t3.out_amt - t4.in_amt END != 0 )""".stripMargin

    spark.sql(qpointSql).show()
    // Ad-hoc smoke queries, kept for manual debugging:
//    spark.sql(s"select * from $qpointInfoTable limit 10").show()
//    spark.sql(s"select * from $qpointWaterTable limit 10").show()
  }

}
