package sparksql

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.text.SimpleDateFormat
import java.util.Date

import kafka.TestStreamingKafkaMysql.intoMysql
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.sql.{DataFrame, ForeachWriter, Row, SaveMode, SparkSession}

object TestSparkSqlHbasemysql {

  /** Current wall-clock time, formatted for the progress messages on stderr. */
  private def now(): String =
    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date)

  /**
    * Reads the student / course / scourse tables from HBase, registers them
    * as temporary views, and runs a join query selecting students with a
    * score above 80 in course "2", ordered by score descending.
    *
    * @param hbaseConf  configuration whose INPUT_TABLE points at "student"
    * @param hbaseConf1 configuration whose INPUT_TABLE points at "course"
    * @param hbaseConf2 configuration whose INPUT_TABLE points at "scourse"
    * @param spark      the active SparkSession
    * @return the query result as a DataFrame
    */
  def getHbase( hbaseConf: Configuration,hbaseConf1: Configuration,hbaseConf2: Configuration,spark: SparkSession): DataFrame ={

    // Driver-side helper: scan one HBase table (chosen by the configuration's
    // TableInputFormat.INPUT_TABLE) into an RDD of (rowKey, Result) pairs.
    // Replaces three copy-pasted newAPIHadoopRDD calls.
    def scanTable(conf: Configuration) =
      spark.sparkContext.newAPIHadoopRDD(
        conf,
        classOf[TableInputFormat],
        classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
        classOf[org.apache.hadoop.hbase.client.Result])

    val studentRDD = scanTable(hbaseConf)
    val courseRDD  = scanTable(hbaseConf1)
    val scourseRDD = scanTable(hbaseConf2)

    // Decode each Result into a tuple of strings.  Bytes.toString delegates
    // to getBytes() but fixes the charset to UTF-8, whereas a bare getBytes()
    // would use the platform default encoding.
    val studentTuples = studentRDD.map { case (_, result) =>
      (Bytes.toString(result.getRow),
        Bytes.toString(result.getValue(Bytes.toBytes("s1"), Bytes.toBytes("name"))),
        Bytes.toString(result.getValue(Bytes.toBytes("s1"), Bytes.toBytes("sex"))),
        Bytes.toString(result.getValue(Bytes.toBytes("s1"), Bytes.toBytes("class"))))
    }
    val courseTuples = courseRDD.map { case (_, result) =>
      (Bytes.toString(result.getRow),
        Bytes.toString(result.getValue(Bytes.toBytes("c1"), Bytes.toBytes("cname"))))
    }
    val scourseTuples = scourseRDD.map { case (_, result) =>
      (Bytes.toString(result.getRow),
        Bytes.toString(result.getValue(Bytes.toBytes("sc1"), Bytes.toBytes("cid"))),
        Bytes.toString(result.getValue(Bytes.toBytes("sc1"), Bytes.toBytes("score"))))
    }

    // RDD -> DataFrame, then register each as a temp view for Spark SQL.
    import spark.implicits._
    studentTuples.toDF("id", "name", "sex", "class").createOrReplaceTempView("student")
    courseTuples.toDF("cid", "cname").createOrReplaceTempView("course")
    scourseTuples.toDF("id", "cid", "score").createOrReplaceTempView("scourse")

    // Cache the views so the SQL below does not rescan HBase per action.
    spark.table("student").cache()
    spark.table("course").cache()
    spark.table("scourse").cache()

    // Fixed: the original `println("msg", date)` relied on deprecated
    // auto-tupling and printed a Tuple2; interpolate into one string instead.
    System.err.println(s"完成临时表生成 ${now()}")

    // NOTE(review): `score > '80'` and `order by score` compare scores as
    // STRINGS, which is only correct while every score has the same number of
    // digits (e.g. '9' > '80' lexicographically, '100' < '80').  Consider
    // `cast(score as int)` — left unchanged here pending confirmation of the
    // data format.  Course id '2' and threshold '80' are hard-coded.
    val res5 = spark.sql("select student.id as id, name, class, cname, score from student,course,scourse where student.id=scourse.id and scourse.cid=course.cid and scourse.cid = '2' and score > '80' order by score desc")
    System.err.println(s"完成SQL ${now()}")

    res5
  }

  /**
    * Appends the query result to the MySQL table `testspark.testmysql`
    * over JDBC.
    *
    * @param df the DataFrame to persist
    */
  def saveData(df:DataFrame): Unit ={
    System.err.println(s"time2 ${now()}")

    val prop = new java.util.Properties
    prop.setProperty("user", "root")
    // SECURITY(review): credentials are hard-coded in source; move them to
    // external configuration (spark-submit conf / env) before production use.
    prop.setProperty("password", "huake@2021")
    prop.setProperty("driver", "com.mysql.jdbc.Driver")

    df.write.mode(SaveMode.Append).jdbc("jdbc:mysql://dw-Huake05:3306/testspark?characterEncoding=UTF-8","testmysql",prop)

    System.err.println(s"完成插入: ${now()}")
  }

  /**
    * Entry point: reads the three HBase tables, runs the scoring query,
    * prints the result, appends it to MySQL, then stops the session.
    */
  def main(args: Array[String]): Unit = {
    System.err.println(s"程序启动: ${now()}")

    // Create the Spark session (master/executor settings come from
    // spark-submit, not from code).
    val spark = SparkSession
      .builder()
      .appName("TestSparkSqlHbase")
      .getOrCreate()

    // Builds an HBase scan configuration for one table; all three tables
    // live on the same ZooKeeper quorum / master, so only the table name
    // varies.  Replaces three copy-pasted configuration blocks.
    def hbaseConfFor(table: String): Configuration = {
      val conf = HBaseConfiguration.create()
      conf.set("hbase.zookeeper.property.clientPort", "2181")
      conf.set("hbase.zookeeper.quorum", "dw-Huake05")
      conf.set("hbase.master", "dw-Huake05:60010")
      conf.set(TableInputFormat.INPUT_TABLE, table)
      conf
    }

    // Query HBase via Spark SQL.
    val df = getHbase(hbaseConfFor("student"), hbaseConfFor("course"), hbaseConfFor("scourse"), spark)
    // Print the result for inspection.
    df.show()
    // Persist the result to MySQL.
    saveData(df)
    // Shut down the session.
    spark.stop()
  }

}
