package com.study.flink.source

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.source.{RichSourceFunction, SourceFunction}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.hadoop.hbase.client.{Connection, Scan, Table}

/**
  * HBase Source Demo
  *
  * @author stephen
  * @date 2019-07-23 18:37
  */
object ScalaHBaseSourceDemo {

  /**
    * Windowed word count over a local socket text stream:
    * reads lines from localhost:9999, splits on spaces, and prints
    * per-word counts aggregated in 10-second tumbling windows.
    */
  def main(args: Array[String]): Unit = {
    // Set up the streaming execution environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Read raw lines from the socket.
    import org.apache.flink.api.scala._
    val lines = env.socketTextStream("localhost", 9999)

    // Tokenize, pair each word with a count of 1, key by the word,
    // and sum counts within each 10-second window.
    val counts = lines
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .keyBy(_._1)
      .timeWindow(Time.seconds(10))
      .reduce((left, right) => (left._1, left._2 + right._2))

    // Emit results to stdout.
    counts.print()

    // Launch the job.
    env.execute("Demo")
  }

}

/** Element type emitted by [[HBaseSource]]. NOTE(review): `sid` is presumably the student id / row key — confirm against the HBase schema. */
case class Student(name: String, age: Int, sex: String, sid: String)

class HBaseSource extends RichSourceFunction[Student] {
  // Reference: https://blog.csdn.net/javajxz008/article/details/83269108
  //
  // NOTE(review): open() and run() are still unimplemented — the HBase
  // Connection/Table/Scan should be created in open() and rows emitted via
  // ctx.collect(...) in run(). Only the lifecycle handling is fixed here.

  // Cooperative stop flag. cancel() is called from a different thread than
  // run(), so the flag is volatile; run() should loop while isRunning.
  @volatile private var isRunning: Boolean = true

  var conn: Connection = _
  var table: Table = _
  var scan: Scan = _

  override def open(parameters: Configuration): Unit = {
    super.open(parameters)
    // TODO: create the HBase Connection, Table and Scan here.
  }

  override def run(ctx: SourceFunction.SourceContext[Student]): Unit = {
    // TODO: scan the table and ctx.collect(Student(...)) while isRunning.
  }

  /**
    * Signals run() to stop. Per Flink's SourceFunction contract this is
    * invoked from a separate thread, so it must only flip the flag —
    * resource cleanup is deferred to close(), which the runtime calls on
    * both normal completion and cancellation.
    */
  override def cancel(): Unit = {
    isRunning = false
  }

  /** Releases HBase resources exactly once during task shutdown. */
  override def close(): Unit = {
    if (table != null) {
      table.close()
    }
    if (conn != null) {
      conn.close()
    }
    super.close()
  }
}