package com.desheng.bigdata.flink.table

import java.text.SimpleDateFormat
import java.time.{LocalDateTime, ZoneId}
import java.time.format.DateTimeFormatter

import com.alibaba.fastjson.JSON
import org.apache.flink.api.scala.{DataSet, ExecutionEnvironment, _}
import org.apache.flink.table.api.scala.BatchTableEnvironment
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.types.Row
/**
  * Flink Table API UDF example.
  * Input: user-browsing events, one JSON record per line, e.g.
  * {"userID": 2, "eventTime": "2020-10-01 10:02:00", "eventType": "browse", "productID": "product_5", "productPrice": 20.99}
  *
  * Goal: convert eventTime into an epoch timestamp in milliseconds, e.g.
  *     2020-10-01 10:02:00 --> 1604645927877
  */
object _07FlinkTable2UDFOps {
    def main(args: Array[String]): Unit = {
        // Batch execution environment plus the table environment built on top of it.
        val env = ExecutionEnvironment.getExecutionEnvironment
        val tableEnv = BatchTableEnvironment.create(env)

        // Read the input file and parse each JSON line into a User instance.
        val users: DataSet[User] = env
                .readTextFile("file:/E:/data/flink/flink-udf.json")
                .map(JSON.parseObject(_, classOf[User]))

        // Expose the DataSet as a table and register the scalar UDF under
        // the name used in the SQL below.
        tableEnv.registerTable("userTbl", tableEnv.fromDataSet(users))
        tableEnv.registerFunction("date2TimeStamp", new MyDate2TimeStampFunction())

        // Project userID/eventTime and derive an epoch-millisecond column via the UDF.
        val result = tableEnv.sqlQuery(
                """
                  |select
                  |  userID,
                  |  eventTime,
                  |  date2TimeStamp(eventTime) timestamps
                  |from userTbl
                """.stripMargin)

        tableEnv.toDataSet[Row](result).print()
    }
}
/*
    Custom UDF implementing the conversion logic.
    The author must define a method named `eval` that contains the UDF's
    business logic; the method may be overloaded. This closely mirrors a
    Hive UDF. Flink enforces the contract with this error message:
    "does not implement at least one method named 'eval' which is public,
    not abstract and (in case of table functions) not static"
 */
class MyDate2TimeStampFunction extends ScalarFunction {

    // BUGFIX: the original kept a shared SimpleDateFormat in a field.
    // SimpleDateFormat is mutable and not thread-safe, and Flink may invoke
    // a ScalarFunction concurrently from parallel task slots, which can
    // silently corrupt parse results or throw. DateTimeFormatter is
    // immutable and thread-safe, so sharing one instance is safe.
    private val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")

    /**
      * Parses an event-time string such as "2020-10-01 10:02:00" and returns
      * the epoch timestamp in milliseconds.
      *
      * The string is interpreted in the JVM's default time zone — the same
      * behavior as the original SimpleDateFormat.parse, so existing results
      * are unchanged.
      *
      * @param eventTime timestamp string in "yyyy-MM-dd HH:mm:ss" format
      * @return milliseconds since the Unix epoch
      */
    def eval(eventTime: String): Long = {
        LocalDateTime.parse(eventTime, formatter)
                .atZone(ZoneId.systemDefault())
                .toInstant
                .toEpochMilli
    }

}