package com.apache.flink.udf

import com.apache.flink.bean.Domain.Access
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.types.Row
// First thing to do when developing: import the Scala API implicits below,
// otherwise the DataStream/Table Scala conversions will not compile.
import org.apache.flink.api.scala._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api._
/**
 * @author PK哥
 **/
/**
 * Demonstrates calling a scalar UDF two ways on the same source:
 * inline through the Table API expression DSL, and by name from SQL
 * after registering it as a temporary function.
 */
object ScalarFunctions {
  def main(args: Array[String]): Unit = {
    // Single-parallelism streaming environment plus its Table bridge.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tEnv = StreamTableEnvironment.create(env)

    // Parse each CSV line into an Access record.
    // Assumes "time,domain,traffic" per line — TODO confirm against data/access.log.
    val accessStream = env.readTextFile("data/access.log")
      .map { line =>
        val fields = line.split(",")
        Access(fields(0).trim.toLong, fields(1).trim, fields(2).trim.toDouble)
      }

    val accessTable: Table = tEnv.fromDataStream(accessStream)
//    accessTable.toAppendStream[Row].print()

    // 1) Inline UDF call: a ScalarFunction instance is applied directly
    //    inside the expression DSL, no registration needed.
    val hashcode = new HashFunction
    accessTable
      .select('domain, 'traffic, hashcode('domain))
      .toAppendStream[Row]
      .print("API")

    // 2) SQL call: register the table and the same UDF instance under a
    //    name, then reference both from a query.
    tEnv.createTemporaryView("access", accessTable)
    tEnv.createTemporaryFunction("hashcode2", hashcode)
    tEnv
      .sqlQuery("select domain, traffic, hashcode2(domain) from access")
      .toAppendStream[Row]
      .print("sql")

    env.execute(getClass.getCanonicalName)
  }
}


/**
 * Scalar UDF that returns the JVM hash code of its string argument.
 *
 * Flink resolves the public `eval` method reflectively at runtime,
 * so its name and signature must stay exactly as declared here.
 */
class HashFunction extends ScalarFunction {
  // `hashCode` is a pure accessor, so the empty parens are omitted.
  def eval(input: String): Int = input.hashCode
}