package com.apache.flink.udf

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.annotation.{DataTypeHint, FunctionHint}
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.functions.{FunctionContext, ScalarFunction}
import org.apache.flink.types.Row
// The first thing to do during development: bring the implicit conversions into scope
import org.apache.flink.api.scala._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api._
/**
 * IP-resolution UDF demo. Input: an IP string; output: "province-city".
 **/
object IPParserFunctions {
  /**
   * Entry point: reads IP strings from a socket, registers the `ip_parser`
   * scalar UDF, and prints `(ip, ip_parser(ip))` rows produced by a SQL query.
   */
  def main(args: Array[String]): Unit = {
    // Single parallelism keeps the console output deterministic and readable.
    val environment = StreamExecutionEnvironment.getExecutionEnvironment
    environment.setParallelism(1)

    // Each line arriving on the socket is expected to be one IP address.
    val ipLines = environment.socketTextStream("hadoop000", 9527)

    val tableEnv = StreamTableEnvironment.create(environment)
    // Expose the stream as a one-column table named "access".
    tableEnv.createTemporaryView("access", ipLines, 'ip)
    // Register the scalar UDF under the SQL name "ip_parser".
    tableEnv.createTemporaryFunction("ip_parser", new IPParse)

    val resolved = tableEnv.sqlQuery("select ip, ip_parser(ip) from access")
    resolved.toAppendStream[Row].print("sql")

    environment.execute(getClass.getCanonicalName)
  }
}

@FunctionHint(
  input = Array(new DataTypeHint("STRING")),
  output = new DataTypeHint("STRING")
)
class IPParse extends ScalarFunction {
  // NOTE(review): the real IPUtil-backed lookup is commented out throughout this
  // class; eval below operates on a placeholder string instead of a lookup result.
//  var ipUtils:IPUtil = null

  // Called once per task before any eval(); intended to initialize the IP lookup helper.
  override def open(context: FunctionContext): Unit = {
//    ipUtils = IPUtil.getInstance()
  }

  // Called once at task shutdown; intended to release the IP lookup helper.
  override def close(): Unit = {
//    if(null != ipUtils) {
//      ipUtils = null
//    }
  }

  /**
   * Maps an IP string to "province-city".
   *
   * BUG (flagged, not silently changed): `ipInfos` is the *literal string*
   * "ipUtils.getInfos(ip)", not the result of an IP lookup, so indexing it
   * yields single characters ('p' at index 1, 'U' at index 2) and this method
   * returns the constant "p-U" for every input — `ip` is never used.
   * Restore the `ipUtils.getInfos(ip)` call (returning the info array) to get
   * real results. This revision adds an explicit return type and bounds-checks
   * the indexing so a shorter placeholder cannot throw
   * StringIndexOutOfBoundsException; current output is unchanged.
   */
  def eval(ip: String): String = {
    val ipInfos = "ipUtils.getInfos(ip)" // TODO: replace with ipUtils.getInfos(ip)
    if (ipInfos.length > 2) ipInfos(1) + "-" + ipInfos(2)
    else "unknown-unknown" // defensive fallback; unreachable with the current placeholder
  }

//  override def getTypeInference(typeFactory: DataTypeFactory): TypeInference = {
//    super.getTypeInference(typeFactory)
//  }
//  override def getResultType(signature: Array[Class[_]]): TypeInformation[_] = {
//    Types.STRING()
//  }
}