package cn.spark.study.sql

import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.types.StringType

/**
 * Demonstrates registering and invoking user-defined functions with the
 * pre-2.0 `SQLContext` API:
 *  - `udaf`: registers a custom aggregate (`StringCount`, defined elsewhere
 *    in this project) and groups rows by name.
 *  - `udf`: registers an inline scalar function computing string length.
 */
object UDF {
  def main(args: Array[String]): Unit = {
    // ContextUtil is a project-local helper; presumably builds a local
    // SparkContext named "UDF" — confirm against its definition.
    val sc = new ContextUtil().getContextL("UDF")
    val sqlC = new SQLContext(sc)
    try {
      udaf(sc, sqlC)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

  /**
   * Registers the custom aggregate function `StringCount` as "strCount"
   * and runs a grouped query over a small in-memory name list.
   *
   * @param sc   active SparkContext used to parallelize the sample data
   * @param sqlC SQLContext used to build the DataFrame and run SQL
   */
  def udaf(sc: SparkContext, sqlC: SQLContext): Unit = {
    val names = Array("Leo", "Marray", "Jack", "Tom", "Tom", "Tom", "Tom",
      "Tom", "Tom", "Jack", "Jack", "Jack")
    // Wrap each raw string in a Row so it matches the schema below.
    val namesRDD = sc.parallelize(names, 2).map { Row(_) }

    val structType = StructType(Array(StructField("name", StringType, true)))

    val namesDF = sqlC.createDataFrame(namesRDD, structType)
    namesDF.registerTempTable("names")

    // Register the user-defined aggregate function (UDAF).
    sqlC.udf.register("strCount", new StringCount)
    sqlC.sql("select name ,strCount(name) from names group by name").collect().foreach { println }
  }

  /**
   * Registers an inline scalar UDF "StrLen" (string length) and applies it
   * to every row of a small in-memory name list.
   *
   * @param sc   active SparkContext used to parallelize the sample data
   * @param sqlC SQLContext used to build the DataFrame and run SQL
   */
  def udf(sc: SparkContext, sqlC: SQLContext): Unit = {
    val names = Array("Leo", "Marray", "Jack", "Tom")
    val namesRDD = sc.parallelize(names, 2).map { Row(_) }

    val structType = StructType(Array(StructField("name", StringType, true)))

    val namesDF = sqlC.createDataFrame(namesRDD, structType)
    namesDF.registerTempTable("names")

    // Register the user-defined scalar function (UDF).
    sqlC.udf.register("StrLen", (str: String) => str.length())
    sqlC.sql("select name ,StrLen(name) from names").collect().foreach { println }
  }
}