package com.demo.udf;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.*;

import java.util.Arrays;
import java.util.List;

/**
 * Demo driver: registers IP-related UDFs, broadcasts an in-memory IP library,
 * and runs a sample SQL query that resolves source IPs to area codes.
 */
public class UdfMain {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("UdfMain")
                .master("local[*]")
                .getOrCreate();
        try {
            SparkIpUDF sparkIpUDF = new SparkIpUDF(spark);

            // Register the plain UDFs (ipv4_full, ipv4_to_num, ...).
            sparkIpUDF.registerCommon();

            // Load the IP library data.
            Dataset<Row> ipLibDF = spark.read().format("json").load("data/ip/ip_lib_thin");

            // Normalize the IP columns; repartition(1) so the collected rows
            // come back globally sorted by start_ip.
            Dataset<Row> ipLibSelected = ipLibDF.selectExpr(
                            "ipv4_full(start_ip) as start_ip",
                            "ipv4_full(end_ip) as end_ip",
                            "area_code"
                    ).repartition(1)
                    .orderBy("start_ip");

            // Convert to an array of (start_ip, end_ip, area_code) triples.
            List<Row> ipLibList = ipLibSelected.collectAsList();
            String[][] ipLibArr = new String[ipLibList.size()][3];
            for (int i = 0; i < ipLibList.size(); i++) {
                Row row = ipLibList.get(i);
                ipLibArr[i][0] = row.getString(0);
                ipLibArr[i][1] = row.getString(1);
                ipLibArr[i][2] = row.getString(2);
            }

            JavaSparkContext sparkContext = new JavaSparkContext(spark.sparkContext());

            // Broadcast the IP library so every executor shares one read-only
            // copy; otherwise each task would serialize its own copy of the
            // array, which performs very poorly.
            Broadcast<String[][]> ipLibArrBroadcast = sparkContext.broadcast(ipLibArr);

            // Register the broadcast-backed IP lookup UDF.
            sparkIpUDF.registerIpSearch(ipLibArrBroadcast);

            // Build test data directly as a Dataset (no RDD round-trip needed)
            // and register it as temp view "test".
            List<String> mockData = Arrays.asList("27.186.0.2", "159.226.222.3");
            Dataset<Row> df = spark.createDataset(mockData, Encoders.STRING()).toDF("source_ip");
            df.createOrReplaceTempView("test");

            Dataset<Row> newDF = spark.sql(
                    "select source_ip, " +
                            "ipv4_to_num(source_ip) as source_ip_num, " +
                            "ipv4_full(source_ip) as source_ip_full, " +
                            "ip_search(source_ip) as ip_area " +
                            "from test"
            );

            newDF.show();
        } finally {
            // Always release Spark resources, even when the job fails.
            spark.stop();
        }
    }
}
