package com.hngy.java.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * 需求：计算TopN主播
 * 1：直接使用sparkSession中的load方式加载json数据
 * 2：对这两份数据注册临时表
 * 3：执行sql计算TopN主播
 * 4：使用foreach将结果打印到控制台
 */
/**
 * Computes the top-3 anchors (by total gold received) per area.
 *
 * <p>Steps:
 * <ol>
 *   <li>Load the two JSON logs via {@code SparkSession.read().load()}.</li>
 *   <li>Register both datasets as temporary views.</li>
 *   <li>Run a layered SQL query to compute the per-area top 3.</li>
 *   <li>Print the result to the console via {@code foreach}.</li>
 * </ol>
 */
public class TopNAnchorJava {

    /** Default input paths, used when no command-line arguments are supplied. */
    private static final String DEFAULT_VIDEO_INFO_PATH =
            "F:\\BaiduNetdiskDownload\\hadoop\\source\\bigdata_course_materials\\spark2\\video_info.log";
    private static final String DEFAULT_GIFT_RECORD_PATH =
            "F:\\BaiduNetdiskDownload\\hadoop\\source\\bigdata_course_materials\\spark2\\gift_record.log";

    /**
     * Entry point.
     *
     * @param args optional: args[0] = video-info log path, args[1] = gift-record log path;
     *             falls back to the original hard-coded paths when absent
     */
    public static void main(String[] args) {
        // Allow the input locations to be overridden without recompiling.
        String videoInfoPath = args.length > 0 ? args[0] : DEFAULT_VIDEO_INFO_PATH;
        String giftRecordPath = args.length > 1 ? args[1] : DEFAULT_GIFT_RECORD_PATH;

        SparkConf conf = new SparkConf();
        conf.setMaster("local");

        // SparkSession wraps both SparkContext and SQLContext.
        SparkSession sparkSession = SparkSession.builder()
                .appName("TopNAnchorJava") // was "SqlDemoJava" — copy-paste inconsistency with the class name
                .config(conf)
                .getOrCreate();
        try {
            // 1: load the JSON data directly through the SparkSession reader
            Dataset<Row> videoInfoDf = sparkSession.read().format("json").load(videoInfoPath);
            Dataset<Row> giftRecordDf = sparkSession.read().format("json").load(giftRecordPath);

            // 2: register both datasets as temporary views
            videoInfoDf.createOrReplaceTempView("video_info");
            giftRecordDf.createOrReplaceTempView("gift_record");

            // 3: layered SQL computing the per-area top-3 anchors
            String sqlVideo = "select uid,vid,area from video_info";
            // total gold per live session (vid)
            String sqlGift = "select vid,sum(gold) as gold_sum from gift_record group by vid";
            // join session info with per-session gold totals
            String sqlBase = "select a.uid,a.vid,a.area,b.gold_sum from (" + sqlVideo + " ) as a " + "join (" + sqlGift + ") as b ON a.vid = b.vid";
            // aggregate per anchor (uid); max(area) picks a representative area for the anchor
            String sqlT1 = "select t1.uid,max(t1.area) as area,sum(t1.gold_sum) as gold_sum_all from (" + sqlBase + ") as t1 group by t1.uid";
            // rank anchors within each area by total gold, descending
            String sqlT2 = "select t2.uid,t2.area,t2.gold_sum_all,row_number() over (partition by area order by gold_sum_all desc) as num from (" + sqlT1 + ") t2";
            // keep only the top 3 per area, formatted as "uid:gold"
            String sqlT3 = "select t3.area,concat(t3.uid,':',cast(t3.gold_sum_all as int)) as topn from (" + sqlT2 + ") t3 where t3.num <=3";
            // collapse each area's top-3 entries into one comma-separated string
            String sqlT4 = "select t4.area,concat_ws(',',collect_list(t4.topn)) as topn_list from(" + sqlT3 + ") as t4 group by t4.area";

            Dataset<Row> resDf = sparkSession.sql(sqlT4);

            // 4: print each result row to the console.
            // NOTE(review): foreach runs on the executors; output lands on the driver
            // console only because master is "local".
            resDf.javaRDD().foreach((VoidFunction<Row>) row ->
                    System.out.println(row.getAs("area").toString() + "\t" + row.getAs("topn_list").toString()));
        } finally {
            // Ensure the session (and its SparkContext) is released even if the job fails.
            sparkSession.stop();
        }
    }
}
