package cn.spark.study.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;

/**
 * Hive数据源
 *
 * @author jun.zhang6
 * @date 2020/11/15
 */
public class HiveDataSource {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("HiveDataSource");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Create a HiveContext from the underlying SparkContext; it reads
        // hive-site.xml from the classpath to locate the metastore.
        HiveContext hiveContext = new HiveContext(sc.sc());

        // Drop student_infos if it already exists so the LOAD below starts clean.
        // BUG FIX: was "IF EXIST" (missing S), which is a HiveQL parse error;
        // the student_scores branch below already used the correct "IF EXISTS".
        hiveContext.sql("DROP TABLE IF EXISTS student_infos");

        // Create the student_infos table if it does not exist.
        hiveContext.sql("CREATE TABLE IF NOT EXISTS student_infos (name STRING,age INT)");

        // Load the student basic-info file into student_infos
        // (LOCAL INPATH reads from the driver node's local filesystem).
        hiveContext.sql("LOAD DATA LOCAL INPATH '/usr/local/spark-study/resources/student_infos.txt' INTO TABLE student_infos");

        // Same pattern for student_scores: drop, create, load.
        hiveContext.sql("DROP TABLE IF EXISTS student_scores");
        hiveContext.sql("CREATE TABLE IF NOT EXISTS student_scores (name STRING, score INT)");
        hiveContext.sql("LOAD DATA LOCAL INPATH '/usr/local/spark-study/resources/student_scores.txt' INTO TABLE student_scores");

        // Join the two tables and keep students whose score is at least 80.
        DataFrame goodStudentsDF = hiveContext.sql("SELECT si.name, si.age, ss.score "
                + "FROM student_infos si "
                + "JOIN student_scores ss ON si.name=ss.name "
                + "WHERE ss.score>=80");

        // Persist the query result as a Hive table. Drop any stale copy first.
        hiveContext.sql("DROP TABLE IF EXISTS good_student_infos");
        // saveAsTable materializes a permanent table that survives restarts,
        // unlike registerTempTable, whose temp table is lost when the app exits.
        goodStudentsDF.saveAsTable("good_student_infos");

        // Build a DataFrame directly from the Hive table we just wrote.
        DataFrame goodStudentRowsDF = hiveContext.table("good_student_infos");

        // collect() pulls all rows to the driver — fine for this small demo set.
        Row[] rows = goodStudentRowsDF.collect();

        for (Row row : rows) {
            System.out.println(row);
        }

        sc.close();
    }
}
