package com.spark.zhou.demo.sparksql.sqlcontext;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;

/**
 * Demo: read an ORC data file into a {@link Dataset} and print its contents.
 *
 * <p>Expects an ORC file/directory named {@code orc_file} at the classpath root.
 *
 * @author ZhOu
 * @since 2018/5/31
 */
public class OrcData {
    /** Path to the ORC data, resolved relative to the classpath root. */
    private static final String ORC_PATH = OrcData.class.getResource("/").getPath() + "orc_file";

    public static void main(String[] args) {
        SparkSession sparkSession = SparkSession.builder()
                .appName("ORC_DATA")
                .enableHiveSupport()
                .getOrCreate();
        try {
            // SQLContext is deprecated since Spark 2.0 — read directly through the session.
            // (Also dropped an unused JavaSparkContext local that was never referenced.)
            Dataset<Row> dataset = sparkSession.read().orc(ORC_PATH);
            dataset.show();
        } finally {
            // Release driver/executor resources; the original leaked the session.
            sparkSession.stop();
        }
    }
}
