package com;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.storage.StorageLevel;

public class Sixoo2 {

    /**
     * Small Spark driver: reads a CSV file, repartitions and caches it,
     * re-writes it in Parquet format, then creates a Hive table (DDL taken
     * from {@code Hivetal.table_c}), loads an ORC file into it and prints
     * a row count.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        // External SparkConf merged into the builder below via .config(conf).
        SparkConf conf = new SparkConf();
        SparkSession spark = SparkSession
                .builder()
                .appName("Java Spark SQL basic example")
                .config("spark.io.compression.codec", "snappy")
                .config("spark.debug.maxToStringFields", "20000")
//                .config("spark.sql.orc.impl", "native")
                .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
                .config("spark.files.overwrite","true")
                .config("spark.sql.files.maxPartitionBytes","536870912")// 512mb
//                .config("spark.sql.files.openCostInBytes","536870912")// 512mb
                .config(conf)
                .master("local[4]")
                .enableHiveSupport()
                .getOrCreate();

        try {
            // CSV with a header row; repartition to 4 and cache deserialized
            // rows in memory since the dataset is used more than once below.
            Dataset<Row> data = spark.read()
                    .option("header", "true")
                    .csv("data/fly1.txt")
                    .repartition(4)
                    .persist(StorageLevel.MEMORY_ONLY());

            // Sanity check: should print 4 after the repartition above.
            System.out.println(data.rdd().partitions().length);

//        data.write().orc("data/fly1.orc");
            // NOTE(review): the output path ends in ".orc" but the data is
            // written in Parquet format — confirm the intended format/name.
            data.write().parquet("data/fly1.orc");

            // Cached copy is no longer needed once the write has finished.
            data.unpersist();

            // DDL for the target Hive table, defined in the project-local
            // Hivetal class (not visible in this file).
            spark.sql(Hivetal.table_c);

            // NOTE(review): this loads 'data/fly11.orc', not the
            // 'data/fly1.orc' written above — verify the path is correct.
            spark.sql("LOAD DATA LOCAL  INPATH 'data/fly11.orc'  INTO TABLE fly_test555");
            spark.sql("select count(0) from fly_test555").show();
        } finally {
            // Shut the session down cleanly even if any step above fails;
            // the original never stopped the session.
            spark.stop();
        }
    }
}
