package Parequet;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroSchemaConverter;
import org.apache.parquet.avro.AvroWriteSupport;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.MessageType;

import java.io.File;
import java.io.IOException;

/**
 * Scaffolding for writing Avro {@code GenericRecord}s to a Parquet file.
 *
 * <p>NOTE(review): the package name {@code Parequet} looks like a typo for
 * {@code parquet} (and violates lowercase package convention) — renaming it
 * requires moving the file, so it is only flagged here.
 */
public class Test {
    /*public static void main(String[] args) throws IOException {
        parquetWrite();
    }*/

    /**
     * Writes Parquet data using an Avro schema loaded from {@code schemaLocation}.
     *
     * <p>The write logic is still commented out (the Avro/Parquet jars and the
     * schema file must be present before it can run), but the commented code
     * below has been corrected relative to the original draft:
     * <ul>
     *   <li>{@code new Schema().Parser.parse(...)} does not compile —
     *       {@code Parser} is a static nested class: {@code new Schema.Parser().parse(...)}.</li>
     *   <li>{@code AvroWriteSupport<Bean>} was inconsistent with the
     *       {@code ParquetWriter<GenericRecord>} it fed — unified on
     *       {@code GenericRecord}.</li>
     *   <li>The raw 134217728 block size is {@code ParquetWriter.DEFAULT_BLOCK_SIZE}
     *       (128 MiB); the named constant is used instead.</li>
     *   <li>The writer is an {@code AutoCloseable} and must be closed —
     *       wrapped in try-with-resources.</li>
     * </ul>
     *
     * @throws IOException if reading the schema or writing the file fails
     */
    private static void parquetWrite() throws IOException {
        String parquetFile = "E:\\test\\hive\\ParquetTest";
        String schemaLocation = "E:\\test\\hive\\ParquetTest\\Bean_json.json";

        // Was `new Path("")` — an empty target path that would fail at write
        // time; point it at the intended output location instead.
        Path path = new Path(parquetFile);

        /*
        Schema avroSchema = new Schema.Parser().parse(new File(schemaLocation));
        MessageType parquetSchema = new AvroSchemaConverter().convert(avroSchema);
        AvroWriteSupport<GenericRecord> writeSupport = new AvroWriteSupport<>(parquetSchema, avroSchema);

        // NOTE(review): this ParquetWriter constructor is deprecated in recent
        // parquet-mr releases; prefer AvroParquetWriter.builder(path)... once
        // that API is available on the classpath.
        try (ParquetWriter<GenericRecord> parquetWriter = new ParquetWriter<>(
                path, writeSupport, CompressionCodecName.SNAPPY,
                ParquetWriter.DEFAULT_BLOCK_SIZE, 1024 * 1024)) {
            // parquetWriter.write(record); // write GenericRecord instances here
        }
        */
    }
}
