package architect;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ParquetProperties;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroup;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.ParquetFileWriter;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

import java.io.IOException;

import static java.lang.System.out;

/**
 * Examples of writing and reading Parquet files with the parquet-mr example {@code Group} API:
 * parsing a schema from a string, extracting a schema from an existing file's footer,
 * writing sample rows, and reading rows back.
 *
 * @author guan.xin
 * @since JDK8 (created 2022-04-07)
 */
public class ParquetWriterExample {

    /**
     * Parquet message definition for the sample access-log records written by
     * {@link #testParquetWriter()}. Plain compile-time concatenation — a
     * StringBuilder gains nothing for a static constant.
     */
    private static final String SCHEMA_STR =
            "message test {"
                    + "optional int64 log_id;"
                    + "optional binary idc_id;"
                    + "optional int64 house_id;"
                    + "optional int64 src_ip_long;"
                    + "optional int64 dest_ip_long;"
                    + "optional int64 src_port;"
                    + "optional int64 dest_port;"
                    + "optional int32 protocol_type;"
                    + "optional binary url64;"
                    + "optional binary access_time;"
                    + "}";

    /** Parsed once at class load; shared by the writer examples below. */
    static MessageType schema = MessageTypeParser.parseMessageType(SCHEMA_STR);

    /**
     * Prints the statically defined {@link MessageType} schema.
     */
    public static void testParseSchema() {
        out.println(schema.toString());
    }

    /**
     * Reads the footer of an existing Parquet file and prints the schema stored in it.
     *
     * @throws Exception if the file cannot be opened or its footer cannot be parsed
     */
    public static void testGetSchema() throws Exception {
        Configuration configuration = new Configuration();
        Path parquetFilePath = new Path("/Users/guan.xin/Gitee/Python/pythonProject/parquet_files/allin/so_do_order.parquet");
        // NOTE(review): readFooter is deprecated in recent parquet-mr releases;
        // ParquetFileReader.open(HadoopInputFile.fromPath(...)).getFooter() is the replacement.
        ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration,
                parquetFilePath, ParquetMetadataConverter.NO_FILTER);
        // Local name differs from the static field to avoid shadowing confusion.
        MessageType fileSchema = readFooter.getFileMetaData().getSchema();
        out.println(fileSchema.toString());
    }

    /**
     * Writes 1000 identical sample rows to a new Parquet file using the example
     * {@link Group} API. {@code Mode.CREATE} means this fails if the target file
     * already exists.
     *
     * @throws IOException if the file cannot be created or a row cannot be written
     */
    private static void testParquetWriter() throws IOException {
        Path file = new Path("/Users/guan.xin/Gitee/Java/Demo/bigdata/flink-sql-java/files/test.parq");
        ExampleParquetWriter.Builder builder = ExampleParquetWriter
                .builder(file).withWriteMode(ParquetFileWriter.Mode.CREATE)
                .withWriterVersion(ParquetProperties.WriterVersion.PARQUET_1_0)
                .withCompressionCodec(CompressionCodecName.SNAPPY)
                //.withConf(configuration)
                .withType(schema);
        SimpleGroupFactory groupFactory = new SimpleGroupFactory(schema);
        // One fixed sample record; every iteration writes the same values.
        String[] accessLog = {"111111", "22222", "33333", "44444", "55555", "666666", "777777", "888888", "999999", "101010"};
        // try-with-resources guarantees the writer (and its underlying file handle)
        // is closed and the footer flushed even if a write throws.
        try (ParquetWriter<Group> writer = builder.build()) {
            for (int i = 0; i < 1000; i++) {
                writer.write(groupFactory.newGroup()
                        .append("log_id", Long.parseLong(accessLog[0]))
                        .append("idc_id", accessLog[1])
                        .append("house_id", Long.parseLong(accessLog[2]))
                        .append("src_ip_long", Long.parseLong(accessLog[3]))
                        .append("dest_ip_long", Long.parseLong(accessLog[4]))
                        .append("src_port", Long.parseLong(accessLog[5]))
                        .append("dest_port", Long.parseLong(accessLog[6]))
                        .append("protocol_type", Integer.parseInt(accessLog[7]))
                        .append("url64", accessLog[8])
                        .append("access_time", accessLog[9]));
            }
        }
    }

    /**
     * Reads a Parquet file row by row and prints the first two columns of each
     * record (assumes they are binary/string columns — verify against the file's
     * actual schema).
     *
     * @throws IOException if the file cannot be opened or read
     */
    private static void testParquetReader() throws IOException {
        //Path file = new Path("/Users/guan.xin/Gitee/Java/Demo/bigdata/flink-sql-java/files/test.parq");
        Path file = new Path("/Users/guan.xin/Gitee/Python/pythonProject/parquet_files/allin/so_do_order.parquet");
        // try-with-resources closes the reader — the original leaked it.
        try (ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file).build()) {
            SimpleGroup group;
            // read() returns null once the last record has been consumed.
            while ((group = (SimpleGroup) reader.read()) != null) {
                out.println(group.getString(0, 0));
                out.println(group.getString(1, 0));
                //out.println(group.getString(2, 0));
                out.println("----------------------------------------");
            }
        }
    }

    /**
     * Entry point: prints the schema of an existing file, then dumps its rows.
     *
     * @param args unused
     * @throws Exception on any read failure
     */
    public static void main(String[] args) throws Exception {
        testGetSchema();
        //testParseSchema();
        //testParquetWriter();
        testParquetReader();
    }
}
