package com.crawler.repitle;

import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.GroupFactory;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.schema.MessageType;

import java.io.IOException;
import java.util.Map;

/**
 * Writes string-keyed records to a Parquet file.
 *
 * <p>Lifecycle: call {@link #init()} once before any {@link #send(Map)},
 * and {@link #destroy()} when done to flush and close the underlying writer.
 * Not thread-safe: {@code ParquetWriter} is driven from a single caller here.
 */
public class ParquetSender {
    private ParquetWriter<Group> writer;
    private GroupFactory groupFactory;

    /**
     * Creates the schema, the Parquet writer, and the group factory.
     *
     * @throws IllegalStateException if the writer cannot be created; the
     *         original cause is preserved. (Previously the exception was
     *         swallowed with {@code printStackTrace()}, leaving this object
     *         half-initialized and causing a confusing NPE later in send().)
     */
    public void init() {
        try {
            MessageType schema = ParquetUtil.createSchema();
            // NOTE(review): output path "/data.parquet" is hard-coded — confirm
            // whether it should come from configuration.
            writer = ParquetUtil.getWriter(schema, "/data.parquet");
            groupFactory = new SimpleGroupFactory(schema);
        } catch (Exception e) {
            // Fail fast instead of swallowing: callers must not proceed with
            // a null writer.
            throw new IllegalStateException("Failed to initialize Parquet writer", e);
        }
    }

    /**
     * Appends one record as a Parquet group, one field per map entry.
     *
     * <p>Keys must match field names in the schema produced by
     * {@code ParquetUtil.createSchema()}.
     *
     * @param record field name to string value; must not be null
     * @throws IOException if the underlying writer fails
     * @throws IllegalStateException if {@link #init()} has not been called
     */
    public void send(Map<String, String> record) throws IOException {
        if (writer == null || groupFactory == null) {
            throw new IllegalStateException("ParquetSender not initialized; call init() first");
        }
        Group group = groupFactory.newGroup();
        // entrySet avoids a second hash lookup per key (vs keySet + get).
        for (Map.Entry<String, String> entry : record.entrySet()) {
            group.append(entry.getKey(), entry.getValue());
        }
        writer.write(group);
    }

    /**
     * Flushes and closes the writer. Safe to call even if {@link #init()}
     * failed or was never called.
     *
     * @throws IOException if closing the writer fails
     */
    public void destroy() throws IOException {
        if (writer != null) {
            writer.close();
        }
    }
}
