package com.crawler.repitle;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ParquetProperties;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.GroupFactory;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * Helpers for persisting crawled job listings as Parquet files via the
 * parquet-mr example API ({@code Group}/{@code GroupWriteSupport}).
 *
 * <p>Static utility class — not instantiable.</p>
 */
public class ParquetUtil {

    private ParquetUtil() {
        // utility class: no instances
    }

    /**
     * Builds the Parquet message schema for one industry record.
     *
     * <p>The schema is nested to match what {@link #getParquetGroup} produces:
     * the two industry fields at the top level, plus a repeated
     * {@code jobEntry} group whose repeated {@code job} sub-group holds one
     * entry per crawled job. (The previous flat schema declared no
     * {@code jobEntry}/{@code job} groups, so
     * {@code Group.addGroup("jobEntry")} in {@link #getParquetGroup} could
     * never succeed at runtime.)</p>
     *
     * @return the parsed {@link MessageType} schema
     */
    public static MessageType createSchema() {
        return MessageTypeParser.parseMessageType(
                "message Pair {\n" +
                        "  required binary industryName (UTF8);\n" +
                        "  required binary industryHref (UTF8);\n" +
                        "  repeated group jobEntry {\n" +
                        "    repeated group job {\n" +
                        "      required binary jobName (UTF8);\n" +
                        "      required binary jobHref (UTF8);\n" +
                        "      required binary companyName (UTF8);\n" +
                        "      required binary companyHref (UTF8);\n" +
                        "      required binary jobLocation (UTF8);\n" +
                        "      required binary salary (UTF8);\n" +
                        "      required binary time (UTF8);\n" +
                        "      required binary order (UTF8);\n" +
                        "      required binary intro (UTF8);\n" +
                        "    }\n" +
                        "  }\n" +
                        "}");
    }

    /**
     * Opens a {@link ParquetWriter} for {@code schema} at {@code outPath},
     * deleting any existing file/directory at that path first (overwrite
     * semantics).
     *
     * @param schema  message schema the writer validates records against
     * @param outPath output path, resolved against the default Hadoop
     *                {@link FileSystem} of a fresh {@link Configuration}
     * @return a writer configured with parquet-mr default codec, block size,
     *         page sizes, dictionary encoding and validation settings
     * @throws Exception if the filesystem or the writer cannot be opened
     */
    public static ParquetWriter<Group> getWriter(MessageType schema, String outPath) throws Exception {
        Configuration conf = new Configuration();

        // Remove any previous output so the writer can create the file anew.
        FileSystem fileSystem = FileSystem.get(conf);
        Path path = new Path(outPath);
        if (fileSystem.exists(path)) {
            fileSystem.delete(path, true);
        }

        // GroupWriteSupport reads the schema out of the Configuration when the
        // writer initializes it, so it must be set before construction.
        GroupWriteSupport.setSchema(schema, conf);
        GroupWriteSupport writeSupport = new GroupWriteSupport();

        // NOTE(review): this many-argument ParquetWriter constructor is
        // deprecated in newer parquet-mr releases; migrate to the
        // ExampleParquetWriter.builder(path) API when the dependency is
        // upgraded — confirm against the version actually on the classpath.
        return new ParquetWriter<Group>(path, writeSupport,
                ParquetWriter.DEFAULT_COMPRESSION_CODEC_NAME,
                ParquetWriter.DEFAULT_BLOCK_SIZE,
                ParquetWriter.DEFAULT_PAGE_SIZE,
                ParquetWriter.DEFAULT_PAGE_SIZE,   // dictionary page size
                ParquetWriter.DEFAULT_IS_DICTIONARY_ENABLED,
                ParquetWriter.DEFAULT_IS_VALIDATING_ENABLED,
                ParquetProperties.WriterVersion.PARQUET_1_0, conf);
    }

    /**
     * Assembles one nested record: the industry fields at the top level and a
     * {@code jobEntry} group containing one {@code job} sub-group per map in
     * {@code jobs}.
     *
     * <p>Every map in {@code jobs} must supply a non-null value for each job
     * field key ({@code jobName}, {@code jobHref}, {@code companyName},
     * {@code companyHref}, {@code jobLocation}, {@code salary}, {@code time},
     * {@code order}, {@code intro}); the schema marks those fields
     * {@code required}, and {@code Group.append} does not accept null.</p>
     *
     * @param industryName display name of the industry
     * @param industryUrl  link associated with the industry
     * @param jobs         one map of field-name to value per crawled job
     * @return the populated {@link Group}, ready to hand to a writer
     * @throws Exception propagated from group construction
     */
    public static Group getParquetGroup(String industryName, String industryUrl,
                                        List<Map<String, String>> jobs) throws Exception {
        MessageType schema = createSchema();
        GroupFactory groupFactory = new SimpleGroupFactory(schema);
        Group record = groupFactory.newGroup()
                .append("industryName", industryName)
                .append("industryHref", industryUrl);
        Group jobEntry = record.addGroup("jobEntry");
        for (Map<String, String> job : jobs) {
            Group fields = jobEntry.addGroup("job");
            fields.append("jobName", job.get("jobName"));
            fields.append("jobHref", job.get("jobHref"));
            fields.append("companyName", job.get("companyName"));
            fields.append("companyHref", job.get("companyHref"));
            fields.append("jobLocation", job.get("jobLocation"));
            fields.append("salary", job.get("salary"));
            fields.append("time", job.get("time"));
            fields.append("order", job.get("order"));
            fields.append("intro", job.get("intro"));
        }
        return record;
    }
}
