package org.example.flink;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.types.Row;
import org.apache.hadoop.fs.Path;
import org.apache.hive.orc.OrcFile;
import org.apache.hive.orc.TypeDescription;
import org.apache.hive.orc.Writer;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Created by McClone on 2020/6/13.
 */
/**
 * Flink sink that writes rows into partition-specific ORC files.
 *
 * <p>One ORC {@link Writer} is created lazily per distinct partition value
 * (read from field 0 of each incoming {@link Row}) under
 * {@code basePath/tableName/partitionField=value}. The ORC struct schema is
 * derived from {@code schemaMap} in {@link #open(Configuration)}, and all
 * writers are closed in {@link #close()}.
 */
public class OrcSink extends RichSinkFunction<Row> {

    private final String basePath;
    private final String tableName;
    private final String partitionField;
    /** Ordered column-name -> ORC type-name pairs used to build the struct schema. */
    private final TreeMap<String, String> schemaMap;
    /** Lazily-created writer per partition value; ConcurrentHashMap for parallel invoke() safety. */
    private final Map<String, Writer> writerMap = new ConcurrentHashMap<>();
    // Runtime-only state built in open(); transient because the sink instance is
    // serialized by Flink and these types are not Serializable.
    private transient org.apache.hadoop.conf.Configuration conf;
    private transient TypeDescription typeDescription;

    /**
     * @param basePath       root directory for output
     * @param tableName      table directory name under basePath
     * @param partitionField partition column name used in the directory layout
     * @param schemaMap      column name -> ORC type name (sorted so schema order is stable)
     */
    public OrcSink(String basePath, String tableName, String partitionField, TreeMap<String, String> schemaMap) {
        this.basePath = basePath;
        this.tableName = tableName;
        this.partitionField = partitionField;
        this.schemaMap = schemaMap;
    }

    @Override
    public void invoke(Row value, Context context) throws Exception {
        // Field 0 of every row is expected to carry the partition value.
        String partitionValue = (String) value.getField(0);
        writerMap.computeIfAbsent(partitionValue, pv -> {
            try {
                // NOTE(review): this is the partition *directory* path; ORC creates a
                // file at exactly this path — confirm whether a file-name segment
                // (e.g. a task/attempt suffix) should be appended.
                String partitionPath = basePath + "/" + tableName + "/" + partitionField + "=" + pv;
                return OrcFile.createWriter(new Path(partitionPath),
                        OrcFile.writerOptions(conf).setSchema(typeDescription));
            } catch (IOException e) {
                // Preserve the cause and add context instead of printStackTrace().
                throw new RuntimeException("Failed to create ORC writer for partition " + pv, e);
            }
        });
        // TODO(review): rows are never actually written — a VectorizedRowBatch must be
        // filled from `value` per the schema and flushed via writer.addRowBatch(batch).
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        conf = new org.apache.hadoop.conf.Configuration();
        // Build "struct<col1:type1,col2:type2,...>" from the column schema.
        // BUG FIX: the original streamed over writerMap — always empty at open()
        // time, and its values are Writer objects, not type names. The schema must
        // come from schemaMap (column name -> ORC type).
        String schemaStr = schemaMap.entrySet().stream()
                .map(e -> e.getKey() + ":" + e.getValue())
                .collect(Collectors.joining(","));
        typeDescription = TypeDescription.fromString(MessageFormat.format("struct<{0}>", schemaStr));
    }

    @Override
    public void close() throws Exception {
        // BUG FIX: the original leaked every writer — unclosed ORC writers lose
        // buffered stripes and hold file handles. Close all, remembering the first
        // failure and suppressing the rest so every writer gets a close attempt.
        IOException first = null;
        for (Writer writer : writerMap.values()) {
            try {
                writer.close();
            } catch (IOException e) {
                if (first == null) {
                    first = e;
                } else {
                    first.addSuppressed(e);
                }
            }
        }
        writerMap.clear();
        if (first != null) {
            throw first;
        }
    }
}
