package tbdp.tool.writer;


import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import tbdp.tool.common.ColumnDesc;
import tbdp.tool.common.JobConstants;
import tbdp.tool.common.Record;
import tbdp.tool.config.WriterConfig;
import tbdp.tool.context.JobContext;
import tbdp.tool.format.csv.CsvFormatter;
import tbdp.tool.format.parquet.CsvParquetWriter;
import tbdp.tool.spi.RecordIterator;
import tbdp.tool.spi.Writer;
import tbdp.tool.util.ResourceUtil;

/**
 * @author xbkaishui
 * @version $Id: ParquetWriter.java, v 0.1 2017-06-22 下午10:36 xbkaishui Exp $$
 */
/**
 * Writer plugin that serializes records to a Parquet file.
 *
 * <p>Column order and names are taken from the {@link ColumnDesc} array stored in the
 * job context under {@link JobConstants#COLUMN_SCHEMA}; the Parquet message schema is
 * parsed from the schema file configured under {@link JobConstants#COLUMN_SCHEMA_PATH}.
 * Each record is flattened to a list of strings (null values become empty strings) and
 * handed to {@link CsvParquetWriter}.
 */
public class ParquetWriter extends Writer {

    /**
     * Drains the record iterator and writes every non-null record to a Parquet file.
     *
     * @param context        job context; must hold the {@link ColumnDesc} array under
     *                       {@link JobConstants#COLUMN_SCHEMA}
     * @param recordIterator source of {@link Record} instances to write
     * @throws RuntimeException wrapping any {@link IOException} from writer setup or output
     */
    @Override
    public void write(JobContext context, RecordIterator recordIterator) {
        ColumnDesc[] header = (ColumnDesc[]) context.getValue(JobConstants.COLUMN_SCHEMA);
        // try-with-resources guarantees the Parquet footer is flushed and the file
        // handle released even when a record fails mid-stream (the original code
        // leaked the writer on any exception before close()).
        try (CsvParquetWriter writer = initParquetWriter(context)) {
            while (recordIterator.hasNext()) {
                Record record = (Record) recordIterator.next();
                if (record == null) {
                    continue;
                }
                List<String> values = new ArrayList<>(header.length);
                for (ColumnDesc col : header) {
                    Object val = record.get(col.getColumn());
                    // null column values are emitted as empty strings
                    values.add(val == null ? "" : val.toString());
                }
                writer.write(values);
            }
        } catch (IOException e) {
            throw new RuntimeException("failed to write parquet output", e);
        }
    }

    /**
     * Builds a {@link CsvParquetWriter} targeting the job's temporary output file,
     * using the Parquet schema referenced by the writer configuration.
     *
     * @param context job context providing the writer configuration and tmp file path
     * @return an open writer; the caller is responsible for closing it
     * @throws IOException if the schema file cannot be read
     */
    private CsvParquetWriter initParquetWriter(JobContext context) throws IOException {
        WriterConfig writerConfig = context.getJobConfig().getWriter();
        String columnSchema = writerConfig.getConfigPropAsString(JobConstants.COLUMN_SCHEMA_PATH);
        MessageType schema = getSchema(columnSchema);
        String tmpFilePath = getTmpFilePath(context);
        // Convert the local path through a URI so the Hadoop Path is well-formed on
        // all platforms (e.g. Windows drive letters).
        Path path = new Path(new File(tmpFilePath).toURI());
        return new CsvParquetWriter(path, schema, true);
    }

    /**
     * Loads and parses a Parquet {@link MessageType} schema from a resource file.
     *
     * @param schemaFile resource path of the schema definition
     * @return the parsed message type
     * @throws IOException if the resource cannot be read
     */
    private MessageType getSchema(String schemaFile) throws IOException {
        // try-with-resources closes the stream; the original leaked it.
        try (InputStream ins = ResourceUtil.getInputStream(schemaFile)) {
            String schemaContent = IOUtils.toString(ins, "UTF-8");
            return MessageTypeParser.parseMessageType(schemaContent);
        }
    }

    /** No per-job resources are held here; the writer is closed in {@link #write}. */
    @Override
    public void close(JobContext context) {

    }

    /** @return the plugin type key used to select this writer ("parquet"). */
    @Override
    public String getType() {
        return "parquet";
    }
}
