package com.apex.flink.source;


import com.alibaba.fastjson.JSONObject;
import com.apex.flink.FlinkEnvironment;
import com.apex.flink.batch.FlinkBatchSource;
import com.apex.flink.utils.ConfigKeyName;
import com.apex.flink.utils.SchemaUtil;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.core.fs.Path;
import org.apache.avro.Schema;
import org.apache.flink.types.Row;
import org.apache.parquet.avro.AvroSchemaConverter;
import org.apache.parquet.schema.MessageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.io.RowCsvInputFormat;
import org.apache.flink.formats.parquet.ParquetRowInputFormat;
import org.apache.flink.orc.OrcRowInputFormat;
import org.apache.flink.api.common.io.InputFormat;

import java.util.List;
import java.util.Map;

/**
 * Batch source that reads rows from files in several formats (json, parquet,
 * orc, csv, raw, delimiter-split text) and exposes them as a {@code DataSet<Row>}.
 */
public class FileBatchSource implements FlinkBatchSource<Row> {

    // Loggers are stateless and shared — one static instance per class.
    private static final Logger LOGGER = LoggerFactory.getLogger(FileBatchSource.class);

    /** Format used when the configured file type is blank. */
    private static final String DEFAULT_FORMAT = "raw";

    /** Batch size handed to the ORC reader (was an inline magic number). */
    private static final int ORC_BATCH_SIZE = 1000;

    // Row type produced by the "text" format's map function.
    private RowTypeInfo rowTypeInfo;

    private Config config;

    private String filePath;  // path of the file(s) to read
    private String split;     // field delimiter for the "text" format
    private String format;    // one of: json, parquet, orc, csv, raw, text
    private String schema;    // schema definition; its syntax is format-specific

    /**
     * Builds a {@code DataSet<Row>} from {@link #filePath} according to the
     * configured {@link #format}.
     *
     * @param env shared Flink environment wrapper providing the batch environment
     * @return rows read from the file, or {@code null} for an unknown format
     */
    @Override
    public DataSet<Row> getData(FlinkEnvironment env) {
        ExecutionEnvironment environment = env.getBatchEnvironment();
        if (StringUtils.isBlank(filePath)) {
            // Missing path is a configuration error: terminate with a NON-ZERO
            // status. The original exited with 0, reporting success to the OS.
            LOGGER.error("unsupported file path");
            System.exit(-1);
        }
        DataSet<Row> dataSetRow = null;

        switch (format) {
            case "json": {
                // Schema is a JSON object describing field names and types.
                JSONObject jsonSchemaInfo = (JSONObject) JSONObject.parse(schema);
                RowTypeInfo jsonInfo = SchemaUtil.getTypeInformation(jsonSchemaInfo);
                JsonRowInputFormat jsonInputFormat =
                        new JsonRowInputFormat(new Path(filePath), null, jsonInfo);
                dataSetRow = environment.createInput(jsonInputFormat);
                break;
            }
            case "parquet": {
                // Schema is an Avro schema string, converted to a Parquet MessageType.
                Schema avroSchema = new Schema.Parser().parse(schema);
                MessageType messageType = new AvroSchemaConverter().convert(avroSchema);
                // Keep the concrete type instead of a raw InputFormat variable.
                ParquetRowInputFormat parquetInputFormat =
                        new ParquetRowInputFormat(new Path(filePath), messageType);
                dataSetRow = environment.createInput(parquetInputFormat);
                break;
            }
            case "orc": {
                OrcRowInputFormat orcRowInputFormat =
                        new OrcRowInputFormat(filePath, schema, null, ORC_BATCH_SIZE);
                dataSetRow = environment.createInput(orcRowInputFormat);
                break;
            }
            case "csv": {
                // Schema is a JSON array of name/type entries.
                @SuppressWarnings("unchecked")
                List<Map<String, String>> csvSchemaInfo =
                        (List<Map<String, String>>) JSONObject.parse(schema);
                TypeInformation[] csvType = SchemaUtil.getCsvType(csvSchemaInfo);
                RowCsvInputFormat rowCsvInputFormat =
                        new RowCsvInputFormat(new Path(filePath), csvType, true);
                dataSetRow = environment.createInput(rowCsvInputFormat);
                break;
            }
            case "raw": {
                // Each line becomes a Row via the project's TextRowInputFormat.
                TextRowInputFormat textInputFormat = new TextRowInputFormat(new Path(filePath));
                dataSetRow = environment.createInput(textInputFormat);
                break;
            }
            case "text": {
                // Only this branch consumes the plain-text DataSource; the
                // original created it unconditionally for every format.
                DataSource<String> lines = environment.readTextFile(filePath);
                // NOTE(review): String.split treats `split` as a regex — a
                // delimiter such as "|" must be escaped by the caller; confirm
                // whether a literal delimiter was intended.
                dataSetRow = lines
                        .map(line -> {
                            String[] fields = line.split(split);
                            Row row = new Row(fields.length);
                            for (int i = 0; i < fields.length; i++) {
                                row.setField(i, fields[i]);
                            }
                            return row;
                        })
                        .returns(rowTypeInfo);
                break; // was missing: fell through to default (empty today, fragile tomorrow)
            }
            default:
                // Surface misconfiguration instead of silently returning null.
                LOGGER.warn("unsupported file format: [{}]", format);
                break;
        }
        return dataSetRow;
    }

    /**
     * Reads path, delimiter, format and schema from the configuration.
     * Falls back to {@value #DEFAULT_FORMAT} when the configured type is blank.
     */
    @Override
    public void prepare(FlinkEnvironment environment) {
        this.rowTypeInfo = environment.getRowTypeInfo();
        if (config.hasPath(ConfigKeyName.READ_FILE_PATH)) {
            filePath = config.getString(ConfigKeyName.READ_FILE_PATH);
            split = config.getString(ConfigKeyName.FILE_SPLIT);
        }

        if (config.hasPath(ConfigKeyName.FILE_SOURCE_FILE_TYPE)) {
            format = config.getString(ConfigKeyName.FILE_SOURCE_FILE_TYPE);
            if (StringUtils.isBlank(format)) {
                format = DEFAULT_FORMAT;
            }
        }
        schema = environment.getSchema();
    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }
}
