package com.yifeng.repo.flink.data.transport.streaming.iceberg;

import com.alibaba.fastjson.JSONObject;
import com.yifeng.repo.flink.data.transport.config.IcebergFieldConfig;
import com.yifeng.repo.flink.data.transport.config.IcebergTableConfig;
import com.yifeng.repo.flink.data.transport.config.SinkIcebergConfig;
import com.yifeng.repo.flink.data.transport.dto.DataRow;
import com.yifeng.repo.flink.data.transport.dto.IcebergTableInfo;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.table.data.*;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.flink.CatalogLoader;
import org.apache.iceberg.flink.TableLoader;
import org.apache.iceberg.flink.sink.FlinkSink;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Days;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.math.BigDecimal;
import java.sql.Timestamp;
import java.text.ParseException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.util.*;

/**
 * 构建写入Iceberg的环境和写入表逻辑
 * @author wangzhi
 * @since 2023-06-13
 */
public class FlinkSinkIcebergBuilder {

    private static final Logger LOG = LoggerFactory.getLogger(FlinkSinkIcebergBuilder.class);

    /**
     * Builds the Iceberg table environment (create-or-load each configured table), splits
     * the incoming JSON stream into one side output per destination table, and appends
     * each side output to its table through {@link FlinkSink}.
     *
     * @param dataStreamSource source stream of JSON-encoded {@code DataRow} arrays
     * @param config           sink configuration: Hive catalog connection plus per-table field layout
     */
    public static void buildFlinkSinkIceberg(DataStreamSource<String> dataStreamSource, SinkIcebergConfig config) {
        // Create/load the Iceberg tables described by the configuration.
        List<IcebergTableInfo> icebergTables = buildIcebergTableInfo(config);
        List<IcebergTableConfig> tableList = config.getTableList();

        Map<String, OutputTag<RowData>> outputTagMap = buildOutputTagMap(tableList);
        Map<String, IcebergTableConfig> tableMap = buildTableMap(tableList);

        // Split the stream: rows are routed to the side output of the table they belong to.
        SingleOutputStreamOperator<RowData> mainDataStream =
                getOutputStreamOperator(dataStreamSource, outputTagMap, tableMap);

        // Attach a FlinkSink (append mode) to each table's side output.
        for (IcebergTableInfo tableInfo : icebergTables) {
            OutputTag<RowData> outputTag = outputTagMap.get(tableInfo.getName());
            DataStream<RowData> sideOutputStream = mainDataStream.getSideOutput(outputTag);
            FlinkSink.forRowData(sideOutputStream)
                    .table(tableInfo.getTable())
                    .tableLoader(tableInfo.getTableLoader())
                    .overwrite(false)
                    .append();
        }
    }

    /**
     * Wraps the source stream in a {@link ProcessFunction} that parses each JSON payload
     * into {@code DataRow}s, converts them to Flink-internal {@link RowData} and emits
     * them to the side output of their destination table.
     *
     * @param dataStreamSource raw JSON payload stream
     * @param outputTagMap     table name → side-output tag
     * @param tableMap         table name → table/field configuration
     * @return the processing operator; consumers read its side outputs
     */
    private static SingleOutputStreamOperator<RowData> getOutputStreamOperator(
            DataStreamSource<String> dataStreamSource,
            Map<String, OutputTag<RowData>> outputTagMap,
            Map<String, IcebergTableConfig> tableMap) {
        return dataStreamSource.process(new ProcessFunction<String, RowData>() {
            @Override
            public void processElement(String value, Context ctx, Collector<RowData> out) throws Exception {
                // Logging every payload at INFO floods the log on busy streams; keep at DEBUG.
                LOG.debug("start value====={}", value);
                List<DataRow> dataRowList = JSONObject.parseArray(value, DataRow.class);
                if (CollectionUtils.isEmpty(dataRowList)) {
                    return;
                }
                DataRow dataRowFirst = dataRowList.get(0);
                if (Objects.isNull(dataRowFirst)) {
                    return;
                }
                // NOTE(review): all rows in one payload are assumed to target the table named
                // by the FIRST row — confirm the producer never mixes tables in one batch.
                String tb = dataRowFirst.getTb();
                if (!tableMap.containsKey(tb)) {
                    LOG.warn("table not exists!!tb:{}", tb);
                    return;
                }
                OutputTag<RowData> outputTag = outputTagMap.get(tb);
                IcebergTableConfig tableConfig = tableMap.get(tb);
                List<IcebergFieldConfig> fieldConfigs = tableConfig.getFieldList();
                for (DataRow dataRow : dataRowList) {
                    GenericRowData row = new GenericRowData(fieldConfigs.size());
                    // Hoisted out of the field loop: the "after" image is per row, not per field.
                    Map<String, Object> valueMap = dataRow.getAfter();
                    for (IcebergFieldConfig fieldConfig : fieldConfigs) {
                        row.setField(fieldConfig.getPos(), getFieldValue(fieldConfig, valueMap));
                    }
                    ctx.output(outputTag, row);
                    out.collect(row);
                }
                LOG.debug("end value====={}", value);
            }
        });
    }

    /**
     * Builds the Iceberg table metadata for every configured table: resolves the table
     * identifier under the configured database, creates the table if absent (or loads it),
     * and pairs it with a {@link TableLoader} for the sink.
     *
     * @param config sink configuration
     * @return one {@link IcebergTableInfo} per configured table
     */
    public static List<IcebergTableInfo> buildIcebergTableInfo(SinkIcebergConfig config) {
        List<IcebergTableConfig> tableList = config.getTableList();
        List<IcebergTableInfo> icebergTables = new ArrayList<>(tableList.size());
        Map<String, String> properties = loadPropHiveCatalog(config);
        CatalogLoader catalogLoader = CatalogLoader.hive("hive_catalog", new Configuration(), properties);
        HiveCatalog hiveCatalog = buildHiveCatalog(properties);
        for (IcebergTableConfig tableConfig : tableList) {
            TableIdentifier name = TableIdentifier.of(config.getCatalogDatabase(), tableConfig.getName());
            TableLoader tableLoader = TableLoader.fromCatalog(catalogLoader, name);
            Schema schema = buildSchema(tableConfig.getFieldList());
            PartitionSpec spec = buildPartition(schema, null);
            Table table = buildTable(hiveCatalog, name, schema, spec, tableConfig.getVersion());

            IcebergTableInfo tableInfo = new IcebergTableInfo();
            tableInfo.setName(tableConfig.getName());
            tableInfo.setTable(table);
            tableInfo.setTableLoader(tableLoader);
            icebergTables.add(tableInfo);
        }
        return icebergTables;
    }

    /** Indexes the table configurations by table name. */
    private static Map<String, IcebergTableConfig> buildTableMap(List<IcebergTableConfig> tableList) {
        Map<String, IcebergTableConfig> tableMap = new HashMap<>();
        for (IcebergTableConfig tableConfig : tableList) {
            tableMap.put(tableConfig.getName(), tableConfig);
        }
        return tableMap;
    }

    /** Creates one side-output tag per table, keyed by table name. */
    private static Map<String, OutputTag<RowData>> buildOutputTagMap(List<IcebergTableConfig> tableList) {
        Map<String, OutputTag<RowData>> outputTagMap = new HashMap<>();
        for (IcebergTableConfig tableConfig : tableList) {
            outputTagMap.put(tableConfig.getName(),
                    new OutputTag<>(tableConfig.getName(), TypeInformation.of(RowData.class)));
        }
        return outputTagMap;
    }

    /** Assembles the Hive catalog connection properties (warehouse, URI, catalog impl). */
    private static Map<String, String> loadPropHiveCatalog(SinkIcebergConfig sinkIcebergConfig) {
        Map<String, String> properties = new HashMap<>();
        properties.put(CatalogProperties.WAREHOUSE_LOCATION, sinkIcebergConfig.getCatalogWarehouse());
        properties.put(CatalogProperties.URI, sinkIcebergConfig.getCatalogUri());
        properties.put(CatalogProperties.CATALOG_IMPL, sinkIcebergConfig.getCatalogImpl());
        return properties;
    }

    /** Initializes a {@link HiveCatalog} named "hive_catalog" with a default Hadoop configuration. */
    private static HiveCatalog buildHiveCatalog(Map<String, String> properties) {
        Configuration conf = new Configuration();
        HiveCatalog catalog = new HiveCatalog();
        catalog.setConf(conf);
        catalog.initialize("hive_catalog", properties);
        return catalog;
    }

    /**
     * Builds the Iceberg schema from the field configuration. Field IDs are pos+1
     * because Iceberg field IDs start at 1 while {@code pos} is the 0-based row index.
     *
     * <p>NOTE(review): every field is declared {@code required}, but
     * {@code getFieldValue} can return {@code null} for missing source values —
     * confirm upstream guarantees non-null values, or switch to {@code optional}.
     */
    private static Schema buildSchema(List<IcebergFieldConfig> fieldList) {
        List<Types.NestedField> columns = new ArrayList<>(fieldList.size());
        for (IcebergFieldConfig fieldConfig : fieldList) {
            columns.add(Types.NestedField.required(
                    fieldConfig.getPos() + 1, fieldConfig.getName(), buildFieldType(fieldConfig)));
        }
        return new Schema(columns);
    }

    /**
     * Returns the partition spec for the table. Currently always unpartitioned;
     * {@code partitionField} is accepted for future use and ignored.
     */
    private static PartitionSpec buildPartition(Schema schema, String partitionField) {
        // Partitioned example: PartitionSpec.builderFor(schema).month("id").build();
        return PartitionSpec.unpartitioned();
    }

    /**
     * Creates the table if it does not exist yet, otherwise loads it.
     *
     * @param version Iceberg "format-version" table property value
     */
    private static Table buildTable(HiveCatalog catalog, TableIdentifier name, Schema schema,
                                    PartitionSpec spec, String version) {
        // Table properties: enable Hive engine compatibility and pin the format version.
        Map<String, String> tableProp = new HashMap<>();
        tableProp.put("engine.hive.enabled", "true");
        tableProp.put("format-version", version);

        if (!catalog.tableExists(name)) {
            LOG.info("tableNotExists：{}", name);
            return catalog.createTable(name, schema, spec, tableProp);
        }
        LOG.info("tableExists：{}", name);
        return catalog.loadTable(name);
    }

    /** Maps the configured type name to the Iceberg column type; unknown names fall back to string. */
    private static Type buildFieldType(IcebergFieldConfig fieldConfig) {
        String typeStr = fieldConfig.getType();
        switch (typeStr) {
            case "Boolean":
                return Types.BooleanType.get();
            case "Integer":
                return Types.IntegerType.get();
            case "Long":
                return Types.LongType.get();
            case "Date":
                return Types.DateType.get();
            case "Time":
                return Types.TimeType.get();
            case "Timestamp":
                return Types.TimestampType.withoutZone();
            case "Decimal":
                return Types.DecimalType.of(fieldConfig.getPrecision(), fieldConfig.getScale());
            default:
                return Types.StringType.get();
        }
    }

    /**
     * Converts a raw source value into the Flink-internal representation that the Iceberg
     * {@code RowData} writer expects for the configured field type:
     * DATE → int days since epoch, TIME → int millis of day, TIMESTAMP → {@link TimestampData},
     * DECIMAL → {@link DecimalData}, STRING → {@link StringData}.
     *
     * @param fieldConfig field position/name/type configuration
     * @param valueMap    row image keyed by field name
     * @return the converted value, or {@code null} when the source value is absent
     * @throws ParseException declared for source-value parsing; kept for interface compatibility
     */
    private static Object getFieldValue(IcebergFieldConfig fieldConfig, Map<String, Object> valueMap) throws ParseException {
        String typeStr = fieldConfig.getType();
        Object value = valueMap.get(fieldConfig.getName());
        if (value == null) {
            return null;
        }
        String valStr = value.toString();
        LOG.debug("getFieldValue typeStr:{} , valStr:{}", typeStr, valStr);
        switch (typeStr) {
            case "Boolean":
                return Boolean.valueOf(valStr);
            case "Integer":
                return Integer.valueOf(valStr);
            case "Long":
                return Long.valueOf(valStr);
            case "Date":
                // Internal DATE representation: days since 1970-01-01. The first ten
                // characters are expected to be an ISO yyyy-MM-dd date.
                return (int) LocalDate.parse(valStr.substring(0, 10)).toEpochDay();
            case "Time":
                // Internal TIME representation is an Integer (milliseconds of day);
                // the raw string would fail a cast in the Iceberg RowData writer.
                return (int) (LocalTime.parse(valStr).toNanoOfDay() / 1_000_000L);
            case "Timestamp":
                // The writer reads TimestampData here, not epoch millis.
                return TimestampData.fromTimestamp(Timestamp.valueOf(valStr));
            case "Decimal":
                // The writer reads DecimalData here, not java.math.BigDecimal.
                return DecimalData.fromBigDecimal(
                        new BigDecimal(valStr), fieldConfig.getPrecision(), fieldConfig.getScale());
            default:
                return StringData.fromString(valStr);
        }
    }
}
