package org.apache.flink.connector.httptm;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.SourceFunctionProvider;
import org.apache.flink.table.connector.source.abilities.SupportsLimitPushDown;
import org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * HTTP table source — the runtime implementation that reads the actual data.
 *
 * <ol>
 * <li>2022/4/6, yanghongjian: created the document</li>
 * <li>Runtime implementation to read/write the actual data</li>
 * </ol>
 *
 * @author <a href="mailto:12719889@qq.com">YangHongJian</a>
 * @version 3.0
 * @since 1.8
 */
public class HttpTmTableSource implements ScanTableSource, SupportsReadingMetadata, SupportsLimitPushDown {
    private static final Logger logger = LoggerFactory.getLogger(HttpTmTableSource.class);

    /** Prefix under which the decoding format's metadata keys are exposed by this source. */
    public static final String VALUE_METADATA_PREFIX = "value.";

    private final DataType physicalRowDataType;
    private final List<Column.MetadataColumn> metadataColumns;
    private final ReadableConfig options;
    private final DecodingFormat<DeserializationSchema<RowData>> decodingFormat;
    private final boolean isBounded;

    // Row-count limit pushed down by the planner (or preset via the JSON "setting" option);
    // -1 means "no limit", matching the default used when parsing the "setting" option.
    // NOTE(review): this value is stored but never forwarded to HttpTmSourceFunction in the
    // visible code — confirm whether the limit pushdown is actually applied at runtime.
    private long limit = -1L;

    /**
     * Creates the HTTP table source.
     *
     * @param physicalRowDataType physical row type produced by this source
     * @param metadataColumns     metadata columns declared on the table schema
     * @param options             connector options; must be a {@link Configuration} instance,
     *                            otherwise the cast in this constructor throws {@link ClassCastException}
     * @param decodingFormat      format used to decode the HTTP payload into {@link RowData}
     * @param isBounded           whether the source runs in bounded (batch) mode
     * @throws RuntimeException if the optional JSON "setting" option cannot be parsed
     */
    public HttpTmTableSource(
            DataType physicalRowDataType,
            List<Column.MetadataColumn> metadataColumns,
            ReadableConfig options,
            DecodingFormat<DeserializationSchema<RowData>> decodingFormat,
            boolean isBounded) {
        this.physicalRowDataType = physicalRowDataType;
        this.metadataColumns = metadataColumns;
        this.options = options;
        this.decodingFormat = decodingFormat;
        this.isBounded = isBounded;

        Map<String, String> mapOpt = ((Configuration) options).toMap();
        String setting = mapOpt.getOrDefault("setting", "");
        try {
            if (!setting.isEmpty()) {
                // Parameterized type instead of the raw Map; the unchecked conversion from
                // Jackson's Map.class binding is safe for the read-only access below.
                @SuppressWarnings("unchecked")
                Map<String, Object> mapSetting = new ObjectMapper().readValue(setting, Map.class);
                this.limit = Long.parseLong(mapSetting.getOrDefault("limit", "-1").toString());
            }
        } catch (Exception e) {
            logger.error("http接口配置参数异常", e);
            // Preserve the original exception as the cause instead of dropping the stack trace.
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /** The changelog mode is delegated to the decoding format; the source adds nothing itself. */
    @Override
    public ChangelogMode getChangelogMode() {
        return decodingFormat.getChangelogMode();
    }

    /**
     * Builds the runtime reader: a {@link SourceFunction} that fetches data over HTTP and
     * decodes each record with the configured format.
     */
    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
        final DeserializationSchema<RowData> deserializationSchema =
                decodingFormat.createRuntimeDecoder(runtimeProviderContext, physicalRowDataType);

        final SourceFunction<RowData> sourceFunction =
                new HttpTmSourceFunction(this.options, deserializationSchema, isBounded);
        // The second argument decides bounded (batch) vs. unbounded (streaming) execution.
        return SourceFunctionProvider.of(sourceFunction, isBounded);
    }

    @Override
    public DynamicTableSource copy() {
        HttpTmTableSource copy =
                new HttpTmTableSource(physicalRowDataType, metadataColumns, options, decodingFormat, isBounded);
        // Carry over mutable pushdown state so the copy is equivalent to this instance,
        // as required by the DynamicTableSource#copy contract.
        copy.limit = this.limit;
        return copy;
    }

    @Override
    public String asSummaryString() {
        return "外部连接器(http) 源表";
    }

    /** Records the row-count limit pushed down by the planner (see the note on {@code limit}). */
    @Override
    public void applyLimit(long limit) {
        this.limit = limit;
    }

    /**
     * Exposes the format's readable metadata, each key prefixed with
     * {@value #VALUE_METADATA_PREFIX} to avoid clashes with source-level metadata.
     */
    @Override
    public Map<String, DataType> listReadableMetadata() {
        final Map<String, DataType> mapMetadata = new LinkedHashMap<>();
        decodingFormat.listReadableMetadata().forEach(
                (key, value) -> mapMetadata.put(VALUE_METADATA_PREFIX + key, value));
        return mapMetadata;
    }

    /**
     * Forwards the requested metadata keys (stripped of the {@value #VALUE_METADATA_PREFIX}
     * prefix) to the decoding format.
     *
     * <p>NOTE(review): the {@code metadataKeys} argument is ignored and the keys are derived
     * from the declared {@code metadataColumns} instead — confirm this matches the planner's
     * expectation, since the two can differ when only a subset of metadata columns is queried.
     */
    @Override
    public void applyReadableMetadata(List<String> metadataKeys, DataType producedDataType) {
        final Map<String, DataType> formatMetadata = decodingFormat.listReadableMetadata();
        if (!formatMetadata.isEmpty()) {
            final List<String> formatMetadataKeys =
                    metadataColumns.stream()
                            .filter(k -> k.getMetadataKey().isPresent()
                                    && k.getMetadataKey().get().startsWith(VALUE_METADATA_PREFIX))
                            .map(k -> k.getMetadataKey().get().substring(VALUE_METADATA_PREFIX.length()))
                            .collect(Collectors.toList());
            decodingFormat.applyReadableMetadata(formatMetadataKeys);
        }
    }
}
