package com.chinaums.format;

import com.chinaums.format.deserializer.SftpCsvDeserializer;
import com.chinaums.format.option.SftpCsvOption;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.DynamicTableSource.DataStructureConverter;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.types.RowKind;

import java.util.List;
import java.util.Objects;

/**
 * {@link DecodingFormat} that turns SFTP-hosted CSV content into Flink {@link RowData}.
 *
 * <p>At runtime this creates a {@link SftpCsvDeserializer} configured from the table's
 * produced data type and the supplied {@link SftpCsvOption}.
 *
 * @author 86185
 */
public class SftpCsvFormat implements DecodingFormat<DeserializationSchema<RowData>> {

    /** Connector options controlling how the CSV content is parsed. */
    private final SftpCsvOption option;

    /**
     * Creates the format.
     *
     * @param option SFTP CSV connector options; must not be {@code null}
     */
    public SftpCsvFormat(SftpCsvOption option) {
        this.option = Objects.requireNonNull(option, "option");
    }

    @Override
    public DeserializationSchema<RowData> createRuntimeDecoder(
            DynamicTableSource.Context context,
            DataType producedDataType) {
        // Type information describing the produced rows, needed by Flink's serialization stack.
        final TypeInformation<RowData> producedTypeInfo =
                context.createTypeInformation(producedDataType);
        // Converter between external structures and Flink's internal RowData representation.
        final DataStructureConverter converter =
                context.createDataStructureConverter(producedDataType);
        // Logical types of the individual fields to parse out of each CSV record.
        final List<LogicalType> parsingTypes = producedDataType.getLogicalType().getChildren();
        // Create runtime class.
        return new SftpCsvDeserializer(parsingTypes, converter, producedTypeInfo, option);
    }

    @Override
    public ChangelogMode getChangelogMode() {
        // Define that this format can produce INSERT and DELETE rows.
        return ChangelogMode.newBuilder()
                .addContainedKind(RowKind.INSERT)
                .addContainedKind(RowKind.DELETE)
                .build();
    }
}
