package com.athui.service;

import com.athui.bean.metadata.ColumnMetaData;
import com.athui.bean.metadata.TableMetaData;
import com.athui.bean.task.JdbcTask;
import com.athui.bean.task.SameTask;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.connector.jdbc.*;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import org.apache.flink.types.Row;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

/**
 * @description: TODO
 * @autor: zhangzhonghui
 * @create: 2024-08-01 09:09
 * @Version: 1.0
 */
/**
 * Metadata-exploration service that reads rows from a relational source via Flink's
 * {@link JdbcInputFormat} and exposes them as a {@link DataStreamSource} of {@link Row}.
 *
 * <p>Lifecycle: {@link #open(StreamExecutionEnvironment)} enriches the table's column
 * metadata with task/table codes; {@link #source(StreamExecutionEnvironment)} builds the
 * JDBC input. The column list is initialized lazily as a safeguard in case callers invoke
 * {@code source} before {@code open}.
 */
public class JdbcExplorerService extends ExplorerServer<Row, Row> implements Serializable {

    /**
     * Column metadata enriched with the owning task/table codes.
     * Built by {@link #open} (or lazily on first use — see {@link #enrichedColumns()}).
     */
    private Collection<ColumnMetaData> newColumnMetaDataList;

    private final JdbcTask task;
    private final TableMetaData tableMetaData;

    /**
     * @param task          the JDBC exploration task (connection info, strategies); must not be null
     * @param tableMetaData metadata of the table to explore; must not be null
     */
    public JdbcExplorerService(JdbcTask task, TableMetaData tableMetaData) {
        this.tableMetaData = Objects.requireNonNull(tableMetaData, "tableMetaData");
        this.task = Objects.requireNonNull(task, "task");
    }

    /**
     * Prepares the enriched column metadata list.
     *
     * @param env the stream environment (currently unused here; kept for lifecycle symmetry)
     */
    public void open(StreamExecutionEnvironment env) {
        this.newColumnMetaDataList = enrichedColumns();
    }

    /**
     * Copies the task and table codes onto every column so downstream operators can
     * correlate each column with its originating task/table.
     * Note: mutates the {@link ColumnMetaData} instances held by {@code tableMetaData}.
     */
    private List<ColumnMetaData> enrichedColumns() {
        return tableMetaData.getColumnMetaDataList().stream()
                .map(columnMetaData -> {
                    columnMetaData.setTaskCode(tableMetaData.getTaskCode());
                    columnMetaData.setTableCode(tableMetaData.getTableCode());
                    return columnMetaData;
                })
                .collect(Collectors.toList());
    }

    /**
     * Returns the enriched column metadata list, initializing it if {@link #open} has not
     * been called yet.
     *
     * @param collect ignored; retained for interface compatibility with existing callers
     * @return the enriched column metadata of this service's table
     */
    public Collection<ColumnMetaData> getColumnMetaDataList(Collection<ColumnMetaData> collect) {
        if (this.newColumnMetaDataList == null) {
            this.newColumnMetaDataList = enrichedColumns();
        }
        return this.newColumnMetaDataList;
    }

    @Override
    public DataSourceTransformer<Row, Row> getTypeTransformer() {
        return new DataSourceTransformer<Row, Row>() {
            /** JDBC rows are already {@link Row}s — pass through unchanged. */
            @Override
            public Row transformBySourceType(Row input) {
                return input;
            }

            /**
             * Extracts a single field value by its 1-based column index.
             *
             * @param index 1-based column position as a decimal string
             * @throws NumberFormatException if {@code index} is not a valid integer
             */
            @Override
            public Object transformByValue(String index, Row input) {
                return input.getField(Integer.parseInt(index) - 1);
            }

            @Override
            public SameTask task() {
                return task;
            }
        };
    }

    /** @return the maximum number of rows to scan, taken from the table metadata */
    public Integer getRowSizeLimit() {
        // TODO: 广播 行数 (broadcast the row count)
        return tableMetaData.getRowSizeLimit();
    }

    /**
     * Builds the Flink JDBC source for this table using the task's connection info and
     * exploration strategy. Lazily initializes the column list if {@link #open} was skipped,
     * avoiding the NPE the previous implementation had on that call order.
     *
     * @param env the stream execution environment used to create the input
     * @return a {@link DataStreamSource} emitting one {@link Row} per fetched record
     */
    public DataStreamSource<Row> source(StreamExecutionEnvironment env) {
        if (this.newColumnMetaDataList == null) {
            // Defensive: open(env) may not have been invoked before source(env).
            this.newColumnMetaDataList = enrichedColumns();
        }
        JdbcInputFormat jdbcInputFormat = JdbcInputFormat.buildJdbcInputFormat()
                .setDBUrl(task.getConnectInfo().getJdbc())
                .setDrivername(task.getConnectInfo().getDriver().toString())
                .setUsername(task.getConnectInfo().getUsername())
                .setPassword(task.getConnectInfo().getPassword())
                // Query text comes from the configured exploration strategy, bounded by the row limit.
                .setQuery(task.getExplorerStrategy().setTableMetaData(tableMetaData).build(tableMetaData.getRowSizeLimit(), task.getScanStrategy()))
                // Map each column's declared type onto Flink's type system so rows deserialize correctly.
                .setRowTypeInfo(new RowTypeInfo(newColumnMetaDataList.stream().map(ColumnMetaData::getFlinkColumnType).toArray(TypeInformation[]::new)))
                .setFetchSize(task.getFetchSize())
                .finish();
        return env.createInput(jdbcInputFormat);
    }

    /**
     * Prints the result stream. Placeholder sink — replace with a real writer when the
     * downstream target is decided.
     */
    public void sink(DataStream<Tuple2<ColumnMetaData, SameTask>> stream) {
        stream.print();
    }

    /** @return a human-readable Flink job name identifying the explored table */
    public String getJobName() {
        return String.format("metadata explorer by(%s.%s)", tableMetaData.getSchemaName(), tableMetaData.getTableName());
    }
}
