package com.bleeth.flow.step.input;

import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.KV;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.bleeth.flow.core.util.SparkUtil;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.log4j.Logger;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

/**
 * @author ：Bleeth
 * @date ：2021-08-06
 * @description：JDBC读取接口
 */
@Data
@PluginAnnotation(name = "JDBC读取接口",
        type = PluginTypeEnum.INPUT,
        description = "",
        id = "JdbcReaderPlugin")
public class JdbcReaderPlugin extends APlugin implements Serializable {

    private static final long serialVersionUID = 7322626719843369743L;

    // transient: log4j Logger is not serializable; without it, serializing this
    // plugin (e.g. when Spark ships it to executors) would throw
    // NotSerializableException. Lombok still generates the accessor, so the
    // external interface is unchanged.
    private transient Logger logger = Logger.getLogger(getClass());

    /** Fully-qualified JDBC driver class name, e.g. {@code com.mysql.cj.jdbc.Driver}. */
    private String driver;
    /** JDBC connection URL. */
    private String url;
    /** Database user name. */
    private String user;
    /** Database password. */
    private String password;

    /** Extra reader options (e.g. dbtable, fetchsize) passed through to Spark; may be null or empty. */
    private List<KV> options;


    /**
     * Reads a {@link Dataset} from a JDBC source using the configured
     * driver/url/user/password plus any additional key-value options.
     *
     * @param param       upstream dataset; not used by this input plugin
     * @param allWrappers all worker wrappers of the flow; not used here
     * @return the dataset loaded from the JDBC source
     */
    @Override
    public Dataset<Row> action(Dataset<Row> param, Map<String, WorkerWrapper> allWrappers) {
        DataFrameReader reader = SparkUtil.getSparkInstance().read()
                .format("jdbc")
                .option("driver", driver)
                .option("url", url)
                .option("user", user)
                .option("password", password);
        // options is optional config — guard against null to avoid an NPE
        // when no extra options are supplied.
        if (options != null) {
            for (KV option : options) {
                reader.option(option.getKey(), option.getValue());
            }
        }
        return reader.load();
    }



}
