package com.bleeth.flow.step.output;

import cn.hutool.core.convert.Convert;
import cn.hutool.json.JSONObject;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.sql.DataFrameWriter;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;

import java.io.Serializable;
import java.util.Map;
import java.util.Properties;

/**
 * @author ：Bleeth
 * @date ：2021-08-06
 * @description：A component that writes data out to a relational database table via JDBC
 */
@Data
@PluginAnnotation(name = "jdbc写",
        type = PluginTypeEnum.OUTPUT,
        description = "",
        id = "JdbcWriterPlugin")
public class JdbcWriterPlugin extends APlugin implements Serializable {

    private static final long serialVersionUID = 7322626719843369743L;

    /** Fully-qualified JDBC driver class name (e.g. {@code com.mysql.cj.jdbc.Driver}). */
    private final String driver;

    /** JDBC connection URL. */
    private final String url;

    /** Database user name. */
    private final String username;

    /** Database password. */
    private final String password;

    /** Target table the dataset is appended to. */
    private final String table;

    /**
     * Extra JDBC connection properties supplied by the flow configuration.
     * Entries here may override the defaults (driver/user/password) set below.
     */
    private final JSONObject paramObj;


    /**
     * Appends the upstream plugin's {@code Dataset<Row>} to the configured JDBC table.
     *
     * @param param       the dataset handed to this step (unused directly; the real
     *                    input is resolved from the upstream wrapper's work result)
     * @param allWrappers all step wrappers of the flow, keyed by plugin name
     * @return always {@code null} — output plugins produce no downstream dataset
     */
    @Override
    public Dataset<Row> action(Dataset<Row> param, Map<String, WorkerWrapper> allWrappers) {
        super.action(param, allWrappers);

        // Resolve the dataset produced by the first upstream plugin.
        String fromPluginName = fromList.get(0);
        WorkerWrapper fromWrapper = allWrappers.get(fromPluginName);
        Dataset<Row> ds = (Dataset<Row>) fromWrapper.getWorkResult().getResult();

        // Build the JDBC connection properties. Spark's JDBC source expects the
        // credential keys "user" and "password" — the previous code set "username"
        // as a writer option, which Spark silently ignores, so authentication
        // failed unless the URL embedded the credentials.
        Properties pro = new Properties();
        pro.put("driver", driver);
        pro.put("user", username);
        pro.put("password", password);

        // Let caller-supplied properties override the defaults above.
        if (paramObj != null) {
            paramObj.entrySet().forEach(entry ->
                    pro.put(entry.getKey(), Convert.toStr(entry.getValue())));
        }

        ds.write()
                .mode(SaveMode.Append)
                .jdbc(url, table, pro);
        return null;
    }


}
