package com.bleeth.flow.step.output;

import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONObject;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.jd.platform.async.worker.WorkResult;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * CSV output plugin: writes the upstream plugin's {@code Dataset<Row>} to CSV
 * files at the configured {@link #path}, overwriting any existing output.
 *
 * @author Bleeth
 * @date 2021-08-06
 */
@Data
@PluginAnnotation(name = "CSV写插件",
        type = PluginTypeEnum.OUTPUT,
        description = "",
        id = "CsvWriterPlugin")
public class CsvWriterPlugin extends APlugin implements Serializable {

    /** Column separator written between fields; falls back to "," when unset. */
    private String delimiter;

    /** Quote character for fields containing the delimiter; empty string when unset. */
    private String quote;

    /** Target directory for the CSV output (overwritten on every run). */
    private String path;


    @Override
    public void result(boolean success, Dataset<Row> input, WorkResult<Dataset<Row>> workResult) {
        // Delegate to the base class; this plugin adds no extra result handling.
        super.result(success, input, workResult);
    }


    /**
     * Writes the upstream dataset to {@link #path} as CSV.
     *
     * @param param       dataset supplied by the framework (unused here; the
     *                    upstream wrapper's work result is read instead)
     * @param allWrappers all workers in the flow, keyed by plugin name
     * @return always {@code null} — an output plugin produces no downstream dataset
     */
    @Override
    public Dataset<Row> action(Dataset<Row> param, Map<String, WorkerWrapper> allWrappers) {
        super.action(param, allWrappers);

        // NOTE(review): assumes exactly one upstream plugin in fromList — confirm
        // against how the flow framework wires OUTPUT plugins.
        String fromPluginName = fromList.get(0);
        WorkerWrapper fromWrapper = allWrappers.get(fromPluginName);
        Dataset<Row> ds = (Dataset<Row>) fromWrapper.getWorkResult().getResult();

        // Fix: the original collected the whole dataset to the driver
        // (collectAsList) and built a List<JSONObject> that was never used —
        // dead code that risked driver OOM on large datasets. Spark writes the
        // distributed dataset directly; no driver-side materialization needed.
        // The delimiter is now guarded like quote: "," is Spark's CSV default.
        ds.write().mode(SaveMode.Overwrite)
                .option("delimiter", StrUtil.isEmpty(delimiter) ? "," : delimiter)
                .option("quote", StrUtil.isEmpty(quote) ? "" : quote)
                .format("csv")
                .save(path);

        return null;
    }


}
