package com.bleeth.flow.step.row;

import cn.hutool.core.util.StrUtil;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.catalyst.encoders.RowEncoder;
import scala.collection.Seq;

import java.io.Serializable;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

/**
 * Row-split plugin: expands one input row into multiple output rows by
 * splitting the value of a single string field on a separator. The output
 * schema is identical to the input schema; each produced row carries one
 * fragment of the split value in place of the original field value.
 *
 * @author ：Bleeth
 * @date ：2021-08-06
 */
@Data
@PluginAnnotation(name = "行拆分",
        type = PluginTypeEnum.ROW,
        description = "一行拆分为多行",
        id = "SplitLinePlugin")
public class SplitLinePlugin extends APlugin implements Serializable {

    private static final long serialVersionUID = 1675576340203329097L;

    /**
     * Name of the field whose value is split.
     */
    private String field;

    /**
     * Separator string used for splitting.
     */
    private String split;

    /**
     * Whether to trim whitespace around each split fragment.
     */
    private boolean trim;

    /**
     * Whether to keep the original field.
     * NOTE(review): this flag is declared but never read in {@code action};
     * implementing it would alter the output schema — confirm intent upstream.
     */
    private boolean addField;

    /**
     * Reads the upstream dataset from the first predecessor plugin and expands
     * each of its rows into one row per split fragment of {@code field}.
     *
     * @param param       unused; the input dataset is taken from the
     *                    predecessor's work result instead
     * @param allWrappers all workers keyed by plugin name; the first entry of
     *                    {@code fromList} (inherited from APlugin) identifies
     *                    the upstream plugin whose result is consumed
     * @return a dataset with the same schema, where each input row is replaced
     *         by one row per split fragment
     */
    @Override
    public Dataset<Row> action(Dataset<Row> param, Map<String, WorkerWrapper> allWrappers) {
        String leftPluginName = fromList.get(0);
        WorkerWrapper leftWrapper = allWrappers.get(leftPluginName);
        Dataset<Row> ds = (Dataset<Row>) leftWrapper.getWorkResult().getResult();

        return ds.flatMap((FlatMapFunction<Row, Row>) row -> {
            List<Row> result = new LinkedList<>();
            int index = row.fieldIndex(field);
            // getAs performs an unchecked cast; assumes the column holds
            // strings — TODO confirm upstream schema guarantees this.
            String value = row.getAs(index);
            List<String> parts = StrUtil.split(value, this.split);
            if (parts == null || parts.isEmpty()) {
                // Bug fix: a null/unsplittable field value used to yield an
                // empty iterator, silently dropping the whole row from the
                // output. Keep the original row unchanged instead.
                result.add(row);
                return result.iterator();
            }
            // Copy the row's values out of the Seq once; clone per output row
            // because RowFactory.create retains the array it is handed, so a
            // shared array would corrupt previously emitted rows.
            Seq<Object> objectSeq = row.toSeq();
            Object[] base = new Object[objectSeq.size()];
            objectSeq.copyToArray(base);
            for (String part : parts) {
                Object[] values = Arrays.copyOf(base, base.length);
                values[index] = trim ? StrUtil.trim(part) : part;
                result.add(RowFactory.create(values));
            }
            return result.iterator();
        }, RowEncoder.apply(ds.schema()));
    }

}
