package com.bleeth.flow.step.input;


import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.ArrayUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.extra.ssh.JschUtil;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.bleeth.flow.core.util.SparkUtil;
import com.jcraft.jsch.Session;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * @author ：Bleeth
 * @date ：2021-08-05
 * @description：读取shell命令中的数据
 */
@Data
@PluginAnnotation(name = "自定义聚合",
        type = PluginTypeEnum.AGG,
        description = "",
        id = "ShellReaderPlugin")
public class ShellReaderPlugin extends APlugin {


    private String host;
    private String password;

    private String username;
    private Integer port;

    private String command;

    private List<String> useCommandList;

    @Override
    public Dataset<Row> action(Dataset<Row> param, Map<String, WorkerWrapper> allWrappers) {
        super.action(param, allWrappers);


        Session session = null;
        try {
            session = JschUtil.openSession(host, port, username, password, 5 * 1000);
        } catch (Exception exception) {
            return null;
        }
        if (!useCommandList.contains(command)) {
            return null;
        }

        String result = JschUtil.exec(session, command + " |awk '{print $}'",
                Charset.forName("utf8"));


        List<String> headerList =  CollUtil.newArrayList();
        List<String> strRowList =  CollUtil.newArrayList();
        //分析记录数


        List<Row> rowLineList = new ArrayList<>();


        //读取记录


        Row row = RowFactory.create(ArrayUtil.toArray(strRowList, String.class));
        rowLineList.add(row);


        //第一行作为表头
        List schemaFields = new ArrayList();
        for (Object headerObj : headerList) {
            String header = (String) headerObj;
            schemaFields.add(DataTypes.createStructField(header, DataTypes.StringType, true));
        }
        StructType schema = DataTypes.createStructType(schemaFields);

        Dataset<Row> ds = SparkUtil.getSparkInstance().createDataFrame(rowLineList, schema);
        return ds;
    }


    /**
     * 查询一个命令前n行结果的第m列的所有数据
     *
     * @param command   命令
     * @param rowSize   前rowSize行
     * @param cellIndex 列索引，从1开始
     * @return 列号对应的数据
     */
    public static List<String> queryRow(Session session, String command, int rowSize, int cellIndex) {
        String headCommand = StrUtil.format("head -{}", rowSize);
        String awkCommand = StrUtil.format("awk '{print ${}}'", cellIndex);
        String result = JschUtil.exec(session, command + "|" + awkCommand + "|" + headCommand, Charset.forName("utf-8"));
        List<String> spList = StrUtil.splitTrim(result, "\n");
        return spList;
    }

    public static List<String> queryCell(Session session, String command, int cellIndex) {

        return CollUtil.newArrayList();
    }


    public static List<List<String>> queryCommand(Session session, String command) {

        return CollUtil.newArrayList();
    }
}
