package com.cl.spark.node;

import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import com.cl.pile.rule.process.RuleMatcher;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

@Component
public class RuleNode extends BaseSparkNode {

  /**
   * Applies the node's configured rule expression to every row of the upstream dataset
   * and returns only the rows that produced an error or tip message.
   *
   * <p>The input dataset is taken from the first upstream {@code SparkResult}; the rule
   * expression is read from this node's expression JSON under the key
   * {@code "ruleExpression"}. Each surviving row carries the joined messages in a
   * trailing {@code "msg"} column.
   *
   * @param sparkParam carries the upstream result list and this node's expression JSON
   * @return a successful {@link SparkResult} wrapping the rows whose {@code msg} is non-empty
   */
  @Override
  public SparkResult process(SparkParam sparkParam) {

    Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
    JSONObject expressionObj = sparkParam.getNodeExpression();
    // Captured by the closure as a plain String (serialization-safe across executors);
    // it is parsed into a JSONObject per row below.
    String ruleExpression = expressionObj.getString("ruleExpression");

    // Append an empty "msg" column up front so the mapped rows keep a stable
    // schema and the original encoder can be reused.
    Dataset<Row> newDataset = dataset.withColumn("msg", functions.lit(""));

    Dataset<Row> mapDataset =
        newDataset.map(
            (MapFunction<Row, Row>)
                row -> {
                  List<String> errorList = new ArrayList<>();
                  List<String> tipsList = new ArrayList<>();
                  JSONObject cpJson = new JSONObject();
                  // execMatchRule populates errorList/tipsList as a side effect;
                  // its boolean result is not used by this node.
                  RuleMatcher.execMatchRule(
                      JSONObject.parseObject(row.json()),
                      JSONObject.parseObject(ruleExpression),
                      cpJson,
                      errorList,
                      tipsList);
                  // Errors take priority over tips when both lists are non-empty.
                  String msg = "";
                  if (!errorList.isEmpty()) {
                    msg = String.join(",", errorList);
                  } else if (!tipsList.isEmpty()) {
                    msg = String.join(",", tipsList);
                  }
                  // Copy every original column (all but the trailing "msg" placeholder),
                  // then append the computed message as the last field. The message is
                  // stored as-is — no leading-space padding.
                  List<Object> values = new ArrayList<>(row.length());
                  for (int i = 0; i < row.length() - 1; i++) {
                    values.add(row.get(i));
                  }
                  values.add(msg);
                  // Build the new Row containing the original fields plus the message.
                  return RowFactory.create(values.toArray(new Object[0]));
                },
            newDataset.encoder());

    // Keep only the rows that actually produced a message.
    Dataset<Row> filteredDF =
        mapDataset.filter(
            functions.col("msg").isNotNull().and(functions.col("msg").notEqual("")));

    return SparkResult.success(filteredDF);
  }

  /** @return the node type this component handles */
  @Override
  public SparkNodeEnum getType() {
    return SparkNodeEnum.RULE_HANDLE;
  }
}
