package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import scala.collection.JavaConverters;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

@Component
public class StringNode extends BaseSparkNode {
    @Autowired
    SparkSession sparkSession;

    /**
     * Filters the first upstream dataset by a single string-comparison condition.
     *
     * <p>The node expression is expected to contain:
     * <ul>
     *   <li>{@code fields}   — JSON array of column names; only the first entry is used</li>
     *   <li>{@code filterFunction} — Chinese-labelled operator (see {@link #generateFilterColumn})</li>
     *   <li>{@code filterValue}    — the comparison value</li>
     * </ul>
     *
     * @param sparkParam node input; assumes sparkResultList has at least one entry — TODO confirm upstream guarantees this
     * @return a successful SparkResult wrapping the filtered dataset
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
        JSONObject expression = sparkParam.getNodeExpression();

        // Only the first field of the "fields" array participates in the filter.
        JSONArray fields = expression.getJSONArray("fields");
        String field = fields.getString(0);
        String filterFunction = expression.getString("filterFunction");
        String filterValue = expression.getString("filterValue");

        Dataset<Row> filtered = dataset.filter(generateFilterColumn(field, filterFunction, filterValue));
        return SparkResult.success(filtered);
    }

    /**
     * Builds a Spark filter {@link Column} from a field name, a Chinese-labelled
     * comparison operator and a comparison value.
     *
     * @param field          column name to filter on; must be non-blank
     * @param filterFunction operator label (大于, 小于, 包含, 等于, 不等于, 大于等于, 小于等于, 列表包含(或)); must be non-blank
     * @param filterValue    comparison value; may be null for plain comparisons,
     *                       required for 列表包含(或)
     * @return the boolean filter column
     * @throws IllegalArgumentException if field or filterFunction is blank, the
     *         operator is not one of the supported labels, or 列表包含(或) is used
     *         without a value
     */
    public Column generateFilterColumn(String field, String filterFunction, String filterValue) {
        if (!StringUtils.hasLength(field) || !StringUtils.hasLength(filterFunction)) {
            throw new IllegalArgumentException(String.format(
                    "Invalid filter expression - field: %s, function: %s, value: %s",
                    field, filterFunction, filterValue));
        }
        Column column = new Column(field);
        Column filterValueLit = functions.lit(filterValue);
        switch (filterFunction) {
            case "大于":        // greater than
                return column.gt(filterValueLit);
            case "小于":        // less than
                return column.lt(filterValueLit);
            case "包含":        // contains, via SQL LIKE; NOTE(review): % and _ in filterValue act as wildcards — confirm intended
                return column.like("%" + filterValue + "%");
            case "等于":        // equal to
                return column.equalTo(filterValueLit);
            case "不等于":      // not equal to
                return column.notEqual(filterValueLit);
            case "大于等于":    // greater than or equal
                return column.geq(filterValueLit);
            case "小于等于":    // less than or equal
                return column.leq(filterValueLit);
            case "列表包含(或)": // comma-separated list, OR-matched by converting to a regex alternation
                if (filterValue == null) {
                    // Previously an NPE on filterValue.replace(...); fail with a clear message instead.
                    throw new IllegalArgumentException("filterValue is required for 列表包含(或)");
                }
                return column.rlike(filterValue.replace(",", "|"));
            default:
                // Previously fell through and returned the bare column, which surfaces
                // as an opaque Spark analysis error downstream; fail fast instead.
                throw new IllegalArgumentException("Unsupported filter function: " + filterFunction);
        }
    }

    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.STRING;
    }
}
