package com.cl.spark.nodenew;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.springframework.stereotype.Component;
import scala.Tuple2;

import java.util.List;

@Component
public class JoinNode extends BaseSparkNode {

    /**
     * Joins the two upstream result sets on one key field from each side.
     *
     * <p>The node expression supplies:
     * <ul>
     *   <li>{@code joinType} — a UI label; "左连接" (left join) maps to {@code leftouter},
     *       "右连接" (right join) maps to {@code rightouter}, anything else falls back to
     *       an inner join;</li>
     *   <li>{@code fields1}/{@code fields2} — arrays whose first element is the join key
     *       for the first/second input respectively.</li>
     * </ul>
     *
     * <p>Each joined pair is flattened into a single JSON object. On duplicate keys the
     * first input's values win (it is merged last). For a right join the two sides are
     * swapped before merging so the right side's values still take precedence the same way.
     *
     * @param sparkParam carries the node expression and the two input {@link JSONArray}s
     *                   (index 0 and 1 of {@code getDataList()})
     * @return a successful {@link SparkResult} wrapping the merged rows
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        JSONArray res = new JSONArray();

        JSONObject expression = sparkParam.getNodeExpression();
        String joinTypeStr = expression.getString("joinType");
        String joinType = "inner";
        if ("左连接".equals(joinTypeStr)) {        // left join
            joinType = "leftouter";
        } else if ("右连接".equals(joinTypeStr)) { // right join
            joinType = "rightouter";
        }

        String field1 = expression.getJSONArray("fields1").getString(0);
        String field2 = expression.getJSONArray("fields2").getString(0);

        List<JSONArray> dataList = sparkParam.getDataList();
        Dataset<Row> dataset1 = sparkUtil.jsonArrayToDataset(dataList.get(0));
        Dataset<Row> dataset2 = sparkUtil.jsonArrayToDataset(dataList.get(1));

        // The configured key fields may arrive swapped relative to the two inputs.
        // Probe the schemas explicitly instead of letting joinWith throw an
        // AnalysisException and using the catch block as control flow.
        if (!hasColumn(dataset1, field1) && hasColumn(dataset1, field2)) {
            String tmp = field1;
            field1 = field2;
            field2 = tmp;
        }

        Dataset<Tuple2<Row, Row>> joined =
                dataset1.joinWith(dataset2, dataset1.col(field1).equalTo(dataset2.col(field2)), joinType);

        boolean rightJoin = "rightouter".equals(joinType);
        for (Tuple2<Row, Row> pair : joined.collectAsList()) {
            // Outer joins produce a null Row on the unmatched side; rowToJson guards
            // against that (calling row.json() on null would throw an NPE).
            JSONObject item1 = rowToJson(pair._1());
            JSONObject item2 = rowToJson(pair._2());
            if (rightJoin) {
                JSONObject tmp = item1;
                item1 = item2;
                item2 = tmp;
            }
            item2.putAll(item1); // item1's values override item2's on duplicate keys
            res.add(item2);
        }

        return SparkResult.success(res);
    }

    /** Returns {@code true} when the dataset's schema contains a column with the given name. */
    private static boolean hasColumn(Dataset<Row> dataset, String field) {
        for (String name : dataset.columns()) {
            if (name.equals(field)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Converts a Spark {@link Row} to a fastjson object; a {@code null} row
     * (the unmatched side of an outer join) becomes an empty object.
     */
    private static JSONObject rowToJson(Row row) {
        return row == null ? new JSONObject() : JSON.parseObject(row.json());
    }

    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.JOIN;
    }
}
