package com.cl.spark.node;

import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import com.cl.spark.util.SparkUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;


@Component
public class DataSourceNode extends BaseSparkNode {

    @Autowired
    SparkSession sparkSession;

    /** Max rows fetched per JDBC query; also the page size for batched reads. */
    @Value("${spark.query.batchSize:500000}")
    Integer sparkQueryBatchSize;

    /**
     * Reads rows from a JDBC table described by the node expression and returns
     * them as an all-string {@link Dataset}, tagged with TABLE_NAME / DB_CODE columns.
     *
     * <p>Expression keys read: targetTable, dbCode, url, username, password,
     * skip (row offset, optional, defaults to 0), limit (row count, optional),
     * fieldsAs (optional projection/alias list).
     *
     * <p>NOTE(review): the paging SQL uses the MySQL-style {@code LIMIT offset,count}
     * syntax — confirm the target JDBC source is MySQL-compatible.
     *
     * <p>SECURITY(review): targetTable / fieldsAs are concatenated directly into SQL.
     * If the node expression can carry untrusted input, this is an SQL-injection
     * vector; values should be validated against a whitelist upstream.
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        JSONObject expression = sparkParam.getNodeExpression();
        String tableName = String.valueOf(expression.get("targetTable"));
        String dbCode = String.valueOf(expression.get("dbCode"));

        String url = expression.getString("url");
        String username = expression.getString("username");
        String password = expression.getString("password");
        String skip = expression.getString("skip");
        String limit = expression.getString("limit");
        String fieldsAs = expression.getString("fieldsAs");

        // Base select, tagging every row with its source table and db code.
        String select = "select *,'" + tableName + "' as TABLE_NAME,'" + dbCode + "' as DB_CODE " + " from " + tableName;
        if (StringUtils.hasLength(fieldsAs)) {
            select = "select " + fieldsAs + ",'" + tableName + "' as TABLE_NAME,'" + dbCode + "' as DB_CODE " + " from " + tableName;
        }

        int batchSize = sparkQueryBatchSize;
        // Default the offset once, for every branch (previously a missing "skip"
        // produced invalid SQL such as "limit null,500000" in the no-limit path).
        int offset = StringUtils.hasLength(skip) ? Integer.parseInt(skip) : 0;

        Dataset<Row> dataset;
        if (limit != null && !limit.isEmpty()) {
            int total = Integer.parseInt(limit);
            if (total > batchSize) {
                // Large request: page through in batchSize chunks and union the pages.
                dataset = null;
                for (int i = offset; i < total; i += batchSize) {
                    // Cap the final page so we never read past the requested limit.
                    int pageSize = Math.min(batchSize, total - i);
                    Dataset<Row> page = readJdbc(url, username, password,
                            select + " limit " + i + "," + pageSize, tableName);
                    dataset = (dataset == null) ? page : dataset.unionAll(page);
                }
                if (dataset == null) {
                    // offset >= total: return an empty (schema-only) dataset
                    // instead of letting a null NPE downstream.
                    dataset = readJdbc(url, username, password, select + " limit 0", tableName);
                }
            } else if (total == 0) {
                // limit == 0: schema-only read.
                dataset = readJdbc(url, username, password, select + " limit 0", tableName);
            } else {
                // Small request: honor the requested limit (previously queried
                // batchSize rows, over-fetching when limit < batchSize).
                dataset = readJdbc(url, username, password,
                        select + " limit " + offset + "," + total, tableName);
            }
        } else {
            // No limit supplied: fetch a single batch from the given offset.
            dataset = readJdbc(url, username, password,
                    select + " limit " + offset + "," + batchSize, tableName);
        }

        // Normalize every column to string for downstream nodes.
        Dataset<Row> stringDataset = SparkUtil.toStringDataset(dataset);

        return SparkResult.success(stringDataset, tableName);
    }

    /**
     * Executes one JDBC read of {@code dbTable} (a full SELECT statement) as a
     * derived table aliased {@code temp_<tableName>}.
     */
    private Dataset<Row> readJdbc(String url, String username, String password,
                                  String dbTable, String tableName) {
        return sparkSession.read()
                .format("jdbc")
                .option("url", url)
                .option("dbtable", "( " + dbTable + " ) as temp_" + tableName)
                .option("user", username)
                .option("password", password)
                .load();
    }

    /** Identifies this node as the data-source node type. */
    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.DATA_SOURCE;
    }

}
