package com.navinfo.tripanalysis.offline.service.impl;

import com.navinfo.tripanalysis.common.util.CommonUtils;
import com.navinfo.tripanalysis.offline.pojo.LoadDataParam;
import com.navinfo.tripanalysis.offline.util.BigDataUtils;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.math.BigDecimal;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * Abstract base class for loading data from a Hive table, either through a
 * Hive SQL query (default) or by reading the table's backing parquet files
 * directly from HDFS.
 *
 * <p>Subclasses must supply the select columns ({@link #getSelectColumns()})
 * and the Hive where clause ({@link #getWhereSql(LoadDataParam)}); the HDFS
 * hooks ({@link #getHDFSRealPaths(LoadDataParam)},
 * {@link #getHDFSWhereSql(LoadDataParam)}) are optional.
 *
 * @author web
 */
@Data
public abstract class AbstractHiveLoadService {
    protected final Logger logger = LoggerFactory.getLogger(getClass());
    /**
     * How the data is loaded (HIVE SQL query vs. direct HDFS parquet read);
     * {@code null} falls back to HIVE.
     */
    protected BigDataUtils.LoadType loadType;
    /**
     * Root HDFS directory backing the Hive table.
     */
    protected String hdfsPath;
    /**
     * Hive database name; when empty, no {@code db.} prefix is used in the SQL.
     */
    protected String dbName;
    /**
     * Hive table name.
     */
    protected String tableName;

    /**
     * Loads rows according to {@link #loadType}.
     *
     * @param spark the active Spark session
     * @param param parameters used to build the where clause / HDFS paths
     * @return the loaded rows; {@code null} when HDFS loading is selected but
     *         {@code param} is null or no paths are resolved — callers must
     *         handle a null return
     */
    protected JavaRDD<Row> loadData(SparkSession spark, LoadDataParam param) {
        long startTime = System.currentTimeMillis();
        // info, not error: this is a progress message, not a failure
        logger.info("通过Hive加载数据，loadType:{}", loadType);

        // Default (null) or explicit HIVE -> SQL query; anything else -> HDFS parquet.
        if (null == loadType || BigDataUtils.LoadType.HIVE.equals(loadType)) {
            return loadFromHive(spark, param, startTime);
        }
        return loadFromHdfs(spark, param, startTime);
    }

    /**
     * Builds and runs a Hive SQL query from the subclass-provided columns and
     * where clause.
     */
    private JavaRDD<Row> loadFromHive(SparkSession spark, LoadDataParam param, long startTime) {
        List<String> selectList = getSelectColumns();
        // An empty (not just null) column list would render "select " with no
        // columns — invalid SQL — so both cases fall back to "*".
        String columns = (null != selectList && !selectList.isEmpty())
                ? CommonUtils.mkString(selectList, ",")
                : "*";
        String sql = new StringBuilder()
                .append("select ").append(columns).append(" ")
                .append("\n")
                .append(" from ")
                .append(StringUtils.isNotEmpty(dbName) ? dbName + "." : "")
                .append(tableName)
                .append("\n")
                .append(" where ")
                .append(getWhereSql(param))
                .toString();
        // info, not error: routine SQL trace
        logger.info("load data sql:-----------------\n{}", sql);

        JavaRDD<Row> rowJavaRDD = spark.sql(sql).toJavaRDD();

        logger.info("通过Hive加载数据结束，参数：{}，耗时{}ms", param, System.currentTimeMillis() - startTime);
        return rowJavaRDD;
    }

    /**
     * Reads the table's parquet files directly from HDFS and applies the
     * subclass-provided projection and filter.
     */
    private JavaRDD<Row> loadFromHdfs(SparkSession spark, LoadDataParam param, long startTime) {
        if (null != param) {
            List<String> realPaths = getHDFSRealPaths(param);
            if (null != realPaths && !realPaths.isEmpty()) {
                Dataset<Row> dataSet = spark.read().parquet(realPaths.toArray(new String[0]));

                String whereStr = getHDFSWhereSql(param);
                logger.info("HDFS过滤条件为：{}", whereStr);

                List<String> selectList = getSelectColumns();
                if (null != selectList && !selectList.isEmpty()) {
                    Column[] columns = selectList.stream().map(Column::new).toArray(Column[]::new);
                    return dataSet.select(columns).where(whereStr).toJavaRDD();
                }
                return dataSet.where(whereStr).toJavaRDD();
            }
            // fixed: original statement had a dangling "{}" placeholder with no argument
            logger.error("load data hdfs path is null...-----------------");
        }

        logger.info("通过Hive加载数据结束，参数：{}，耗时{}ms", param, System.currentTimeMillis() - startTime);
        return null;
    }

    /**
     * Reads column {@code colName} of {@code row} as an int; a null value
     * yields 0.
     *
     * @param row     source row
     * @param colName column name
     * @return the parsed int value
     * @throws NumberFormatException if the column's string form is not an
     *                               integer (e.g. "12.5")
     */
    protected int getInt(Row row, String colName) {
        return Integer.parseInt(Optional.ofNullable(row.getAs(colName)).orElse("0").toString());
    }

    /**
     * Reads column {@code colName} of {@code row} as a long; a null value
     * yields 0.
     *
     * @param row     source row
     * @param colName column name
     * @return the parsed long value
     * @throws NumberFormatException if the column's string form is not an integer
     */
    protected long getLong(Row row, String colName) {
        return Long.parseLong(Optional.ofNullable(row.getAs(colName)).orElse("0").toString());
    }

    /**
     * Reads column {@code colName} of {@code row} as a float; a null value
     * yields 0.0f.
     *
     * @param row     source row
     * @param colName column name
     * @return the parsed float value
     * @throws NumberFormatException if the column's string form is not numeric
     */
    protected float getFloat(Row row, String colName) {
        return Float.parseFloat(Optional.ofNullable(row.getAs(colName)).orElse("0.0").toString());
    }

    /**
     * Reads column {@code colName} of {@code row} as a double; a null value
     * yields 0.0.
     *
     * @param row     source row
     * @param colName column name
     * @return the parsed double value
     * @throws NumberFormatException if the column's string form is not numeric
     */
    protected double getDouble(Row row, String colName) {
        return Double.parseDouble(Optional.ofNullable(row.getAs(colName)).orElse("0.0").toString());
    }

    /**
     * Reads column {@code colName} of {@code row} as a BigDecimal; a null
     * value yields 0.0.
     *
     * @param row     source row
     * @param colName column name
     * @return the parsed BigDecimal value
     * @throws NumberFormatException if the column's string form is not numeric
     */
    protected BigDecimal getBigDecimal(Row row, String colName) {
        return new BigDecimal(Optional.ofNullable(row.getAs(colName)).orElse("0.0").toString());
    }

    /**
     * Returns the columns to select.<br/>
     * Must be implemented; a null or empty result means select all ({@code *}).
     *
     * @return list of column names, or null for all columns
     */
    protected abstract List<String> getSelectColumns();

    /**
     * Returns the where clause used when loading via Hive SQL.<br/>
     * Must be implemented.
     *
     * @param param the load parameters
     * @return the where-clause SQL fragment (without the {@code where} keyword)
     */
    protected abstract String getWhereSql(LoadDataParam param);

    /**
     * Returns the parquet file paths used when loading via HDFS.<br/>
     * Optional; the default of null disables the HDFS path (loadData then
     * logs an error and returns null).
     *
     * @param param the load parameters
     * @return resolved HDFS paths, or null when unsupported
     */
    protected List<String> getHDFSRealPaths(LoadDataParam param) {
        return null;
    }

    /**
     * Returns the filter condition used when loading via HDFS.<br/>
     * Optional; defaults to the always-true condition {@code 1=1}.
     *
     * @param param the load parameters
     * @return a Spark SQL filter expression
     */
    protected String getHDFSWhereSql(LoadDataParam param) {
        return "1=1";
    }

}
