package com.calabar.phm.etl.operator.api;

import org.apache.spark.sql.*;
import org.apache.spark.sql.catalyst.expressions.MonotonicallyIncreasingID;

import java.io.Serializable;

/**
 * Base class for ETL operators (算子基类).
 *
 * <p>Each operator is identified by a {@code uid} and configured by a JSON
 * parameter string; subclasses implement {@link #validateParams()} to check
 * their own configuration. The persist helpers round-trip a {@code Dataset}
 * through a Hive table while preserving row order.
 *
 * @author zmc &lt;mingcheng.zhang@cdcalabar.com&gt;
 * @version v1.0
 */
public abstract class ITransformer implements Serializable {

    // Instances are serialized to Spark executors; pin the version id so
    // recompiles don't break deserialization compatibility.
    private static final long serialVersionUID = 1L;

    private final String uid;        // operator identifier
    private final String paramsJson; // operator parameters, as a JSON string

    // Helper column appended on write so the original row order can be
    // restored on read (see writePersistData / readPersistData).
    private static final String PERSIST_ORDER_COL = "persist_order_col";

    /**
     * @param uid        operator identifier
     * @param paramsJson operator parameters as a JSON string
     */
    public ITransformer(String uid, String paramsJson) {
        this.uid = uid;
        this.paramsJson = paramsJson;
    }

    /**
     * Validates this operator's parameters ({@link #getParamsJson()}).
     *
     * @throws Exception if the parameters are missing or malformed
     */
    public abstract void validateParams() throws Exception;

    /**
     * Returns the operator identifier.
     *
     * @return the uid passed at construction
     */
    public String getUid() {
        return uid;
    }

    /**
     * Returns the operator parameters.
     *
     * @return the JSON parameter string passed at construction
     */
    public String getParamsJson() {
        return paramsJson;
    }

    /**
     * Reads previously persisted data from a Hive table, restoring the row
     * order recorded by {@link #writePersistData(Dataset, String)}.
     *
     * @param session       active Spark session
     * @param hiveTableName Hive table written by {@code writePersistData}
     * @return the dataset in its pre-persist row order, without the helper column
     * @throws Exception if the table cannot be read
     */
    public Dataset<Row> readPersistData(SparkSession session, String hiveTableName) throws Exception {
        try {
            // Sort by the persisted sequence column to restore the original
            // order, then drop the helper column so callers never see it.
            return session.table(hiveTableName)
                    .sort(PERSIST_ORDER_COL)
                    .drop(PERSIST_ORDER_COL);
        } catch (Exception ex) {
            throw new Exception("读取持久化数据错误！", ex);
        }
    }

    /**
     * Persists a dataset to a Hive table, recording the current row order in
     * an extra column (per the original author's note, Parquet persistence in
     * Spark 2.1.1 does not preserve sort order).
     *
     * @param dataset       dataset to persist
     * @param hiveTableName target Hive table; NOTE(review): saveAsTable is
     *                      called without a SaveMode, so the default
     *                      ErrorIfExists applies — confirm callers expect this
     * @throws Exception if the write fails
     */
    public void writePersistData(Dataset<Row> dataset, String hiveTableName) throws Exception {
        try {
            // Use the public functions API instead of constructing the internal
            // Catalyst expression (new Column(new MonotonicallyIncreasingID())):
            // org.apache.spark.sql.catalyst is not a stable public API and its
            // classes change between Spark versions.
            dataset.withColumn(PERSIST_ORDER_COL, functions.monotonically_increasing_id())
                    .write()
                    .saveAsTable(hiveTableName);
        } catch (Exception ex) {
            throw new Exception("写出持久化数据错误！", ex);
        }
    }

}
