package avicit.bdp.dds.server.utils;

import avicit.bdp.common.dto.StorageResourceConf;
import avicit.bdp.core.enums.EngineResourceTypeEnum;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.common.utils.uploads.FileAdapterUtils;
import avicit.bdp.dds.dao.entity.UdfFunc;
import avicit.bdp.dds.service.process.ProcessService;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.slf4j.Logger;

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;


/**
 * udf utils
 */
/**
 * Utility class that builds the SQL statements needed to register Hive UDFs:
 * one "add jar" statement per function jar, followed by one
 * "create temporary function" statement per function.
 */
public class UDFUtils {

    /**
     * create function format; {0} = function name, {1} = fully qualified class name.
     * The doubled quotes ('') are MessageFormat escapes that emit a single quote.
     */
    private static final String CREATE_FUNCTION_FORMAT = "create temporary function {0} as ''{1}''";

    /** Utility class — not instantiable. */
    private UDFUtils() {
        throw new UnsupportedOperationException("Utility class, do not instantiate");
    }

    /**
     * create function list
     *
     * @param udfFuncTenantCodeMap key is udf function,value is tenant code
     * @param projectId            project id used to resolve the storage configuration
     * @param logger               logger
     * @return create function list; empty when no udf function resources are supplied
     *         (an empty list rather than null, so callers can iterate safely —
     *         existing isEmpty-style checks behave the same)
     */
    public static List<String> createFuncs(Map<UdfFunc, String> udfFuncTenantCodeMap, String projectId, Logger logger) {

        if (MapUtils.isEmpty(udfFuncTenantCodeMap)) {
            logger.info("can't find udf function resource");
            return new ArrayList<>();
        }
        List<String> funcList = new ArrayList<>();

        // build jar sql
        buildJarSql(funcList, udfFuncTenantCodeMap, projectId);

        // build temp function sql
        buildTempFuncSql(funcList, new ArrayList<>(udfFuncTenantCodeMap.keySet()));

        return funcList;
    }

    /**
     * build jar sql: one "add jar &lt;path&gt;&lt;fileName&gt;" statement per udf entry.
     *
     * @param sqls                 sql list the statements are appended to
     * @param udfFuncTenantCodeMap key is udf function,value is tenant code
     * @param projectId            project id used to resolve the storage configuration
     */
    private static void buildJarSql(List<String> sqls, Map<UdfFunc, String> udfFuncTenantCodeMap, String projectId) {
        StorageResourceConf conf = FileAdapterUtils.getStorageResourceByProjectId(projectId);
        // NOTE(review): assumes conf is never null for a valid projectId — confirm with FileAdapterUtils
        String uploadPath = conf.getDeFaultFs();
        // NOTE(review): if getCode()/getType() return boxed Integers, == compares identity, not value —
        // confirm both are primitive ints (use equals otherwise)
        if (EngineResourceTypeEnum.OSS.getCode() == conf.getType()) {
            uploadPath = conf.getEndPoint() + "/" + conf.getStoragePath() + "/";
        }
        for (Map.Entry<UdfFunc, String> entry : udfFuncTenantCodeMap.entrySet()) {
            sqls.add(String.format("add jar %s%s", uploadPath, entry.getKey().getFileName()));
        }
    }

    /**
     * build temp function sql: one "create temporary function" statement per udf.
     *
     * @param sqls     sql list the statements are appended to
     * @param udfFuncs udf function list
     */
    private static void buildTempFuncSql(List<String> sqls, List<UdfFunc> udfFuncs) {
        if (CollectionUtils.isNotEmpty(udfFuncs)) {
            for (UdfFunc udfFunc : udfFuncs) {
                sqls.add(MessageFormat.format(CREATE_FUNCTION_FORMAT, udfFunc.getFuncName(), udfFunc.getClassName()));
            }
        }
    }

}
