package com.swsc.ai.tasks;

import com.swsc.ai.enums.TFEnum;
import com.swsc.ai.taskfactory.Task;
import com.swsc.ai.util.HDFSUtil;
import com.swsc.ai.util.SparkUtil;
import org.apache.spark.sql.SparkSession;

import java.sql.SQLException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Map;

/**
 * Template (Template Method pattern) for recall-strategy filtering tasks.
 * <p>
 * {@link #run(Map)} resolves the task parameters, obtains a Spark session,
 * delegates to {@link #createView} and {@link #calculateEstimationResult}
 * (supplied by subclasses), and always stops the session afterwards.
 *
 * @author DuanCXin
 * @since 2023-11-29
 */
public abstract class RecallTemplateTask implements Task {

    /** Date format for the default "dt" parameter, e.g. 20231129. Cached: formatters are immutable and thread-safe. */
    private static final DateTimeFormatter DT_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd");

    @Override
    public void run(Map<String, String> map) throws Exception {
        String projectName = map.getOrDefault("projectName", "ai_marketing");
        String className = map.getOrDefault("className", "test");
        String taskName = map.getOrDefault("taskName", "test");
        String subTaskName = map.getOrDefault("subTaskName", "test");
        String dt = map.getOrDefault("dt", LocalDate.now().format(DT_FORMAT));
        String isTest = map.getOrDefault("isTest", TFEnum.FALSE.getName());
        // Task number: "1" = product purchase probability (tree model), "2" = GBDT+LR.
        String taskNum = map.getOrDefault("taskNum", "1");
        // Full HDFS path of the user-portrait wide table.
        String portraitPath = map.getOrDefault("portraitPath", "test");
        String fileName = HDFSUtil.getOutHDFSPath(projectName, taskName, subTaskName, dt);
        if (TFEnum.TRUE.equals(TFEnum.fromTypeName(isTest))) {
            // Test runs write to a dedicated test output location instead.
            fileName = HDFSUtil.getTestOutHDFSPath(className, subTaskName, dt);
        }
        SparkSession session = SparkUtil.getSession(isTest, className);
        // try/finally guarantees the session is stopped even when a step throws;
        // the original flow leaked the Spark session on any exception.
        try {
            createView(session, dt, portraitPath);
            calculateEstimationResult(session, fileName, taskNum);
        } finally {
            if (session != null) {
                session.stop();
            }
        }
    }

    /**
     * Creates the task-specific temporary view(s) used by the estimation query.
     *
     * @param session active Spark session
     * @param str     positional arguments; {@link #run(Map)} passes (dt, portraitPath)
     * @throws SQLException if view creation fails
     */
    public abstract void createView(SparkSession session, String... str) throws SQLException;

    /**
     * Computes the estimation result and writes it to the resolved output path.
     *
     * @param session   active Spark session
     * @param condition positional arguments; {@link #run(Map)} passes (fileName, taskNum)
     */
    public abstract void calculateEstimationResult(SparkSession session, String... condition);
}
