package com.swsc.ai.tasks;

import com.swsc.ai.config.SparkSqlConf;
import com.swsc.ai.constant.EnvConstant;
import com.swsc.ai.enums.TFEnum;
import com.swsc.ai.taskfactory.Task;
import org.apache.spark.sql.SparkSession;

import java.util.HashMap;
import java.util.Map;

/**
 * Template task that writes data to MySQL.
 *
 * <p>Reads runtime options from the task config map, resolves the MySQL connection
 * settings for either the dev or prod environment, builds a {@link SparkSession},
 * and delegates the actual write to the subclass hook {@link #writeDataToMySql}.
 *
 * @author 容若
 * @since 2023-12-05 16:39
 */
public abstract class WriteDataTempTask implements Task {

    /**
     * Entry point invoked by the task framework.
     *
     * <p>Recognized keys in {@code configMap} (all optional, defaults shown):
     * <ul>
     *   <li>{@code isTest}    — whether to target the dev environment (default: {@code TFEnum.FALSE.getName()})</li>
     *   <li>{@code className} — Spark application/session name (default: {@code "test"})</li>
     *   <li>{@code taskNum}   — which audience-selection task to run (default: {@code "1"})</li>
     *   <li>{@code hdfsPath}  — HDFS source path for the data (default: {@code "test"})</li>
     * </ul>
     *
     * @param configMap task configuration; must not be null
     * @throws Exception propagated from session creation or the subclass write
     */
    @Override
    public void run(Map<String, String> configMap) throws Exception {
        final String isTest = configMap.getOrDefault("isTest", TFEnum.FALSE.getName());
        final String className = configMap.getOrDefault("className", "test");
        // Task number: 1 = interbank certificate-of-deposit audience selection,
        //              2 = index-fund interest audience, 3 = stock-trading interest audience
        final String taskNum = configMap.getOrDefault("taskNum", "1");
        final String hdfsPath = configMap.getOrDefault("hdfsPath", "test");

        final boolean useDevEnv = TFEnum.TRUE.equals(TFEnum.fromTypeName(isTest));
        final Map<String, String> mySqlEnvMap = buildMySqlEnv(useDevEnv);

        SparkSession session = SparkSqlConf.getSession(className);
        writeDataToMySql(session, mySqlEnvMap, hdfsPath, taskNum);
    }

    /**
     * Builds the MySQL connection property map for the selected environment and,
     * as a side effect, points {@code SparkSqlConf.envMap} at the matching Spark env.
     *
     * @param useDevEnv true to use dev constants, false for prod
     * @return mutable map with keys {@code table}, {@code url}, {@code driver},
     *         {@code user}, {@code password}
     */
    private Map<String, String> buildMySqlEnv(boolean useDevEnv) {
        Map<String, String> mySqlEnvMap = new HashMap<>();
        // Target table is the same for both environments.
        mySqlEnvMap.put("table", EnvConstant.MYSQL_TABLE);
        if (useDevEnv) {
            mySqlEnvMap.put("url", EnvConstant.MYSQL_URL_DEV);
            mySqlEnvMap.put("driver", EnvConstant.MYSQL_DRIVER_DEV);
            mySqlEnvMap.put("user", EnvConstant.MYSQL_USER_DEV);
            mySqlEnvMap.put("password", EnvConstant.MYSQL_PASSWORD_DEV);
            SparkSqlConf.envMap.put("SPARK", EnvConstant.SPARK_ENV_DEV);
        } else {
            mySqlEnvMap.put("url", EnvConstant.MYSQL_URL_PROD);
            mySqlEnvMap.put("driver", EnvConstant.MYSQL_DRIVER_PROD);
            mySqlEnvMap.put("user", EnvConstant.MYSQL_USER_PROD);
            mySqlEnvMap.put("password", EnvConstant.MYSQL_PASSWORD_PROD);
            SparkSqlConf.envMap.put("SPARK", EnvConstant.SPARK_ENV_PROD);
        }
        return mySqlEnvMap;
    }

    /**
     * Subclass hook: performs the actual write of data (read from {@code hdfsPath})
     * into MySQL using the supplied connection settings.
     *
     * @param session     active Spark session
     * @param mySqlEnvMap MySQL connection settings ({@code table}, {@code url},
     *                    {@code driver}, {@code user}, {@code password})
     * @param hdfsPath    HDFS path of the source data
     * @param taskNum     task selector (see {@link #run})
     */
    public abstract void writeDataToMySql(SparkSession session, Map<String, String> mySqlEnvMap, String hdfsPath, String taskNum);
}
