package com.swsc.ai.tasks;

import com.swsc.ai.enums.RecallTaskEnum;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.SaveMode;
import java.util.Map;
import java.util.Properties;
import java.util.stream.IntStream;

/**
 * @describe: 人群圈选标签写入MySQL
 * @author: 容若
 * @created: 2023-12-05 16:37
 */
public class WriteDataToMySqlTask extends WriteDataTempTask {

    /** Number of partitions used for the parallel JDBC append. */
    private static final int WRITE_PARTITIONS = 10;

    /**
     * Reads the crowd-selection wide table from HDFS, attaches the group /
     * audit columns and appends the result to the configured MySQL table.
     *
     * @param session   active Spark session used to read the CSV and run the write
     * @param mySqlEnvMap connection settings; must contain keys
     *                    {@code user}, {@code password}, {@code driver},
     *                    {@code url} and {@code table}
     * @param hdfsPath  path of the wide-table CSV (with header row)
     * @param taskNum   task number resolved to a group name via
     *                  {@link RecallTaskEnum#taskName(String)}
     * @throws IllegalStateException if the wide table cannot be loaded from {@code hdfsPath}
     */
    @Override
    public void writeDataToMySql(SparkSession session, Map<String, String> mySqlEnvMap, String hdfsPath, String taskNum) {
        Dataset<Row> dataset = createRealView(session, hdfsPath);

        // Pseudo-random six-digit group id in [100000, 999999] — same range as the
        // original (Math.random()*9+1)*10^5 expression, written without Math.pow.
        String groupId = String.valueOf(100_000 + (int) (Math.random() * 900_000));

        dataset = dataset
                .withColumn("group_name", functions.lit(RecallTaskEnum.taskName(taskNum)))
                .withColumn("group_id", functions.lit(groupId))
                .withColumn("creater", functions.lit("sparkTask"))
                .withColumn("updater", functions.lit("sparkTask"))
                .withColumn("del_flag", functions.lit(1))
                .withColumn("created_time", functions.current_timestamp())
                .withColumn("update_time", functions.current_timestamp());

        // BUG FIX: the original exploded a 10-element literal array and then dropped
        // the helper column — that multiplied every output row by 10 in MySQL.
        // If the goal was write parallelism, repartition gives the same number of
        // concurrent JDBC connections without duplicating data.
        dataset = dataset.repartition(WRITE_PARTITIONS);

        Properties properties = new Properties();
        properties.setProperty("user", mySqlEnvMap.get("user"));
        properties.setProperty("password", mySqlEnvMap.get("password"));
        properties.setProperty("driver", mySqlEnvMap.get("driver"));
        String url = mySqlEnvMap.get("url");
        String table = mySqlEnvMap.get("table");

        dataset.write().mode(SaveMode.Append).jdbc(url, table, properties);
    }

    /**
     * Loads the wide-table CSV (first line treated as header) from the given path.
     *
     * <p>The original implementation swallowed the exception and returned
     * {@code null}, which guaranteed a later NullPointerException in the caller
     * with the real cause lost; we now fail fast with context and cause.
     *
     * @param session  Spark session to read with
     * @param hdfsPath CSV location on HDFS (or any Spark-readable filesystem)
     * @return the loaded wide table, never {@code null}
     * @throws IllegalStateException if the path cannot be read
     */
    private Dataset<Row> createRealView(SparkSession session, String hdfsPath) {
        try {
            return session.read().format("csv").option("header", "true").load(hdfsPath);
        } catch (Exception e) {
            throw new IllegalStateException("未找到有效的宽表数据，查找路径为：" + hdfsPath, e);
        }
    }
}
