package com.swsc.ai.tasks;

import com.swsc.ai.config.SparkSqlConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @describe: Wide-table calculation with the fund as the primary entity.
 * @author: DuanCXin
 * @created: 2023-10-20 16:34
 */
public class FundEntityTask extends WideTableTempTask {

    /**
     * Registers the temporary views needed by {@link #calculateResult}: fund archives,
     * risk levels, fund managers, manager personal info, manager return/scale rankings,
     * archive attachments, and NCD (同业存单) product purchase info.
     *
     * @param session  active Spark session used to register the temp views
     * @param whereStr varargs filter values; whereStr[0] is a date string used as the
     *                 lower bound (minus one month) for mf_fmretandscalerank.enddate
     */
    @Override
    public void createView(SparkSession session, String... whereStr) {
        long startTime = System.currentTimeMillis();

        // Fund archives aggregated per maincode; foundedsize is summed and scaled to millions.
        // NOTE(review): the SELECT lists non-aggregated columns alongside GROUP BY maincode —
        // this relies on the source engine tolerating it (MySQL-style); confirm it is intended.
        Dataset<Row> mf_archives_df = SparkSqlConf.getDataByCompleteSQL(session, "select innercode, maincode, companycode, typecode, " +
                "fundnatureid, investmenttypecode,investstylecode, fundtypecode, floattypecode, ceil(sum(foundedsize)/1000000) as foundedsize, " +
                "investadvisorcode from mf_archives where updatestate <= 1 GROUP BY maincode");
        mf_archives_df.createOrReplaceTempView("mf_archives");

        // Fund risk levels (all rows; latest-per-fund is selected later via ROW_NUMBER).
        Dataset<Row> mf_fundrisklevel_df = SparkSqlConf.getDataBySql(session, "mf_fundrisklevel", "1 = 1", "InnerCode",
                "RiskLevel", "EndDate");
        mf_fundrisklevel_df.createOrReplaceTempView("mf_fundrisklevel");

        // Fund manager assignments; filtered later to incumbent lead managers.
        Dataset<Row> mf_fundmanager_df = SparkSqlConf.getDataBySql(session, "mf_fundmanager", "updatestate <= 1", "InnerCode",
                "incumbent", "postnamecode","personalcode", "updatetime");
        mf_fundmanager_df.createOrReplaceTempView("mf_fundmanager");

        // Personal details of fund managers (gender, nationality, birth date, education, experience).
        Dataset<Row> mf_personalinfo_df = SparkSqlConf.getDataBySql(session, "mf_personalinfo", "updatestate <= 1", "personalcode",
                "gendercode", "nationalitycode","birthdate","educationcode","experiencetime");
        mf_personalinfo_df.createOrReplaceTempView("mf_personalinfo");

        // Manager return/scale rankings within the last month relative to whereStr[0].
        // NOTE(review): whereStr[0] is concatenated directly into the SQL text — if this value
        // can ever come from untrusted input, this is a SQL-injection risk; confirm the caller
        // always supplies a validated date literal, or switch to a parameterized query.
        Dataset<Row> mf_fmretandscalerank_df = SparkSqlConf.getDataByCompleteSQL(session, "SELECT personalcode, returntypeavg, returntyperank, monrettypeavg, monrettyperank,\n" +
                "             totalaumtypeavg, totalaumrank, avgaumtypeavg,\n" +
                "             avgaumtyperank, enddate FROM mf_fmretandscalerank  \n" +
                "WHERE enddate >= DATE_SUB('"+whereStr[0]+"', INTERVAL 1 MONTH)");
        mf_fmretandscalerank_df.createOrReplaceTempView("mf_fmretandscalerank");

        // Archive attachment data codes (pipe-joined per fund in the final query).
        Dataset<Row> mf_archivesattach_df = SparkSqlConf.getDataBySql(session, "mf_archivesattach", "updatestate <= 1", "innercode","datacode");
        mf_archivesattach_df.createOrReplaceTempView("mf_archivesattach");

        // Negotiable certificate of deposit (NCD) products, matched by name.
        Dataset<Row> t_prod_buy_info_df = SparkSqlConf.getDataBySql(session, "t_prod_buy_info", "prod_name LIKE '%同业存单%'", "prod_code","prod_name");
        t_prod_buy_info_df.createOrReplaceTempView("t_prod_buy_info");

        long endTime = System.currentTimeMillis();
        long totalTime = endTime - startTime;
        System.out.println("创建独立视图成功！运行时间： " + totalTime + " 毫秒");
    }

    /**
     * Joins the temp views created by {@link #createView} into one fund-centric wide
     * table and writes it out as a single CSV file with a header row.
     *
     * @param session  active Spark session holding the temp views
     * @param dt       partition/date argument from the base class (unused here)
     * @param fileName output path for the CSV result
     */
    @Override
    public void calculateResult(SparkSession session, String dt, String fileName) {
        // Wide-table query: fund archives left-joined with attachments, latest risk level,
        // latest incumbent lead manager, that manager's personal info and latest rankings,
        // and an NCD flag. "Latest" rows are picked with ROW_NUMBER() ... ORDER BY date DESC.
        Dataset<Row> sqlDF = session.sql("select ma.*, mas.datacode, mrl.RiskLevel, mfm.personalcode, mpi.gendercode, mpi.nationalitycode,mpi.age,\n" +
                "       mpi.educationcode,mpi.experiencetime,mft.returntypeavg,\n" +
                "       mft.returntyperank, mft.monrettypeavg, mft.monrettyperank,\n" +
                "       mft.totalaumtypeavg, mft.totalaumrank, mft.avgaumtypeavg,\n" +
                "       mft.avgaumtyperank, mft.enddate, tb.prod_name\n" +
                "from(\n" +
                "     select innercode, maincode, companycode, typecode, fundnatureid, investmenttypecode,\n" +
                "            investstylecode, fundtypecode, floattypecode, foundedsize, investadvisorcode\n" +
                "     from mf_archives\n" +
                ") as ma\n" +
                "left join (\n" +
                "    select innercode, concat_ws('|', collect_set(datacode)) as datacode from mf_archivesattach group by innercode\n" +
                ") as mas\n" +
                "on mas.innercode = ma.innercode\n" +
                "left join (\n" +
                "    select t.InnerCode, t.RiskLevel from\n" +
                "        (select InnerCode, RiskLevel, ROW_NUMBER() OVER (PARTITION BY InnerCode ORDER BY EndDate DESC) AS row_num from mf_fundrisklevel) as t\n" +
                "    WHERE t.row_num = 1\n" +
                ") as mrl\n" +
                "on ma.innercode = mrl.InnerCode\n" +
                "left join (\n" +
                "    select t.innercode, t.personalcode from\n" +
                "        (select innercode, incumbent, postnamecode,personalcode, ROW_NUMBER() OVER (PARTITION BY innercode ORDER BY updatetime DESC) AS row_num from mf_fundmanager) as t\n" +
                "    WHERE t.row_num = 1 and t.incumbent = 1 and t.postnamecode = 1\n" +
                ") as mfm\n" +
                "on mfm.innercode = ma.innercode\n" +
                "left join (\n" +
                "    select\n" +
                "        personalcode, gendercode, nationalitycode,\n" +
                "        TIMESTAMPDIFF(YEAR, birthdate, NOW()) AS age,\n" +
                "        educationcode,experiencetime\n" +
                "    from mf_personalinfo\n" +
                ") as mpi\n" +
                "on mpi.personalcode = mfm.personalcode \n" +
                "left join (\n" +
                "    select t.* from\n" +
                "        (select\n" +
                "             personalcode, returntypeavg, returntyperank, monrettypeavg, monrettyperank,\n" +
                "             totalaumtypeavg, totalaumrank, avgaumtypeavg,\n" +
                "             avgaumtyperank, enddate, ROW_NUMBER() OVER (PARTITION BY personalcode ORDER BY enddate DESC) AS row_num from mf_fmretandscalerank) as t\n" +
                "    WHERE t.row_num = 1\n" +
                ") as mft\n" +
                "on mft.personalcode = mfm.personalcode \n" +
                "left join (SELECT distinct prod_code, 'NCD' as prod_name FROM t_prod_buy_info) as tb on tb.prod_code = ma.maincode");
        // Write the result to HDFS as a single CSV file (coalesce(1) => one output part).
        sqlDF.coalesce(1).write()
                .format("csv")
                .option("header", "true")
                // FIX: the save mode must be set via DataFrameWriter.mode(); "mode" is not a
                // recognized write option (it is a read-side CSV parse option), so the original
                // .option("mode", "overwrite") was ignored and reruns failed with
                // "path already exists" instead of overwriting.
                .mode("overwrite")
                .save(fileName);
    }
}
