package com.swsc.ai.tasks;

import com.swsc.ai.config.SparkSqlConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.net.URL;
import java.sql.SQLException;

/**
 * @describe: Evaluates forecast quality by joining real purchase labels, portrait
 *            features and forecast scores, then writing confusion counts to CSV.
 * @author: 容若
 * @created: 2023-12-28 14:18
 */
public class MyForecast extends ForecastTempTask {

    /**
     * Registers the temp views consumed by {@link #calculateEstimationResult}.
     *
     * @param session active Spark session
     * @param str     str[0] = portrait wide-table CSV path, str[1] = forecast result
     *                path (tab-separated), str[2] = real-label CSV path
     *                — NOTE(review): order inferred from usage below; confirm with caller
     * @throws SQLException propagated from {@code SparkSqlConf.getDataByCompleteSQL}
     */
    @Override
    public void createView(SparkSession session, String... str) throws SQLException {
        Dataset<Row> realView = loadHeaderedCsv(session, str[2], ",");
        realView.createOrReplaceTempView("realView");

        Dataset<Row> portrait = loadHeaderedCsv(session, str[0], ",");
        portrait.createOrReplaceTempView("portraitView");

        // The forecast result file is tab-separated, unlike the other two inputs.
        Dataset<Row> forecast = loadHeaderedCsv(session, str[1], "\t");
        forecast.createOrReplaceTempView("forecast");

        String sqlStr = "SELECT cust_no, prod_code FROM t_prod_buy_info WHERE create_time >= '20231206' and prod_code != 'S00001'";
        Dataset<Row> data = SparkSqlConf.getDataByCompleteSQL(session, sqlStr);
        data.createOrReplaceTempView("buyProdInfo");
    }

    /**
     * Computes the buy / forecast-buy confusion counts and writes them to CSV.
     * <p>
     * Per client: {@code isBuy} = 1 when the client appears in {@code buyProdInfo},
     * {@code isForecastBuy} = 1 when the forecast score is &gt;= 0.8. Counts are
     * emitted both overall (label = null) and broken down per label.
     *
     * @param session active Spark session
     * @param str     unused here; views were prepared by {@link #createView}
     */
    @Override
    public void calculateEstimationResult(SparkSession session, String... str) {
        Dataset<Row> buyDF = session.sql("select (case\n" +
                "       when b.cust_no is null then 0\n" +
                "       else 1\n" +
                "   end) as isBuy,\n" +
                "   (case\n" +
                "       when f.score < 0.8 then 0\n" +
                "       else 1\n" +
                "   end) as isForecastBuy, r.client_id, r.label\n" +
                "from (\n" +
                "    select client_id,label from realView\n" +
                ") as r\n" +
                "left join (\n" +
                "    select client_id,score from forecast\n" +
                ") as f\n" +
                "on r.client_id = f.client_id\n" +
                "left join (\n" +
                "    select cust_no, prod_code FROM buyProdInfo\n" +
                ") as b\n" +
                "on r.client_id = b.cust_no");
        buyDF.createOrReplaceTempView("buyDF");

        // Overall counts carry a null label so the two result sets share one schema.
        Dataset<Row> resDF = session.sql("SELECT isBuy, isForecastBuy,null as label, count(*) as total FROM buyDF GROUP BY isBuy, isForecastBuy");
        Dataset<Row> resLabelDF = session.sql("SELECT isBuy, isForecastBuy,label,count(*) as total FROM buyDF GROUP BY isBuy, isForecastBuy,label");
        // union() is positional and both queries emit (isBuy, isForecastBuy, label, total);
        // unionAll is deprecated since Spark 2.0 in favor of union.
        Dataset<Row> resultDF = resDF.union(resLabelDF);
        // BUG FIX: save mode must be set via mode(...); option("mode", ...) is not a
        // DataFrameWriter save mode and was silently ignored, so a rerun would fail
        // with "path already exists".
        resultDF.coalesce(1).write()
                .format("csv")
                .option("header", "true")
                .mode("overwrite")
                .save("/ai_marketing/test/buy.csv");
    }

    /**
     * Loads a headered delimited file into a DataFrame. Replaces the former
     * near-duplicate {@code createRealView}/{@code createTabRealView} pair.
     *
     * @param session   active Spark session
     * @param hdfsPath  HDFS (or local) path of the wide-table file
     * @param delimiter field delimiter, e.g. "," or "\t"
     * @return the loaded DataFrame, or {@code null} when loading fails
     *         — NOTE(review): callers dereference the result immediately, so a load
     *         failure still surfaces as an NPE; consider rethrowing instead.
     */
    private Dataset<Row> loadHeaderedCsv(SparkSession session, String hdfsPath, String delimiter) {
        Dataset<Row> wideTableDF = null;
        try {
            wideTableDF = session.read()
                    .format("csv")
                    .option("header", "true")
                    .option("delimiter", delimiter)
                    .load(hdfsPath);
        } catch (Exception e) {
            // Best-effort behavior kept, but now surfaces the cause instead of swallowing it.
            System.err.println("未找到有效的宽表数据，查找路径为：" + hdfsPath + "，异常：" + e);
        }
        return wideTableDF;
    }
}
