package com.swsc.ai.tasks;

import cn.hutool.http.HttpRequest;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.swsc.ai.config.SparkSqlConf;
import com.swsc.ai.entity.ProdBuyForecastModel;
import com.swsc.ai.entity.ProdPurchForecastParam;
import com.swsc.ai.entity.ReCallResp;
import com.swsc.ai.entity.RecallScore;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * NDCG (Normalized Discounted Cumulative Gain) evaluation task.
 *
 * <p>Builds a candidate set of (advisor, product) pairs from purchase records and
 * advisor-customer relations, calls a remote forecast service for each pair, and
 * computes NDCG@k over the returned recall scores.
 *
 * @author 容若
 * @since 2023-12-28
 */
public class NDCGTask extends NDCGTempTask {

    /**
     * Builds the {@code buyProd} and {@code baseData} temp views used by
     * {@link #calculateResult}. Forecast results are also persisted as a single
     * CSV under {@code str[0] + "/temp"} when non-empty.
     *
     * @param session active Spark session
     * @param str     {@code str[0]} is the HDFS output base path
     * @throws SQLException propagated from {@link SparkSqlConf#getDataByCompleteSQL}
     */
    @Override
    public void createView(SparkSession session, String... str) throws SQLException {
        // Purchases since 2023-12-06, excluding product S00001.
        String sqlStr = "SELECT cust_no, prod_code FROM t_prod_buy_info WHERE create_time >= '20231206' and prod_code != 'S00001'";
        Dataset<Row> data = SparkSqlConf.getDataByCompleteSQL(session, sqlStr);
        data.createOrReplaceTempView("buyProd");

        String getInvestAdvisorSQL = "SELECT DISTINCT EMP_NO_NEW, CUST_NO FROM t_sale_product_relation_new";
        Dataset<Row> investAdvisorData = SparkSqlConf.getDataByCompleteSQL(session, getInvestAdvisorSQL);
        investAdvisorData.createOrReplaceTempView("investAdvisorData");

        // One row per (prod_code, advisor); the LIMIT keeps the remote-call fan-out small.
        Dataset<Row> sqlData = session.sql("select any_value(p.cust_no) as cust_no, p.prod_code, i.EMP_NO_NEW as investAdvisorId from (\n" +
                "    SELECT cust_no, prod_code FROM buyProd\n" +
                ") as p \n" +
                "inner join (\n" +
                "    SELECT EMP_NO_NEW, CUST_NO FROM investAdvisorData\n" +
                ") as i \n" +
                "on i.CUST_NO = p.cust_no group by p.prod_code, i.EMP_NO_NEW limit 10");

        // BUG FIX: the previous version mutated a driver-side ArrayList inside
        // JavaRDD.foreach(). That closure runs on the executors against a serialized
        // copy of the list, so the driver's list stayed empty and the "baseData" view
        // was always built from nothing. Collect the (small, LIMIT 10) result to the
        // driver first and perform the HTTP calls locally.
        List<ProdPurchForecastParam> prodParams = sqlData.javaRDD().map(row -> {
            // Convert each row into a ProdPurchForecastParam request object.
            String investAdvisorId = row.getString(row.fieldIndex("investAdvisorId"));
            String prodCode = row.getString(row.fieldIndex("prod_code"));
            return new ProdPurchForecastParam(investAdvisorId, "1", prodCode);
        }).collect();

        List<ReCallResp> dataList = new ArrayList<>();
        for (ProdPurchForecastParam prod : prodParams) {
            String dataStr = JSON.toJSONString(prod, SerializerFeature.DisableCircularReferenceDetect);
            // NOTE(review): timeout(-1) disables the HTTP timeout entirely, so a hung
            // service blocks this task indefinitely — consider a finite timeout.
            String resultStr = HttpRequest.post("http://172.18.10.18:33067/aiElastic/esDataHandle/exist_forecast")
                    .header("accessToken", "1")
                    .timeout(-1)
                    .body(dataStr)
                    .execute().body();
            ProdBuyForecastModel prodBuyForecastModel = JSON.parseObject(resultStr, ProdBuyForecastModel.class);
            // Guard against an unparseable/empty response body before dereferencing.
            if (prodBuyForecastModel != null && "200".equals(prodBuyForecastModel.getCode())) {
                String investAdvisorId = prodBuyForecastModel.getResp().getInvestAdvisorId();
                for (RecallScore recallScore : prodBuyForecastModel.getResp().getData()) {
                    ReCallResp reCallResp = new ReCallResp();
                    reCallResp.setClientId(recallScore.getClientId());
                    reCallResp.setScore(recallScore.getScore());
                    reCallResp.setInvestAdvisorId(investAdvisorId);
                    dataList.add(reCallResp);
                }
            }
        }

        if (dataList.isEmpty()) {
            // No forecast results: register an empty view with the expected schema so
            // downstream SQL in calculateResult still resolves.
            StructType schema = new StructType(new StructField[]{
                    new StructField("clientId", DataTypes.StringType, false, Metadata.empty()),
                    new StructField("score", DataTypes.StringType, false, Metadata.empty()),
                    new StructField("investAdvisorId", DataTypes.StringType, false, Metadata.empty())
            });
            Dataset<Row> emptyDataset = session.createDataset(Collections.emptyList(), Encoders.row(schema));
            emptyDataset.createOrReplaceTempView("baseData");
        } else {
            Dataset<Row> baseDf = session.createDataset(dataList, Encoders.bean(ReCallResp.class)).toDF();
            baseDf.createOrReplaceTempView("baseData");
            // Persist to HDFS as a single CSV file.
            // BUG FIX: save mode must be set via DataFrameWriter.mode(); the previous
            // .option("mode", "overwrite") is not a write option and left the default
            // ErrorIfExists mode, so re-runs failed on an existing path.
            baseDf.coalesce(1).write()
                    .format("csv")
                    .option("header", "true")
                    .mode("overwrite")
                    .save(str[0] + "/temp");
        }
    }

    /**
     * Joins forecast output ({@code baseData}) with actual purchases ({@code buyProd})
     * and computes NDCG over the top-{@code ndcgNum} predictions, writing the single
     * resulting value as CSV to {@code fileName + "/result"}.
     *
     * <p>NOTE(review): both DCG and IDCG use {@code score} as the gain while the ideal
     * ranking orders by {@code isBuy}; classic NDCG uses the relevance label (isBuy) as
     * the gain. Behavior preserved — confirm this variant is intentional.
     *
     * @param session  active Spark session
     * @param dt       partition date (currently unused in this implementation)
     * @param fileName HDFS output base path
     * @param ndcgNum  cut-off k for NDCG@k (interpolated into SQL; expected numeric)
     */
    @Override
    public void calculateResult(SparkSession session, String dt, String fileName, String ndcgNum) {
        // Label each forecasted client: isBuy = 1 if they actually bought, else 0.
        Dataset<Row> dataset = session.sql("select (case\n" +
                "            when b.cust_no is null then 0\n" +
                "            else 1\n" +
                "    end) as isBuy, u.clientId,u.score,b.prod_code from (\n" +
                "    select clientId, score from baseData\n" +
                ") as u left join (\n" +
                "    SELECT cust_no, prod_code FROM buyProd\n" +
                ") as b\n" +
                "on u.clientId = b.cust_no");

        dataset.createOrReplaceTempView("baseForecastData");

        // DCG ranks by predicted score, IDCG by the actual purchase label; NDCG = DCG/IDCG.
        Dataset<Row> ndcgDF = session.sql("SELECT dcg/idcg as ndcg from (\n" +
                "     SELECT SUM(score / LOG(2, ideal_rank + 1)) AS idcg,\n" +
                "            SUM(score / LOG(2, actual_rank + 1)) AS dcg\n" +
                "     FROM (\n" +
                "          SELECT isBuy, clientId, prod_code,score,\n" +
                "                 ROW_NUMBER() OVER (ORDER BY isBuy DESC) AS ideal_rank,\n" +
                "                  ROW_NUMBER() OVER (ORDER BY score DESC) AS actual_rank\n" +
                "          FROM baseForecastData\n" +
                "     ) AS ranked_docs where actual_rank <= " + ndcgNum +
                ")");
        // Persist to HDFS as a single CSV file.
        // BUG FIX: use DataFrameWriter.mode("overwrite") instead of the ineffective
        // .option("mode", "overwrite"), which is not a write option.
        ndcgDF.coalesce(1).write()
                .format("csv")
                .option("header", "true")
                .mode("overwrite")
                .save(fileName + "/result");
    }
}
