package com.swsc.ai.tasks;

import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.swsc.ai.config.SparkSqlConf;
import com.swsc.ai.entity.ProdBuyForecastModel;
import com.swsc.ai.entity.ProdPurchForecastParam;
import com.swsc.ai.entity.RecallScore;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.sql.SQLException;
import java.util.Collections;

/**
 * @describe:
 * @author: 容若
 * @created: 2023-12-27 13:46
 */
/**
 * NDCG evaluation task for the product-purchase forecast model.
 *
 * <p>{@link #createView} materializes two temp views: {@code buyProd} (actual
 * purchases, the relevance ground truth) and {@code baseData} (forecast scores
 * fetched from the remote forecast service). {@link #calculateResult} joins the
 * two and computes NDCG@k in Spark SQL.</p>
 *
 * <p>NOTE(review): class name says "NCDG" while the parent is {@code NDCGTempTask};
 * looks like a typo, but renaming would break callers — left as-is.</p>
 */
public class MyTaskNCDG extends NDCGTempTask {

    /**
     * Builds the {@code buyProd} and {@code baseData} temp views.
     *
     * @param session active Spark session
     * @param str     varargs forwarded to {@link ProdPurchForecastParam};
     *                assumes at least two elements — TODO confirm caller contract
     * @throws SQLException propagated from {@link SparkSqlConf#getDataByCompleteSQL}
     */
    @Override
    public void createView(SparkSession session, String... str) throws SQLException {
        // Ground truth: purchases since 2023-12-06, excluding prod_code S00001.
        String sqlStr = "SELECT cust_no, prod_code FROM t_prod_buy_info WHERE create_time >= '20231206' and prod_code != 'S00001'";
        Dataset<Row> data = SparkSqlConf.getDataByCompleteSQL(session, sqlStr);
        data.createOrReplaceTempView("buyProd");

        // Request forecast scores from the remote service ("1" = fixed middle arg;
        // semantics defined by ProdPurchForecastParam — TODO confirm).
        ProdPurchForecastParam pd = new ProdPurchForecastParam(str[0], "1", str[1]);
        String dataStr = JSON.toJSONString(pd, SerializerFeature.DisableCircularReferenceDetect);
        // NOTE(review): URL has no scheme — presumably Hutool prepends "http://"; verify.
        String resultStr = HttpRequest.post("127.0.0.1:9090/aiElastic/esDataHandle/exist_forecast_test")
                .header("accessToken", "1")
                .timeout(5000)
                .body(dataStr)
                .execute().body();

        ProdBuyForecastModel prodBuyForecastModel = JSON.parseObject(resultStr, ProdBuyForecastModel.class);
        // Guard against a null/empty response body and a missing payload: previously a
        // malformed response caused an NPE on getCode()/getResp().getData().
        if (prodBuyForecastModel != null
                && "200".equals(prodBuyForecastModel.getCode())
                && prodBuyForecastModel.getResp() != null) {
            Dataset<Row> baseDf = session
                    .createDataset(prodBuyForecastModel.getResp().getData(), Encoders.bean(RecallScore.class))
                    .toDF();
            baseDf.createOrReplaceTempView("baseData");
        } else {
            // Fall back to an empty (clientId, score) view so downstream SQL still resolves.
            StructType schema = new StructType(new StructField[]{
                    new StructField("clientId", DataTypes.StringType, false, Metadata.empty()),
                    new StructField("score", DataTypes.StringType, false, Metadata.empty())
            });
            Dataset<Row> emptyDataset = session.createDataset(Collections.emptyList(), Encoders.row(schema));
            emptyDataset.createOrReplaceTempView("baseData");
        }
    }

    /**
     * Joins forecast scores with actual purchases and computes NDCG@k.
     *
     * @param session  active Spark session (expects views from {@link #createView})
     * @param dt       unused here — presumably consumed by the parent task; verify
     * @param fileName unused here — presumably consumed by the parent task; verify
     * @param ndcgNum  cut-off rank k; must parse as an integer
     * @throws NumberFormatException if {@code ndcgNum} is not a valid integer
     */
    @Override
    public void calculateResult(SparkSession session, String dt, String fileName, String ndcgNum) {
        // Parse first: ndcgNum is spliced into SQL, so a non-numeric value must fail
        // fast here instead of being injected into the query text.
        int topK = Integer.parseInt(ndcgNum.trim());

        // isBuy = 1 when the scored client actually bought something (left join hit).
        Dataset<Row> dataset = session.sql("select (case\n" +
                "            when b.cust_no is null then 0\n" +
                "            else 1\n" +
                "    end) as isBuy, u.clientId,u.score,b.prod_code from (\n" +
                "    select clientId, score from baseData\n" +
                ") as u left join (\n" +
                "    SELECT cust_no, prod_code FROM buyProd\n" +
                ") as b\n" +
                "on u.clientId = b.cust_no");
        dataset.show();
        dataset.createOrReplaceTempView("baseForecastData");

        // NDCG = DCG / IDCG: actual_rank orders by predicted score, ideal_rank by
        // the true label; both discounted by log2(rank + 1), truncated at top-k.
        Dataset<Row> ndcgDF = session.sql("SELECT dcg/idcg as ndcg from (\n" +
                "     SELECT SUM(score / LOG(2, ideal_rank + 1)) AS idcg,\n" +
                "            SUM(score / LOG(2, actual_rank + 1)) AS dcg\n" +
                "     FROM (\n" +
                "          SELECT isBuy, clientId, prod_code,score,\n" +
                "                 ROW_NUMBER() OVER (ORDER BY isBuy DESC) AS ideal_rank,\n" +
                "                  ROW_NUMBER() OVER (ORDER BY score DESC) AS actual_rank\n" +
                "          FROM baseForecastData\n" +
                "     ) AS ranked_docs where actual_rank <= " + topK +
                ")");
        ndcgDF.show();
    }
}
