package com.sui.bigdata.sml.model.service.impl;

import com.alibaba.fastjson.JSON;
import com.feidee.fd.sml.algorithm.forecast.Forecast;
import com.feidee.fdcommon.configuration.CustomConfiguration;
import com.sui.bigdata.sml.model.configuration.ModelConfig;
import com.sui.bigdata.sml.model.model.InferenceRequest;
import com.sui.bigdata.sml.model.model.InferenceResponse;
import com.sui.bigdata.sml.model.service.InferenceService;
import com.sui.bigdata.sml.model.util.Constants;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.DependsOn;
import org.springframework.stereotype.Service;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author songhaicheng
 * @date 2019/12/4 11:25
 * @description
 * @reviewer
 */
@Service
@DependsOn("disconfConfig")
@Slf4j
public class InferenceServiceImpl implements InferenceService {

    @Autowired
    private ModelConfig modelConfig;

    /**
     * Runs the configured model chain on a single sample.
     * <p>
     * Every model except the last transforms the feature list and feeds the next one;
     * the last model produces the prediction. Note: classification models return
     * {@code List<Map<String, Double>>}, regression models return {@code List<Double>}
     * (selected via {@link #isRegressionModel()}).
     *
     * @param request carries the raw feature map and the top-N count for classification
     * @return response with the first prediction and elapsed time in ms; if any model
     *         fails, or the model list is empty, only the cost field is populated
     */
    @Override
    public InferenceResponse forecast(InferenceRequest request) {
        long start = System.currentTimeMillis();
        // Classification: List<Map<String, Double>>; regression: List<Double>.
        List<?> res = null;
        List<Map<String, Object>> transition = Collections.singletonList(new HashMap<>(request.getFeatures()));
        // Hoisted loop-invariant; the model list is fixed for the duration of the call.
        int modelCount = modelConfig.getModels().size();
        for (int i = 0; i < modelCount; i++) {
            Forecast forecast = null;
            try {
                forecast = modelConfig.borrowModel(modelConfig.getModels().get(i));
                if (i != modelCount - 1) {
                    // Intermediate model: its output becomes the next model's input.
                    transition = forecast.predictJavaList(transition);
                    log.info("预测中间结果[{}]: {}", i + 1, JSON.toJSON(transition));
                } else if (isRegressionModel()) {
                    // Final model configured as a regressor.
                    res = forecast.predictionRegJavaList(transition);
                } else {
                    // Final model is a classifier: probability prediction.
                    res = forecast.predictProbabilityJavaList(transition, request.getTop(), null);
                }
            } catch (Exception e) {
                log.error("模型预测出错 {}[{}]：{}",
                        servingToken(), i + 1, modelConfig.getModels().get(i).toString(),
                        e);
                return new InferenceResponse().setCost(System.currentTimeMillis() - start);
            } finally {
                // Only return instances that were actually borrowed; handing null back
                // to the pool would itself throw and mask the original error.
                if (forecast != null) {
                    modelConfig.returnModel(modelConfig.getModels().get(i), forecast);
                }
            }
        }
        if (res == null || res.isEmpty()) {
            // Empty model list or empty prediction: degrade to a cost-only response
            // instead of throwing NPE/IndexOutOfBoundsException below.
            return new InferenceResponse().setCost(System.currentTimeMillis() - start);
        }
        return new InferenceResponse().setResult(res.get(0)).setCost(System.currentTimeMillis() - start);
    }

    /**
     * Runs the configured model chain on a batch of samples keyed by sample id.
     * <p>
     * Same pipeline as {@link #forecast(InferenceRequest)} but using the Map-based
     * predict variants; the result keeps the caller's sample keys.
     *
     * @param request carries the per-sample feature maps and the top-N count
     * @return response with the keyed predictions and elapsed time in ms; if any model
     *         fails, or the model list is empty, only the cost field is populated
     */
    @Override
    public InferenceResponse forecasts(InferenceRequest request) {
        long start = System.currentTimeMillis();
        // Classification: Map of sample key -> class probabilities; regression: key -> value.
        Map<?, ?> res = null;
        Map<String, Map<String, Object>> transition = new HashMap<>(request.getTotalFeatures());
        int modelCount = modelConfig.getModels().size();
        for (int i = 0; i < modelCount; i++) {
            Forecast forecast = null;
            try {
                forecast = modelConfig.borrowModel(modelConfig.getModels().get(i));
                if (i != modelCount - 1) {
                    // Intermediate model: its output becomes the next model's input.
                    transition = forecast.predictJavaMap(transition);
                } else if (isRegressionModel()) {
                    // Final model configured as a regressor.
                    res = forecast.predictionRegJavaMap(transition);
                } else {
                    // Final model is a classifier: probability prediction.
                    res = forecast.predictProbabilityJavaMap(transition, request.getTop(), null);
                }
            } catch (Exception e) {
                log.error("模型预测出错 {}[{}]：{}",
                        servingToken(), i + 1, modelConfig.getModels().get(i).toString(),
                        e);
                return new InferenceResponse().setCost(System.currentTimeMillis() - start);
            } finally {
                // Only return instances that were actually borrowed (see forecast()).
                if (forecast != null) {
                    modelConfig.returnModel(modelConfig.getModels().get(i), forecast);
                }
            }
        }
        return new InferenceResponse().setResult(res).setCost(System.currentTimeMillis() - start);
    }

    /**
     * @return the serving token configured for this deployment
     */
    private String servingToken() {
        return CustomConfiguration.getString(Constants.SERVING_TOKEN);
    }

    /**
     * @return {@code true} when "&lt;token&gt;.isRegModel" is configured, i.e. the final
     *         model in the chain is a regression model (defaults to {@code false})
     */
    private boolean isRegressionModel() {
        return CustomConfiguration.getBoolean(servingToken() + ".isRegModel", false);
    }

}
