package com.hfzy.ihk.web.aliyuncall.job;

import com.alibaba.dubbo.config.annotation.Reference;
import com.alibaba.fastjson.JSONObject;
import com.hfzy.ihk.common.util.date.DateUtils;
import com.hfzy.ihk.facade.calldata.service.EsOptRmiService;
import com.hfzy.ihk.web.aliyuncall.biz.PushDataToEsBiz;
import com.hfzy.ihk.web.aliyuncall.constant.RedisFields;
import com.hfzy.ihk.web.aliyuncall.constant.RedisKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Profile;
import org.springframework.core.task.TaskExecutor;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * 数据补推job (data re-push job).
 *
 * <p>Retries Elasticsearch pushes that previously failed: every 5 minutes each job
 * reads a Redis set of failed records, re-submits each one to the push executor,
 * and removes the record from the failed set once the task has been submitted.
 *
 * <p>Re-entrancy: each scheduled method is guarded by an {@link AtomicBoolean} so
 * overlapping runs are skipped instead of racing each other. The guard is released
 * in a {@code finally} block so an unexpected failure can never permanently
 * disable the job.
 */
@Component
@Profile({"pro","dev"})
public class CheckSaveEsJob {

    private final static Logger logger = LoggerFactory.getLogger(CheckSaveEsJob.class);

    // true while the corresponding job is running. AtomicBoolean.compareAndSet
    // replaces the original non-atomic check-then-set on a static Boolean, which
    // allowed two overlapping scheduled runs to both pass the guard.
    private static final AtomicBoolean dataJobRunning = new AtomicBoolean(false);

    private static final AtomicBoolean urlJobRunning = new AtomicBoolean(false);

    @Autowired
    RedisTemplate redisTemplate;

    @Autowired
    PushDataToEsBiz pushDataToEsBiz;

    @Resource
    TaskExecutor pushDataToEsTaskExecutor;

    /**
     * 将推送失败的云呼数据重新推到Es 可以设定五分钟一次
     *
     * <p>Reads the failed-history Redis set; for each sessionId, loads the call
     * history hash and re-submits it to the ES push executor. Entries that are
     * null/"null" or whose hash is empty are dropped from the failed set.
     */
    @Scheduled(cron = "0 0/5 * * * ?")
    public void checkCallHistorySaveEs(){
        // Skip this run if a previous run is still in progress.
        if (!dataJobRunning.compareAndSet(false, true)) {
            return;
        }
        try {
            Set<String> updateFailedHistorys = null;
            try {
                updateFailedHistorys = redisTemplate.opsForSet().members(RedisKey.PUSH_TO_ES_FAILED_HISTORY);
            } catch (Exception e) {
                logger.error("运行 repushdatatoEs 错误,获取set失败", e);
            }
            if (updateFailedHistorys == null || updateFailedHistorys.isEmpty()) {
                return;
            }
            for (String sessionId : updateFailedHistorys) {
                try {
                    // Stale/garbage entries: remove and move on.
                    if (sessionId == null || sessionId.equals("null")) {
                        redisTemplate.opsForSet().remove(RedisKey.PUSH_TO_ES_FAILED_HISTORY, sessionId);
                        continue;
                    }
                    Map<String, String> dataMap = redisTemplate.opsForHash().entries(RedisKey.CALL_HISTORY_RPIX + sessionId);
                    if (dataMap == null || dataMap.isEmpty()) {
                        continue;
                    }
                    try {
                        String jsonStr = JSONObject.toJSONString(dataMap);
                        pushDataToEsTaskExecutor.execute(() ->
                                pushDataToEsBiz.pushCallHistoryToEs(sessionId, jsonStr, RedisKey.CALL_HISTORY_RPIX + sessionId));
                        // 推送成功删除推送失败记录
                        // NOTE(review): removed on *submission*, not on confirmed push
                        // success; presumably the async push re-adds itself on failure —
                        // verify in PushDataToEsBiz.
                        redisTemplate.opsForSet().remove(RedisKey.PUSH_TO_ES_FAILED_HISTORY, sessionId);
                    } catch (Exception e) {
                        // Executor rejected the task (e.g. queue full): keep the record
                        // in the failed set so a later run retries it.
                        logger.error("====>推送Es 的线程池爆了", e);
                        try {
                            redisTemplate.opsForSet().add(RedisKey.PUSH_TO_ES_FAILED_HISTORY, sessionId);
                        } catch (Exception e1) {
                            logger.error("将推送ES 失败数据放入redis 失败！sessionId：{}", sessionId, e1);
                        }
                    }
                } catch (Exception e) {
                    logger.error("运行 repushdatatoEs 错误,sessionId:{}", sessionId, e);
                }
            }
        } finally {
            // Always release the guard — the original reset it only on normal paths,
            // so an unexpected throw could disable the job permanently.
            dataJobRunning.set(false);
        }
    }

    /**
     * 将推送失败的录音URL数据重新推送到Es
     * 可以设定为5分钟跑一次，没毛病
     *
     * <p>Each set member is "sessionId::url"; malformed members are removed.
     * Valid pairs are wrapped in a small JSON document and re-submitted to the
     * ES push executor.
     */
    @Scheduled(cron = "0 0/5 * * * ?")
    public void checkRecordFileSaveEs(){
        // Skip this run if a previous run is still in progress.
        if (!urlJobRunning.compareAndSet(false, true)) {
            return;
        }
        try {
            Set<String> updateRecordUrlSet = null;
            try {
                updateRecordUrlSet = redisTemplate.opsForSet().members(RedisKey.PUSH_RECORD_URL_TO_ES_FAILED);
            } catch (Exception e) {
                logger.error("运行 repushRecordUrltoEs 错误,获取set失败", e);
            }
            if (updateRecordUrlSet == null || updateRecordUrlSet.isEmpty()) {
                return;
            }
            for (String str : updateRecordUrlSet) {
                try {
                    String[] s = str.split("::");
                    if (s.length != 2) {
                        // 删除该记录 — malformed entry, drop it.
                        redisTemplate.opsForSet().remove(RedisKey.PUSH_RECORD_URL_TO_ES_FAILED, str);
                        continue;
                    }
                    String sessionId = s[0];
                    String url = s[1];

                    // 将录音推送到Es
                    try {
                        Map<String, String> map = new HashMap<>();
                        map.put(RedisFields.CALL_HISTORY_SESSION_ID, sessionId);
                        map.put(RedisFields.CALL_HISTORY_RECORD_FILE_URL, url);
                        String jsonStr = JSONObject.toJSONString(map);
                        pushDataToEsTaskExecutor.execute(() ->
                                pushDataToEsBiz.pushRecordFileToEs(sessionId, url, jsonStr));
                        // 推送成功删除该记录 (removed on submission; see note in
                        // checkCallHistorySaveEs)
                        redisTemplate.opsForSet().remove(RedisKey.PUSH_RECORD_URL_TO_ES_FAILED, str);
                    } catch (Exception e) {
                        logger.error("====>推送url->Es 的线程池爆了", e);
                        try {
                            redisTemplate.opsForSet().add(RedisKey.PUSH_RECORD_URL_TO_ES_FAILED, sessionId + "::" + url);
                        } catch (Exception e1) {
                            // was e1.printStackTrace(); use the class logger like
                            // every other error path.
                            logger.error("将推送url失败数据放回redis 失败！sessionId：{}", sessionId, e1);
                        }
                    }
                } catch (Exception e) {
                    logger.error("运行 repushRecordUrltoEs 错误,记录:{}", str, e);
                }
            }
        } finally {
            // Guaranteed reset: the original left isUrlJobfinish == true forever if
            // the unguarded remove() threw, permanently disabling this job.
            urlJobRunning.set(false);
        }
    }

}
