package com.asiainfo.dacp.datastash.broker.core;

import com.asiainfo.dacp.datastash.broker.core.enums.UnitTaskStateEnum;
import com.asiainfo.dacp.datastash.broker.core.vo.StashUnitProcess;
import com.asiainfo.dacp.datastash.broker.core.vo.StashUnitTask;
import com.asiainfo.dacp.datastash.broker.core.vo.StashUnitTaskHeartbeat;
import com.asiainfo.dacp.datastash.broker.tracelog.StashTraceLogger;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Map;

/**
 * Redis-backed model for unit tasks that are in flight.
 *
 * <p>Encapsulates the Redis structures used while a unit executes: the hash of
 * running unit processes, the per-task state hash, the wait and dispatch
 * queues, and the task heartbeat hash.
 *
 * @author MeiKefu
 * @date 2017-11-21
 */
@Component
public class StashUnitProcessModel {

    private static final Logger LOG = LoggerFactory.getLogger(StashUnitProcessModel.class);

    /** Hash of in-flight unit processes, keyed by unitProcId. */
    private static final String UNIT_PROCESS_LIST = "datastash:unit_process_list";

    /** Hash of task states, keyed by taskProcId. */
    private static final String UNIT_TASK_STATE_LIST = "datastash:unit_task_state_list";

    /** Redis list used as the queue of tasks waiting to run. */
    private static final String UNIT_TASK_WAIT_QUEUE = "datastash:unit_task_wait_queue";

    /** Redis list used as the queue of tasks waiting to be assigned a worker. */
    private static final String UNIT_TASK_DISPATCH_QUEUE = "datastash:unit_task_dispatch_queue";

    /** Hash of task heartbeats, keyed by taskProcId. */
    private static final String UNIT_TASK_HEARTBEAT = "datastash:unit_task_heartbeat_list";

    // NOTE(review): raw type kept on purpose — the application context
    // presumably exposes a non-parameterized RedisTemplate bean, and narrowing
    // the generics here could break autowiring. Confirm the bean definition
    // before parameterizing.
    @Autowired
    private RedisTemplate redisTemplate;

    // TODO(review): injected but never used in this class — wire it into the
    // methods below or remove the dependency.
    @Autowired
    private StashTraceLogger tracelog;

    /**
     * Records the state of a task in the task-state hash.
     *
     * @param taskProcId id of the task process
     * @param state      new state value
     */
    public void updateUnitTaskState(String taskProcId, String state) {
        redisTemplate.opsForHash().put(UNIT_TASK_STATE_LIST, taskProcId, state);
    }

    /**
     * Returns all recorded task states, keyed by taskProcId.
     */
    public Map<String, String> getUnitTaskState() {
        return redisTemplate.opsForHash().entries(UNIT_TASK_STATE_LIST);
    }

    /**
     * Registers (or replaces) a unit process in the in-flight hash.
     */
    public void pushUnitProcessList(StashUnitProcess stashUnitProcess) {
        redisTemplate.opsForHash().put(UNIT_PROCESS_LIST, stashUnitProcess.getUnitProcId(), stashUnitProcess);
    }

    /**
     * Looks up a single in-flight unit process, or {@code null} if absent.
     */
    public StashUnitProcess getUnitProcessList(String unitProcId) {
        return (StashUnitProcess) redisTemplate.opsForHash().get(UNIT_PROCESS_LIST, unitProcId);
    }

    /**
     * Removes a unit process from the in-flight hash.
     */
    public void deleteUnitProcessList(String unitProcId) {
        redisTemplate.opsForHash().delete(UNIT_PROCESS_LIST, unitProcId);
    }

    /**
     * Returns all in-flight unit processes.
     */
    public List<StashUnitProcess> getUnitProcessList() {
        return redisTemplate.opsForHash().values(UNIT_PROCESS_LIST);
    }

    /**
     * Appends a batch of tasks to the tail of the wait queue.
     *
     * <p>A {@code null} or empty batch is a no-op: {@code rightPushAll}
     * rejects empty collections with an exception.
     */
    public void pushUnitTaskWaitQueue(List<StashUnitTask> unitTasks) {
        if (unitTasks == null || unitTasks.isEmpty()) {
            return;
        }
        redisTemplate.opsForList().rightPushAll(UNIT_TASK_WAIT_QUEUE, unitTasks);
    }

    /**
     * Appends a single task to the tail of the wait queue.
     */
    public void pushUnitTaskWaitQueue(StashUnitTask unitTask) {
        redisTemplate.opsForList().rightPush(UNIT_TASK_WAIT_QUEUE, unitTask);
    }

    /**
     * Pops the head of the wait queue, or {@code null} when it is empty.
     */
    public StashUnitTask fetchUnitTaskWaitQueue() {
        return (StashUnitTask) redisTemplate.opsForList().leftPop(UNIT_TASK_WAIT_QUEUE);
    }

    /**
     * Pops the head of the dispatch queue, or {@code null} when it is empty.
     */
    public StashUnitTask fetchUnitTaskDispatchQueue() {
        return (StashUnitTask) redisTemplate.opsForList().leftPop(UNIT_TASK_DISPATCH_QUEUE);
    }

    /**
     * Returns the recorded state of a task, or {@code null} if unknown.
     */
    public String getUnitTaskState(String taskProcId) {
        return (String) redisTemplate.opsForHash().get(UNIT_TASK_STATE_LIST, taskProcId);
    }

    /**
     * Enqueues a task on the dispatch queue and marks its state as
     * {@code BLOCKED_RESOURCE}.
     *
     * <p>Both commands are sent in a single pipeline to save a network round
     * trip. Note that a pipeline is not a transaction, so the two writes are
     * not atomic with respect to other clients.
     */
    public void pushUnitTaskDispatchQueue(StashUnitTask unitTask) {

        final byte[] dispKey = redisTemplate.getKeySerializer().serialize(UNIT_TASK_DISPATCH_QUEUE);
        final byte[] dispRawValue = redisTemplate.getValueSerializer().serialize(unitTask);

        final byte[] taskStateKey = redisTemplate.getKeySerializer().serialize(UNIT_TASK_STATE_LIST);
        final byte[] taskStateRawKey = redisTemplate.getHashKeySerializer().serialize(unitTask.getTaskProcId());
        final byte[] taskStateRawValue = redisTemplate.getHashValueSerializer().serialize(UnitTaskStateEnum.BLOCKED_RESOURCE.value());

        RedisCallback<List<Object>> pipelineCallback = new RedisCallback<List<Object>>() {
            @Override
            public List<Object> doInRedis(RedisConnection connection) throws DataAccessException {
                connection.rPush(dispKey, dispRawValue);
                connection.hSet(taskStateKey, taskStateRawKey, taskStateRawValue);
                return null;
            }
        };
        redisTemplate.executePipelined(pipelineCallback);
    }

    /**
     * Puts a task back at the head of the dispatch queue so it is retried
     * before newly enqueued tasks.
     */
    public void requeueUnitTaskDispatchQueue(StashUnitTask unitTask) {
        redisTemplate.opsForList().leftPush(UNIT_TASK_DISPATCH_QUEUE, unitTask);
    }

    /**
     * Records the latest heartbeat for a task.
     */
    public void taskHeartbeat(StashUnitTaskHeartbeat taskHeartbeat) {
        redisTemplate.opsForHash().put(UNIT_TASK_HEARTBEAT, taskHeartbeat.getTaskProcId(), taskHeartbeat);
    }

    /**
     * Returns the latest heartbeat for a task, or {@code null} when none is
     * recorded or the lookup fails (e.g. a deserialization error).
     */
    public StashUnitTaskHeartbeat getTaskHeartBeat(String taskProcId) {
        StashUnitTaskHeartbeat taskHeartBeat = null;
        try {
            taskHeartBeat = (StashUnitTaskHeartbeat) redisTemplate.opsForHash().get(UNIT_TASK_HEARTBEAT, taskProcId);
        } catch (Exception e) {
            // Best-effort read: log and fall through to return null. The
            // trailing Throwable is consumed by SLF4J as the stack trace, so
            // the message must not contain a {} placeholder for it.
            LOG.warn("获取unit_task_heartbeat_list为空", e);
        }
        return taskHeartBeat;
    }
}
