package com.foreveross.taskservice.taskcache.redis;

import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.poi.ss.formula.functions.T;
import org.springframework.stereotype.Service;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;

import com.foreveross.taskservice.common.CacheDataSpace;
import com.foreveross.taskservice.common.InstanceFactory;
import com.foreveross.taskservice.common.TaskStatusEnum;
import com.foreveross.taskservice.common.model.AbstractTaskModel;
import com.foreveross.taskservice.common.model.TaskAction;
import com.foreveross.taskservice.common.model.airfreight.AirFreightTaskModel;
import com.foreveross.taskservice.taskcache.ITaskCacheQuery;
import com.foreveross.taskservice.taskcache.ITaskCacheStorage;
import com.foreveross.taskservice.taskcache.TaskRollbackConvertorApplication;
import com.foreveross.taskservice.taskcache.redis.core.TaskRedisCallback;
import com.foreveross.taskservice.taskcache.redis.util.RedisSerializeUtil;
import com.foreveross.taskservice.taskcache.redis.util.TaskScopeUtil;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
@Service
public class RedisTaskCacheStorageImpl extends BaseRedisTaskCache implements ITaskCacheStorage {

	/**
	 * Puts the given tasks into the wait queue.
	 * <p>
	 * Only tasks that are not already present in the cache are inserted
	 * (deduplicated via {@code filterIncludeNotExist}); each inserted task is
	 * marked {@code TASK_WATI} and its "entered wait queue" action is logged.
	 * All writes go through a single pipeline.
	 *
	 * @param models tasks to enqueue
	 * @return the number of tasks actually inserted (duplicates excluded)
	 * @throws Exception on Redis/lock failure
	 */
	@Override
	public int addTasks(final LinkedHashSet<AbstractTaskModel> models) throws Exception {
		TaskRedisCallback<Integer> action = new TaskRedisCallback<Integer>() {
			@Override
			public Integer doInRedis(Jedis j) throws Exception {
				// Skip tasks that already exist in the cache.
				Set<AbstractTaskModel> fresh = filterIncludeNotExist(models, j);
				Pipeline pl = openPipeline(j, true);
				for (AbstractTaskModel m : fresh) {
					m.setStatus(TaskStatusEnum.TASK_WATI.getStatus());
					logAction(m, TaskAction.getEnterWaitQueueaAction());
					saveOrUpdateCacheTask(m, pl); // persist/update the task in the cache
				}
				syncPipline(pl, true);
				return fresh.size();
			}
		};
		return excuteDefaultLocks(action, models);
	}

	/**
	 * Fetches up to {@code maxSize} tasks for a channel/status by key lookup,
	 * then flips the fetched keys from WAIT to DOING (see {@link #tansformStatus}).
	 *
	 * @param channelCode channel to query
	 * @param maxSize     maximum number of tasks to return
	 * @param status      status filter passed to the key query
	 * @return the deserialized tasks, or {@code null} when no keys matched
	 *         (NOTE(review): callers appear to rely on null for "no tasks")
	 * @throws Exception on Redis failure
	 */
	@Override
	public List<AbstractTaskModel> getTasks(String channelCode, int maxSize, String status) throws Exception {
		List<byte[]> allKeys = InstanceFactory.getInstance(ITaskCacheQuery.class).getKeys(channelCode, status);
		if (allKeys.size() > maxSize) {
			allKeys = allKeys.subList(0, maxSize);
		}
		final List<byte[]> keys = new ArrayList<byte[]>(allKeys);
		return excute(new TaskRedisCallback<List<AbstractTaskModel>>() {
			@Override
			public List<AbstractTaskModel> doInRedis(Jedis j) throws Exception {
				if (keys.isEmpty()) {
					return null;
				}
				List<AbstractTaskModel> models = new ArrayList<AbstractTaskModel>(keys.size());
				for (byte[] key : keys) {
					models.add(RedisSerializeUtil.deserializeObject(j.get(key), AbstractTaskModel.class));
				}
				// After fetching, rename the keys so their status segment reads DOING.
				tansformStatus(keys);
				return models;
			}
		});
	}

	/**
	 * Fetches ALL tasks for a channel/status (no size limit), then flips the
	 * fetched keys from WAIT to DOING (see {@link #tansformStatus}).
	 *
	 * @param channelCode channel to query
	 * @param status      status filter passed to the key query
	 * @return the deserialized tasks, or {@code null} when no keys matched
	 * @throws Exception on Redis failure
	 */
	@Override
	public List<AbstractTaskModel> getTasks(String channelCode, String status)
			throws Exception {
		final List<byte[]> keys = InstanceFactory.getInstance(ITaskCacheQuery.class).getKeys(channelCode, status);
		return excute(new TaskRedisCallback<List<AbstractTaskModel>>() {
			@Override
			public List<AbstractTaskModel> doInRedis(Jedis j) throws Exception {
				if (keys.isEmpty()) {
					return null;
				}
				List<AbstractTaskModel> models = new ArrayList<AbstractTaskModel>(keys.size());
				for (byte[] key : keys) {
					models.add(RedisSerializeUtil.deserializeObject(j.get(key), AbstractTaskModel.class));
				}
				// After fetching, rename the keys so their status segment reads DOING.
				tansformStatus(keys);
				return models;
			}
		});
	}

	/**
	 * Pops up to {@code maxSize} tasks from the channel's wait queue (highest
	 * score first) and moves them into the doing queue, updating each task's
	 * status, execution count, start time and query index in one pipeline.
	 *
	 * @param d         cache data space (date + task type) of the queues
	 * @param channelId channel whose queues are read
	 * @param maxSize   maximum number of tasks to pop
	 * @return the tasks moved into the doing queue, in wait-queue order
	 * @throws Exception on Redis/lock failure
	 */
	@Override
	public LinkedHashSet<AbstractTaskModel> getTasks(final CacheDataSpace d, final String channelId,
			final int maxSize) throws Exception {
		TaskRedisCallback<LinkedHashSet<AbstractTaskModel>> action = new TaskRedisCallback<LinkedHashSet<AbstractTaskModel>>() {

			@Override
			public LinkedHashSet<AbstractTaskModel> doInRedis(Jedis j) throws Exception {
				// FIX: the previous version used hard-coded debug key literals
				// ("0000012014102700002100*") plus a KEYS scan and System.out
				// logging; the scanned results were never returned. Restored the
				// proper key builders that the debug code had commented out.
				byte[] waitKey = buildChannelWaitQueueKeyByte(d, channelId);
				byte[] doingKey = buildChannelDoingQueueKeyByte(d, channelId);
				// Take the top maxSize task ids from the wait queue.
				LinkedHashSet<byte[]> ids = Sets.newLinkedHashSet(j.zrevrange(waitKey, 0, maxSize - 1));
				LinkedHashMap<String, AbstractTaskModel> modelMap = findCacheTasksMap(ids, j);
				LinkedHashMap<String, QueryIndexLog> logMap = findCacheQueryIndexLogMap(ids, j);
				Pipeline pl = openPipeline(j, true);

				for (Entry<String, AbstractTaskModel> en : modelMap.entrySet()) {
					byte[] mId = RedisSerializeUtil.serializeString(en.getKey());
					// Remove from the wait queue ...
					pl.zrem(waitKey, mId);
					// ... and enqueue into the doing queue; score ties priority to time.
					pl.zadd(doingKey, TaskScopeUtil.getDoingQueueScope(), mId);
					AbstractTaskModel m = en.getValue();
					logAction(m, TaskAction.getEnterDoQueueAction());
					m.setStatus(TaskStatusEnum.TASK_DOING.getStatus());
					m.setExcuteCount(m.getExcuteCount() + 1);
					m.setStartTime(new Date());
					saveOrUpdateCacheTask(m, pl);
					updateQueryIndex(m, logMap.get(en.getKey()), pl); // refresh the search index
				}
				syncPipline(pl, true);
				return Sets.newLinkedHashSet(modelMap.values());
			}
		};
		return excuteDefaultLock(action, d, channelId);
	}

	/**
	 * Rolls finished work back into the cache: for every incoming task that is
	 * currently cached in DOING state, merges the incoming values into the
	 * cached task (via the rollback convertor), moves it from the doing queue
	 * to the finish queue, and accumulates byte/crawl counters.
	 *
	 * @param models tasks returned from workers to roll back
	 * @return number of tasks actually rolled back
	 * @throws Exception when no rollback convertor can be loaded, or on
	 *                   Redis/lock failure
	 */
	@Override
	public <T extends AbstractTaskModel> int rollbacks(final LinkedHashSet<AbstractTaskModel> models)
			throws Exception {
		TaskRedisCallback<Integer> action = new TaskRedisCallback<Integer>() {

			@Override
			public Integer doInRedis(Jedis j) throws Exception {
				List<AbstractTaskModel> incoming = Lists.newArrayList(models);
				LinkedHashMap<String, AbstractTaskModel> cacheModelMap = findCacheTasksMapByDissociateTasks(models, j);
				LinkedHashMap<String, QueryIndexLog> logMap = findCacheQueryIndexLogMapByDissociateTasks(models, j);
				Pipeline pl = openPipeline(j, true);
				int count = 0;
				for (AbstractTaskModel rt : incoming) {
					if (!cacheModelMap.containsKey(rt.getId())) {
						continue; // not cached — nothing to roll back
					}
					AbstractTaskModel ct = cacheModelMap.get(rt.getId());
					// NOTE(review): != works only if getStatus() returns a primitive
					// or interned value — confirm the status type.
					if (ct.getStatus() != TaskStatusEnum.TASK_DOING.getStatus()) {
						continue; // only DOING tasks can be rolled back
					}
					ct.setEndTime(new Date());
					CacheDataSpace space = CacheDataSpace.AirFreightSpace();
					byte[] doingKey = buildChannelDoingQueueKeyByte(space, ct.getChannel().getHbaseKey());
					byte[] finishKey = buildChannelFinishQueueKeyByte(space, ct.getChannel().getHbaseKey());
					byte[] taskId = RedisSerializeUtil.serializeString(ct.getId());
					// Remove from the doing queue.
					pl.zrem(doingKey, taskId);
					// Merge the returned task into the cached one: some fields may be
					// overwritten by the rollback, others must be preserved.
					TaskRollbackConvertorApplication<AbstractTaskModel> application =
							getTaskRollbackConvertorApplication(rt, ct);
					if (application == null) {
						throw new Exception("无法加载兑换接口，任务不能回滚");
					}
					application.conver(rt, ct);
					logAction(ct, TaskAction.getEnterFinishQueueAction(ct));
					// Move into the finish queue.
					pl.zadd(finishKey, TaskScopeUtil.getFinishQueueScope(ct.getStatus()), taskId);

					if (ct.getByteLength() != 0) { // accumulate downloaded page bytes
						pl.incrBy(buildCountInfoKeyByte(space, ct.getChannel().getHbaseKey(), TaskCacheContext.SPACE_BYTELENTH), ct.getByteLength());
					}
					if (ct.getCrawlCount() != 0) { // accumulate crawl count
						pl.incrBy(buildCountInfoKeyByte(space, ct.getChannel().getHbaseKey(), TaskCacheContext.SPACE_CRAWLCOUNT), ct.getCrawlCount());
					}
					// (end time was already set above; the duplicate set was removed)
					saveOrUpdateCacheTask(ct, pl);
					updateQueryIndex(ct, logMap.get(ct.getId()), pl);
					count++;
				}
				syncPipline(pl, true);
				return count;
			}
		};
		return excuteDefaultLocks(action, models);
	}

	/**
	 * Re-queues failed tasks for another attempt: every task in the finish
	 * queue with the given terminal status whose execution count is below
	 * {@code maxExcuteCount} is moved back into the wait queue with its
	 * status reset to WAIT and its start/end times cleared.
	 *
	 * @param d              cache data space of the queues
	 * @param channelId      channel whose finish queue is scanned
	 * @param maxExcuteCount tasks at or above this attempt count are skipped;
	 *                       must be &gt; 1
	 * @param status         terminal status to compensate; WAIT/DOING/null are
	 *                       rejected (nothing to re-queue for those)
	 * @return number of tasks moved back to the wait queue
	 * @throws Exception on Redis/lock failure
	 */
	@Override
	public int reloadCompensation(final CacheDataSpace d, final String channelId,
			final int maxExcuteCount, final TaskStatusEnum status) throws Exception {
		if (status == null || status == TaskStatusEnum.TASK_WATI || status == TaskStatusEnum.TASK_DOING || maxExcuteCount <= 1) {
			return 0;
		}
		return excuteDefaultLock(new TaskRedisCallback<Integer>() {

			@Override
			public Integer doInRedis(Jedis j) throws Exception {
				double[] scope = TaskScopeUtil.getFinishQueueMinMaxScope(status.getStatus());
				byte[] fk = buildChannelFinishQueueKeyByte(d, channelId);
				byte[] wk = buildChannelWaitQueueKeyByte(d, channelId);
				// FIX: copy instead of the previous unchecked cast to
				// LinkedHashSet, which could throw ClassCastException depending
				// on the Set implementation Jedis returns.
				LinkedHashSet<byte[]> ids = Sets.newLinkedHashSet(j.zrangeByScore(fk, scope[0], scope[1]));
				LinkedHashMap<String, AbstractTaskModel> cacheModelMap = findCacheTasksMap(ids, j);
				LinkedHashMap<String, QueryIndexLog> logMap = findCacheQueryIndexLogMap(ids, j);
				Pipeline pl = openPipeline(j, true);
				int count = 0;
				for (Entry<String, AbstractTaskModel> en : cacheModelMap.entrySet()) {
					AbstractTaskModel m = en.getValue();
					if (m.getExcuteCount() >= maxExcuteCount) {
						continue; // retry budget exhausted
					}
					logAction(m, TaskAction.getReloadCompensationAction(m));
					m.setStatus(TaskStatusEnum.TASK_WATI.getStatus());
					logAction(m, TaskAction.getEnterWaitQueueaAction());
					byte[] taskId = RedisSerializeUtil.serializeString(m.getId());
					// Move from finish queue back to wait queue.
					pl.zrem(fk, taskId);
					pl.zadd(wk, TaskScopeUtil.getWaitQueueScope(m.getPriority()), taskId);
					m.setStartTime(null);
					m.setEndTime(null);
					saveOrUpdateCacheTask(m, pl);
					updateQueryIndex(m, logMap.get(m.getId()), pl);
					count++;
				}
				syncPipline(pl, true);
				return count;
			}
		}, d, channelId);
	}

	/**
	 * Convenience overload: builds the {@link CacheDataSpace} from a date and
	 * task type, then delegates to
	 * {@link #reloadCompensation(CacheDataSpace, String, int, TaskStatusEnum)}.
	 */
	@Override
	public int reloadCompensation(Date d, int taskType, String channelId,
			int maxExcuteCount, TaskStatusEnum status) throws Exception {
		return reloadCompensation(new CacheDataSpace(d, taskType), channelId, maxExcuteCount, status);
	}

	/**
	 * Renames each given cache key so its status segment reads DOING ("102").
	 * <p>
	 * Key layout (example: {@code 0000012014103000002100...}): characters
	 * [19, 22) hold the 3-digit status code ("100" = wait, "102" = doing).
	 *
	 * @param keys serialized cache keys whose status segment must be flipped
	 * @throws Exception on Redis failure
	 */
	public void tansformStatus(final List<byte[]> keys) throws Exception {
		// FIX: was TaskRedisCallback<T> with T mistakenly auto-imported from
		// Apache POI; Void expresses "no result" correctly.
		excute(new TaskRedisCallback<Void>() {
			@Override
			public Void doInRedis(Jedis j) throws Exception {
				for (byte[] rawKey : keys) {
					String oldKey = RedisSerializeUtil.deserializeString(rawKey);
					// FIX: splice the status segment positionally. The previous
					// replaceAll(oldKey.substring(19,22), "102") treated the
					// segment as a regex and replaced EVERY occurrence of those
					// three digits anywhere in the key, corrupting the rename
					// target whenever the digits repeated elsewhere.
					String newKey = oldKey.substring(0, 19) + "102" + oldKey.substring(22);
					j.rename(oldKey, newKey);
				}
				return null;
			}
		});
	}

}
