package com.processmining.service.impl;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.deckfour.xes.model.XLog;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.processmining.dao.EventLogMapper;
import com.processmining.dao.MergeEventLogMapper;
import com.processmining.dao.NormLogToEventLogMapper;
import com.processmining.dao.RawLogToEventLogMapper;
import com.processmining.dao.RawLogToNormLogMapper;
import com.processmining.dao.UserMapper;
import com.processmining.entity.EventLog;
import com.processmining.entity.Log;
import com.processmining.entity.NormLog;
import com.processmining.entity.NormLogToEventLog;
import com.processmining.entity.RawLogToEventLog;
import com.processmining.entity.RawLogToNormLog;
import com.processmining.entity.User;
import com.processmining.service.IEventLogService;
import com.processmining.util.algo.SummarizeTool;
import com.processmining.util.algo.SummarizeXLog;
import com.processmining.util.fs.HDFSOperator;


/**
 * Service layer for event-log management: uploads/downloads the log file
 * to/from HDFS, maintains the event-log records, the shared flag, and the
 * lineage links between raw logs, normalized logs and event logs.
 *
 * @author hxuhao
 */
@Service
public class EventLogServiceImpl implements IEventLogService{

	/** HDFS directory prefix under which event-log files are stored. */
	@Value("${k2.hdfs.eventLogPath}")
	private String EVENTLOG_PATH_PREFIX;

	private static final Logger logger = LogManager.getLogger(EventLogServiceImpl.class.getName());

	@Autowired
	private EventLogMapper eventLogDao;

	@Autowired
	private HDFSOperator hdfs;

	@Autowired
	private NormLogToEventLogMapper normLogToEventLogDao;

	@Autowired
	private RawLogToNormLogMapper rawLogToNormLogDao;

	@Autowired
	private UserMapper userDao;

	@Autowired
	private RawLogToEventLogMapper rawLogToEventLogDao;

	@Autowired
	private MergeEventLogMapper mergeEventLogDao;

	/**
	 * Uploads an event-log file to HDFS and inserts its metadata record.
	 *
	 * @param inputStream content of the log file
	 * @param log metadata record (must be an {@link EventLog}); its hdfsid
	 *            is populated on successful upload and its id after insert
	 * @return true when both the HDFS upload and the DB insert succeed
	 * @throws Exception propagated from the HDFS upload
	 */
	@Override
	public boolean uploadLog(InputStream inputStream, Log log) throws Exception {
		logger.info("user {} upload eventLog start.", log.getCreatorid());

		String hdfsId = hdfs.uploadFile(inputStream, EVENTLOG_PATH_PREFIX);
		if (hdfsId == null) {
			logger.error("user {} upload eventLog. failed", log.getCreatorid());
			return false;
		}

		log.setHdfsid(hdfsId);
		int ret = eventLogDao.insertSelective((EventLog) log);
		logger.info("user {} upload eventLog. hdfs id : {}", log.getCreatorid(), log.getHdfsid());
		if (ret == 1) {
			logger.info("user {} insert eventLog. log id : {}", log.getCreatorid(), log.getId());
			return true;
		}
		logger.error("user {} insert eventLog. failed", log.getCreatorid());
		return false;
	}

	/**
	 * Opens a stream over the stored event-log file in HDFS.
	 *
	 * @param logId primary key of the event log
	 * @return an input stream over the file content; the caller must close it
	 * @throws FileNotFoundException when no record exists for logId
	 * @throws Exception propagated from the HDFS download
	 */
	@Override
	public InputStream downloadLog(Integer logId) throws Exception {
		EventLog record = eventLogDao.selectByPrimaryKey(logId);
		if (record == null) {
			throw new FileNotFoundException("文件不存在");
		}
		// Replaced System.out debugging with the class logger.
		logger.debug("download eventLog from {}{}", EVENTLOG_PATH_PREFIX, record.getHdfsid());
		return hdfs.downloadFile(EVENTLOG_PATH_PREFIX + record.getHdfsid());
	}

	/**
	 * Loads one event log enriched with display fields (creator nickname,
	 * formatted creation time) and its linked raw log, when one exists.
	 *
	 * @param logId primary key
	 * @return the populated record, or null when it does not exist
	 */
	@Override
	public Log selectByPK(Integer logId) {
		EventLog eventLog = eventLogDao.selectByPrimaryKey(logId);
		if (eventLog == null) {
			return null;
		}

		// Resolve the uploader's nickname; fall back to "-" when the user
		// row is gone (same null-guard as searchByName — the original
		// dereferenced the lookup result unconditionally and could NPE).
		User creator = userDao.selectByPrimaryKey(eventLog.getCreatorid());
		eventLog.setCreator(creator != null ? creator.getNickname() : "-");

		// Render the creation timestamp as a display string.
		DateFormat format = new SimpleDateFormat("yyyy年MM月dd日 HH:mm");
		eventLog.setCreateTime(format);

		// Attach the originating raw log, if a lineage link exists.
		RawLogToEventLog toEventLog = rawLogToEventLogDao.selectByEventLogId(eventLog.getId());
		if (toEventLog != null) {
			eventLog.setToRawLogId(toEventLog.getRawlogid());
			eventLog.setToRawLogName(toEventLog.getRawlogname());
		}

		return eventLog;
	}

	/**
	 * Deletes every listed event log owned by uid; stops at the first
	 * failure.
	 *
	 * @param logIds candidate primary keys (non-existent ids are skipped)
	 * @param uid the requesting user; only logs created by uid are deleted
	 * @return true when all existing, owned logs were deleted
	 * @throws Exception propagated from {@link #delete(Integer, Integer)}
	 */
	@Override
	public boolean deleteBatch(List<Integer> logIds, Integer uid) throws Exception {
		// Restrict the batch to logs that actually exist for this user.
		List<EventLog> eventloglist = eventLogDao.selectByPKList(logIds, uid);

		for (EventLog eventlog : eventloglist) {
			if (!delete(eventlog.getId(), uid)) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Marks a log as shared. Only its creator may change the flag.
	 *
	 * @return true when the log is (now) shared; false when the log does
	 *         not exist or uid is not its creator
	 */
	@Override
	public boolean shareLog(Integer logid, Integer uid) {
		EventLog log = eventLogDao.selectByPrimaryKey(logid);
		// equals() instead of ==: Integer identity comparison only works
		// inside the small-value cache. Also guards against a missing log,
		// which previously threw NPE.
		if (log == null || !log.getCreatorid().equals(uid)) {
			// missing log or no permission
			return false;
		}
		if (!log.getIsshared()) {
			log.setIsshared(true);
			eventLogDao.updateByPrimaryKeySelective(log);
		}
		return true;
	}

	/**
	 * Clears the shared flag of a log. Only its creator may change it.
	 *
	 * @return true when the log is (now) unshared; false when the log does
	 *         not exist or uid is not its creator
	 */
	@Override
	public boolean unshareLog(Integer logid, Integer uid) {
		EventLog log = eventLogDao.selectByPrimaryKey(logid);
		// Same Integer-equality and null fixes as shareLog.
		if (log == null || !log.getCreatorid().equals(uid)) {
			// missing log or no permission
			return false;
		}
		if (log.getIsshared()) {
			log.setIsshared(false);
			eventLogDao.updateByPrimaryKeySelective(log);
		}
		return true;
	}

	/**
	 * Deletes one event log owned by uid: its lineage links, the record
	 * itself and (for non-merged logs) the backing HDFS file.
	 *
	 * @param logId primary key
	 * @param uid requesting user; must be the log's creator
	 * @return true when the caller owns the log and the delete ran
	 * @throws IOException propagated from the HDFS delete
	 */
	@Override
	public boolean delete(Integer logId, Integer uid) throws IOException {
		EventLog eventlog = eventLogDao.selectByPrimaryKey(logId);
		// equals() instead of == (Integer reference comparison bug).
		if (eventlog == null || !eventlog.getCreatorid().equals(uid)) {
			return false;
		}

		Integer eventlogId = eventlog.getId();
		if (eventlog.getIsmerged()) {
			// Merged log: drop only the merge relation.
			// NOTE(review): the record and HDFS file are NOT removed in
			// this branch — confirm this is intentional.
			mergeEventLogDao.deleteByTargetId(eventlogId);
		} else {
			// Drop the link to the raw log.
			rawLogToEventLogDao.deleteByEventLogId(eventlogId);

			// Drop the link to the normalized log.
			normLogToEventLogDao.deleteByEventLogId(eventlogId);

			// Drop the event-log record itself.
			eventLogDao.deleteByPrimaryKey(eventlogId);

			// Finally remove the file from HDFS; failure is logged only,
			// the DB delete is not rolled back.
			if (hdfs.deleteFile(EVENTLOG_PATH_PREFIX + eventlog.getHdfsid(), false)) {
				logger.info("user {} delete eventLog in hdfs . log id : {}", eventlog.getCreatorid(), eventlog.getId());
			} else {
				logger.error("user {} delete eventLog in hdfs failed .", eventlog.getCreatorid());
			}
		}

		return true;
	}

	/**
	 * Pages through the user's event logs, optionally filtered by name,
	 * and enriches each row with creator nickname, formatted time and the
	 * linked raw/normalized logs.
	 *
	 * @param pageNum 1-based page number
	 * @param pageSize rows per page
	 * @param info name filter; null selects all logs visible to uid
	 * @param uid requesting user
	 * @return the page of enriched event logs
	 */
	@Override
	public PageInfo<EventLog> searchByName(int pageNum, int pageSize, String info, Integer uid) {
		// PageHelper intercepts the NEXT mapper call and applies LIMIT/OFFSET.
		PageHelper.startPage(pageNum, pageSize);
		List<EventLog> logList;
		if (info == null) {
			logList = eventLogDao.selectAll(uid);
		} else {
			logList = eventLogDao.selectByName(info, uid);
		}

		DateFormat format = new SimpleDateFormat("yyyy年MM月dd日 HH:mm");
		for (EventLog eventlog : logList) {
			// Uploader nickname, "-" when the user row no longer exists.
			User u = userDao.selectByPrimaryKey(eventlog.getCreatorid());
			eventlog.setCreator(u != null ? u.getNickname() : "-");

			// Render the creation timestamp as a display string.
			eventlog.setCreateTime(format);

			Integer eventlogId = eventlog.getId();

			// Linked raw log, when present.
			RawLogToEventLog torawlog = rawLogToEventLogDao.selectByEventLogId(eventlogId);
			if (torawlog != null) {
				eventlog.setToRawLogId(torawlog.getRawlogid());
				eventlog.setToRawLogName(torawlog.getRawlogname());
			}

			// Linked normalized log, when present.
			NormLogToEventLog toEventLog = normLogToEventLogDao.selectByEventLogId(eventlogId);
			if (toEventLog != null) {
				eventlog.setToNormLogId(toEventLog.getNormalizedlogid());
				eventlog.setToNormLogName(toEventLog.getNormalizedlogname());
			}
		}
		return new PageInfo<EventLog>(logList);
	}

	/**
	 * Looks up the event logs with the given ids that are visible to uid.
	 */
	@Override
	public List<EventLog> searchByPKList(List<Integer> logIds, Integer uid) {
		return eventLogDao.selectByPKList(logIds, uid);
	}

	/**
	 * Deletes the event log(s) derived from a normalized log, both the
	 * directly linked one and the one reachable through the raw log.
	 *
	 * @param normlogId primary key of the normalized log
	 * @param uid requesting user
	 * @return the result of the LAST delete attempted (false when nothing
	 *         was linked)
	 * @throws IOException propagated from {@link #delete(Integer, Integer)}
	 */
	@Override
	public boolean deleteIfExist(Integer normlogId, Integer uid) throws IOException {
		boolean ret = false;

		// Event log directly linked to the normalized log.
		NormLogToEventLog toNormLog = normLogToEventLogDao.selectByNormLogId(normlogId);
		if (toNormLog != null) {
			ret = this.delete(toNormLog.getEventlogid(), uid);
		}

		// Event log linked indirectly via the raw log.
		RawLogToNormLog rawLogToNormLog = rawLogToNormLogDao.selectByNormLogId(normlogId);
		if (rawLogToNormLog != null) {
			RawLogToEventLog rawLogToEventLog = rawLogToEventLogDao.selectByRawLogId(rawLogToNormLog.getRawlogid());
			if (rawLogToEventLog != null) {
				ret = this.delete(rawLogToEventLog.getEventlogid(), uid);
			}
		}

		return ret;
	}

	/**
	 * Records the lineage of a generated event log: links it to the
	 * normalized log it came from and, transitively, to that normalized
	 * log's raw log when one exists.
	 *
	 * @return true once the relationship rows are inserted
	 */
	@Override
	public boolean BuildRelationship(NormLog normLog, EventLog eventLog) {
		Integer eventLogId = eventLog.getId();
		String eventLogName = eventLog.getName();

		// Link to the normalized log.
		NormLogToEventLog toEventLog = new NormLogToEventLog();
		toEventLog.setEventlogid(eventLogId);
		toEventLog.setEventlogname(eventLogName);
		toEventLog.setNormalizedlogid(normLog.getId());
		toEventLog.setNormalizedlogname(normLog.getName());
		normLogToEventLogDao.insertSelective(toEventLog);

		// Link to the raw log, when the normalized log has one.
		RawLogToNormLog toNormLog = rawLogToNormLogDao.selectByNormLogId(normLog.getId());
		if (toNormLog != null) {
			RawLogToEventLog rawLogToEventLog = new RawLogToEventLog();
			rawLogToEventLog.setEventlogid(eventLogId);
			rawLogToEventLog.setEventlogname(eventLogName);
			rawLogToEventLog.setRawlogid(toNormLog.getRawlogid());
			rawLogToEventLog.setRawlogname(toNormLog.getRawlogname());

			rawLogToEventLogDao.insertSelective(rawLogToEventLog);
		}

		// Bug fix: the original unconditionally returned false even after
		// successfully inserting both relationship rows.
		return true;
	}

	/** Not implemented yet. */
	@Override
	public boolean generateChiefInfo(Integer eventLogId) {
		// TODO Auto-generated method stub
		return false;
	}

	/**
	 * Applies a selective update to an event-log record.
	 *
	 * @return true when exactly one row was updated
	 */
	@Override
	public boolean updateLog(EventLog log) {
		// updateByPrimaryKeySelective returns the affected-row count.
		return eventLogDao.updateByPrimaryKeySelective(log) == 1;
	}

	/** Not implemented yet. */
	@Override
	public boolean deleteFake(Integer logid) {
		// TODO Auto-generated method stub
		return false;
	}

	/** Not implemented yet. */
	@Override
	public boolean deleteFakeBatch(List<Integer> logIds, Integer uid) {
		// TODO Auto-generated method stub
		return false;
	}

	/** Not implemented yet. */
	@Override
	public boolean cleanLog(Integer uid) {
		return false;
	}

}
