package com.processmining.service.impl;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Objects;

import org.apache.hadoop.hdfs.client.HdfsUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.deckfour.xes.model.XLog;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.processmining.dao.EventLogMapper;
import com.processmining.dao.NormLogMapper;
import com.processmining.dao.NormLogToEventLogMapper;
import com.processmining.dao.RawLogToNormLogMapper;
import com.processmining.dao.UserMapper;
import com.processmining.entity.EventLog;
import com.processmining.entity.Log;
import com.processmining.entity.NormLog;
import com.processmining.entity.NormLogToEventLog;
import com.processmining.entity.RawLog;
import com.processmining.entity.RawLogToEventLog;
import com.processmining.entity.RawLogToNormLog;
import com.processmining.entity.User;
import com.processmining.service.INormLogService;
import com.processmining.util.fs.HDFSOperator;
@Service
public class NormLogServiceImpl implements INormLogService {

	private static final Logger logger = LogManager.getLogger(NormLogServiceImpl.class);

	// HDFS directory prefix under which normalized-log files are stored.
	@Value("${k2.hdfs.normLogPath}")
	private String NORMLOG_PATH_PREFIX;

	// HDFS directory prefix under which event-log files are stored.
	@Value("${k2.hdfs.eventLogPath}")
	private String EVENTLOG_PATH_PREFIX;

	@Autowired
	private UserMapper userDao;

	@Autowired
	private NormLogMapper normLogDao;

	@Autowired
	private RawLogToNormLogMapper rawLogToNormLogDao;

	@Autowired
	private NormLogToEventLogMapper normLogToEventLogDao;

	@Autowired
	private EventLogMapper eventLogDao;

	@Autowired
	private HDFSOperator hdfs;

	/**
	 * Converts a normalized log into an event log.
	 *
	 * @param normLogId primary key of the normalized log to convert
	 * @return always {@code false} — not implemented yet
	 */
	@Override
	public boolean convertToEventLog(Integer normLogId) {
		// TODO not implemented
		return false;
	}

	/**
	 * Pages through the normalized logs visible to a user, optionally filtered by
	 * name, and decorates each row with its creator nickname, formatted creation
	 * time and the raw/event logs it is linked to.
	 *
	 * @param pageNum  1-based page number (PageHelper convention)
	 * @param pageSize rows per page
	 * @param info     name filter; {@code null} means "no filter"
	 * @param uid      id of the requesting user
	 * @return the requested page of decorated {@link NormLog} rows
	 */
	@Override
	public PageInfo<NormLog> searchByName(int pageNum, int pageSize, String info, Integer uid) {
		PageHelper.startPage(pageNum, pageSize);
		List<NormLog> logList = (info == null)
				? normLogDao.selectAll(uid)
				: normLogDao.selectByName(info, uid);

		// SimpleDateFormat is not thread-safe; build one per call rather than sharing.
		DateFormat format = new SimpleDateFormat("yyyy年MM月dd日 HH:mm");
		for (NormLog normlog : logList) {
			populateDetails(normlog, format);
		}
		return new PageInfo<NormLog>(logList);
	}

	/**
	 * Fills in the display fields of one normalized-log row: creator nickname
	 * ("-" when the user no longer exists), formatted creation time, and the
	 * linked raw log / event log when such links exist.
	 */
	private void populateDetails(NormLog normlog, DateFormat format) {
		// Creator nickname; fall back to "-" for a deleted/unknown user.
		User u = userDao.selectByPrimaryKey(normlog.getCreatorid());
		normlog.setCreator(u != null ? u.getNickname() : "-");

		// Renders the creation timestamp to a display string with the given format.
		normlog.setCreateTime(format);

		Integer normLogId = normlog.getId();

		// Link to the raw log this normalized log was derived from, if any.
		RawLogToNormLog toNormLog = rawLogToNormLogDao.selectByNormLogId(normLogId);
		if (toNormLog != null) {
			normlog.setToRawLogId(toNormLog.getRawlogid());
			normlog.setToRawLogName(toNormLog.getRawlogname());
		}

		// Link to the event log generated from this normalized log, if any.
		NormLogToEventLog toEventLog = normLogToEventLogDao.selectByNormLogId(normLogId);
		if (toEventLog != null) {
			normlog.setToEventLogId(toEventLog.getEventlogid());
			normlog.setToEventLogName(toEventLog.getEventlogname());
		}
	}

	/**
	 * Looks up the normalized logs whose primary keys are in {@code normlogIds}
	 * and which are visible to user {@code uid}.
	 */
	@Override
	public List<NormLog> searchByPKList(List<Integer> normlogIds, Integer uid) {
		return normLogDao.selectByPKList(normlogIds, uid);
	}

	/**
	 * Stores a normalized-log file on HDFS and inserts its metadata row.
	 *
	 * @param inputStream file content to upload; presumably closed by the HDFS
	 *                    operator — TODO confirm
	 * @param log         metadata record; must actually be a {@link NormLog}
	 * @return {@code true} when both the HDFS upload and the DB insert succeed
	 * @throws Exception propagated from the HDFS upload
	 */
	@Override
	public boolean uploadLog(InputStream inputStream, Log log) throws Exception {
		logger.info("user {} upload normLog start.", log.getCreatorid());

		String hdfsId = hdfs.uploadFile(inputStream, NORMLOG_PATH_PREFIX);
		if (hdfsId == null) {
			logger.error("user {} upload normLog. failed", log.getCreatorid());
			return false;
		}

		log.setHdfsid(hdfsId);
		int ret = normLogDao.insertSelective((NormLog) log);
		logger.info("user {} upload normLog. hdfs id : {}", log.getCreatorid(), log.getHdfsid());
		if (ret == 1) {
			logger.info("user {} insert normLog. log id : {}", log.getCreatorid(), log.getId());
			return true;
		}
		logger.error("user {} insert normLog. failed", log.getCreatorid());
		return false;
	}

	/**
	 * Opens a stream over the stored content of a normalized log.
	 *
	 * @param logId primary key of the normalized log
	 * @return the file content stream; the caller is responsible for closing it
	 * @throws Exception propagated from the HDFS download. NOTE(review): an
	 *         unknown {@code logId} currently surfaces as a NullPointerException.
	 */
	@Override
	public InputStream downloadLog(Integer logId) throws Exception {
		NormLog record = normLogDao.selectByPrimaryKey(logId);
		return hdfs.downloadFile(NORMLOG_PATH_PREFIX + record.getHdfsid());
	}

	/**
	 * Loads one normalized log by primary key and decorates it exactly like a
	 * {@link #searchByName} row (creator, formatted time, linked raw/event logs).
	 */
	@Override
	public Log selectByPK(Integer logId) {
		NormLog normlog = normLogDao.selectByPrimaryKey(logId);
		// Shared helper gives the "-" creator fallback; the previous inline code
		// threw a NullPointerException when the creator user row was missing.
		populateDetails(normlog, new SimpleDateFormat("yyyy年MM月dd日 HH:mm"));
		return normlog;
	}

	/**
	 * Deletes one normalized log owned by {@code uid}: removes the HDFS file,
	 * the links to its raw log and event log, and finally the DB row itself.
	 *
	 * @return {@code true} when the log existed and belonged to {@code uid}
	 * @throws IOException propagated from the HDFS delete
	 */
	@Override
	public boolean delete(Integer logId, Integer uid) throws IOException {
		NormLog normlog = normLogDao.selectByPrimaryKey(logId);
		// BUG FIX: the owner check used Integer identity (==), which only works
		// for small cached values; compare by value instead.
		if (normlog == null || !Objects.equals(normlog.getCreatorid(), uid)) {
			return false;
		}

		// Remove the file from HDFS first, then the relational bookkeeping.
		hdfs.deleteFile(NORMLOG_PATH_PREFIX + normlog.getHdfsid(), false);

		Integer normlogId = normlog.getId();
		rawLogToNormLogDao.deleteByNormLogId(normlogId);   // link to the raw log
		normLogToEventLogDao.deleteByNormLogId(normlogId); // link to the event log
		normLogDao.deleteByPrimaryKey(normlogId);          // the record itself
		return true;
	}

	/**
	 * Deletes each of the given normalized logs that exists and is owned by
	 * {@code uid}; stops at the first failure.
	 *
	 * @return {@code true} when every existing log was deleted
	 */
	@Override
	public boolean deleteBatch(List<Integer> logIds, Integer uid) throws Exception {
		// Restrict to logs that currently exist and are visible to this user.
		List<NormLog> normloglist = normLogDao.selectByPKList(logIds, uid);
		for (NormLog normlog : normloglist) {
			if (!delete(normlog.getId(), uid)) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Marks a normalized log as shared. Only its creator may do so; the call is
	 * idempotent.
	 *
	 * @return {@code true} when the caller owns the log
	 */
	@Override
	public boolean shareLog(Integer logid, Integer uid) {
		return setShared(logid, uid, true);
	}

	/**
	 * Marks a normalized log as not shared. Only its creator may do so; the call
	 * is idempotent.
	 *
	 * @return {@code true} when the caller owns the log
	 */
	@Override
	public boolean unshareLog(Integer logid, Integer uid) {
		return setShared(logid, uid, false);
	}

	/**
	 * Common implementation for share/unshare: verifies ownership by value
	 * (BUG FIX — the original compared Integer ids with ==, and threw a
	 * NullPointerException for an unknown log id) and persists the flag only
	 * when it actually changes.
	 */
	private boolean setShared(Integer logid, Integer uid, boolean shared) {
		NormLog log = normLogDao.selectByPrimaryKey(logid);
		if (log == null || !Objects.equals(log.getCreatorid(), uid)) {
			// Missing log, or the caller is not the owner.
			return false;
		}
		if (log.getIsshared() != shared) {
			log.setIsshared(shared);
			normLogDao.updateByPrimaryKeySelective(log);
		}
		return true;
	}

	/**
	 * If the given raw log already has a normalized log, deletes it so the raw
	 * log can be re-normalized.
	 *
	 * @return {@code true} when an existing normalized log was found and deleted
	 * @throws IOException propagated from the HDFS delete
	 */
	@Override
	public boolean deleteIfExist(Integer rawlogId, Integer uid) throws IOException {
		RawLogToNormLog toNormLog = rawLogToNormLogDao.selectByRawLogId(rawlogId);
		if (toNormLog == null) {
			return false;
		}
		boolean deleted = this.delete(toNormLog.getNormalizedlogid(), uid);
		if (deleted) {
			// Replaces the previous System.out.println with the class logger.
			logger.info("删除原规范化日志");
		}
		return deleted;
	}

	/**
	 * Soft-deletes one normalized log.
	 *
	 * @return always {@code false} — not implemented yet
	 */
	@Override
	public boolean deleteFake(Integer logid) {
		// TODO not implemented
		return false;
	}

	/**
	 * Soft-deletes a batch of normalized logs.
	 *
	 * @return always {@code false} — not implemented yet
	 */
	@Override
	public boolean deleteFakeBatch(List<Integer> logIds, Integer uid) {
		// TODO not implemented
		return false;
	}

	/**
	 * Purges a user's soft-deleted normalized logs.
	 *
	 * @return always {@code false} — not implemented yet
	 */
	@Override
	public boolean cleanLog(Integer uid) {
		// TODO not implemented
		return false;
	}

}
