package com.processmining.service.impl;


import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.processmining.dao.NormLogMapper;
import com.processmining.dao.RawLogMapper;
import com.processmining.dao.RawLogToEventLogMapper;
import com.processmining.dao.RawLogToNormLogMapper;
import com.processmining.dao.UserMapper;
import com.processmining.entity.Log;
import com.processmining.entity.NormLog;
import com.processmining.entity.NormLogToEventLog;
import com.processmining.entity.RawLog;
import com.processmining.entity.RawLogToEventLog;
import com.processmining.entity.RawLogToNormLog;
import com.processmining.entity.User;
import com.processmining.service.IRawLogService;
import com.processmining.support.normalize.FormatInfo;
import com.processmining.support.normalize.LogConfiguration;
import com.processmining.support.normalize.TempLine;
import com.processmining.util.FileReadLine;
import com.processmining.util.fs.HDFSOperator;




/**
 * Raw-log service implementation.
 *
 * <p>Raw log files live on HDFS under {@code RAWLOG_PATH_PREFIX}; this service
 * uploads/downloads/deletes them, keeps the matching database records in sync,
 * normalizes a raw log into a "norm" log (tab-separated, header line first),
 * and maintains the raw-log &lt;-&gt; normalized/event-log link tables.
 *
 * @author hxuhao
 */
@Service
public class RawLogServiceImpl implements IRawLogService {

	private static final Logger logger = LogManager.getLogger(RawLogServiceImpl.class.getName());

	/** HDFS directory prefix under which raw log files are stored. */
	@Value("${basic.rawLogPath}")
	private String RAWLOG_PATH_PREFIX;

	/** HDFS directory prefix under which normalized log files are stored. */
	@Value("${k2.hdfs.normLogPath}")
	private String NORMLOG_PATH_PREFIX;

	@Autowired
	private RawLogToNormLogMapper rawLogToNormLogDao;

	@Autowired
	private RawLogToEventLogMapper rawLogToEventLogDao;

	@Autowired
	private UserMapper userDao;

	@Autowired
	private RawLogMapper rawlogDao;

	@Autowired
	private NormLogMapper normLogDao;

	@Autowired
	private HDFSOperator hdfs;

	/**
	 * Null-safe ownership check.
	 *
	 * <p>Fix: the previous code compared the boxed creator id with {@code ==},
	 * which only works for values in the Integer cache range (-128..127).
	 */
	private static boolean isOwnedBy(Log log, Integer uid) {
		return log != null && Objects.equals(log.getCreatorid(), uid);
	}

	/**
	 * Uploads a raw log file to HDFS and inserts its metadata record.
	 *
	 * @param input file content stream (closed by the caller)
	 * @param log   metadata record; callers are expected to pass a {@link RawLog}
	 * @return true when both the HDFS upload and the DB insert succeed
	 */
	@Override
	public boolean uploadLog(InputStream input, Log log) throws Exception {
		logger.info("user " + log.getCreatorid() + " upload rawLog " + " start.");

		String hdfsId = hdfs.uploadFile(input, RAWLOG_PATH_PREFIX);
		if (hdfsId == null) {
			logger.error("user " + log.getCreatorid() + " upload rawLog. failed");
			return false;
		}

		log.setHdfsid(hdfsId);
		// NOTE(review): preserves the original contract — a non-RawLog argument
		// throws ClassCastException here.
		int ret = rawlogDao.insertSelective((RawLog) log);
		logger.info("user " + log.getCreatorid() + " upload rawLog. hdfs id : " + log.getHdfsid());
		if (ret == 1) {
			logger.info("user " + log.getCreatorid() + " insert rawLog. log id : " + log.getId());
			return true;
		}
		logger.error("user " + log.getCreatorid() + " insert rawLog. failed");
		return false;
	}

	/**
	 * Opens a stream over the stored raw log file.
	 *
	 * @param logId primary key of the raw log
	 * @return the HDFS input stream, or null when no such record exists
	 *         (previously this threw a NullPointerException)
	 */
	@Override
	public InputStream downloadLog(Integer logId) throws Exception {
		RawLog record = rawlogDao.selectByPrimaryKey(logId);
		if (record == null) {
			logger.warn("downloadLog: rawLog " + logId + " not found");
			return null;
		}
		return hdfs.downloadFile(RAWLOG_PATH_PREFIX + record.getHdfsid());
	}

	/**
	 * Deletes one raw log owned by the user: the HDFS file, the links to its
	 * normalized and event logs, and the database record itself.
	 *
	 * @return false when the log does not exist or is not owned by {@code uid}
	 */
	@Override
	public boolean delete(Integer logId, Integer uid) throws IOException {
		RawLog rawlog = rawlogDao.selectByPrimaryKey(logId);
		// Fix: ownership was checked with Integer '==' identity comparison.
		if (!isOwnedBy(rawlog, uid)) {
			return false;
		}

		// Remove the file from HDFS first.
		hdfs.deleteFile(RAWLOG_PATH_PREFIX + rawlog.getHdfsid(), false);

		Integer rawlogId = rawlog.getId();
		// Drop the link to the normalized log.
		rawLogToNormLogDao.deleteByRawLogId(rawlogId);
		// Drop the link to the event log.
		rawLogToEventLogDao.deleteByRawLogId(rawlogId);
		// Drop the raw-log record itself.
		rawlogDao.deleteByPrimaryKey(rawlogId);
		return true;
	}

	/**
	 * Normalizes a raw log into a "norm" log and registers it.
	 *
	 * @param id                     raw log id
	 * @param uid                    owner of the new normalized log
	 * @param formats                data-item format configuration
	 * @param timeNames              time items to merge
	 * @param targetTimeName         name of the merged time item
	 * @param dataNames              data items to rename/merge
	 * @param oriitemSeparator       source item separator
	 * @param orinameValSeparator    source name/value separator (may be null)
	 * @param orinulVal              source null-value token
	 * @param targetitemSeparator    target item separator (currently unused, see note)
	 * @param targetnameValSeparator target name/value separator (currently unused)
	 * @param targetnulVal           target null-value token (currently unused)
	 * @return true when the normalized log was written to HDFS and both DB
	 *         records (norm log + raw-to-norm link) were inserted
	 */
	@Override
	public boolean convertToNormLog(Integer id, Integer uid, String formats, String timeNames,
			String targetTimeName, String dataNames, String oriitemSeparator,
			String orinameValSeparator, String orinulVal, String targetitemSeparator,
			String targetnameValSeparator, String targetnulVal) throws Exception {

		// Read and normalize the raw log line by line.
		String normLogContent;
		InputStream input = this.downloadLog(id);
		try {
			normLogContent = buildNormalizedContent(input, formats, timeNames, targetTimeName,
					dataNames, oriitemSeparator, orinameValSeparator, orinulVal);
		} finally {
			// Fix: the raw-log stream was never closed (resource leak).
			if (input != null) {
				input.close();
			}
		}

		RawLog rawLog = rawlogDao.selectByPrimaryKey(id);
		if (rawLog == null) {
			return false;
		}
		String normLogName = deriveNormLogName(rawLog.getName());

		// Write the normalized log to HDFS.
		InputStream normLogInput =
				new ByteArrayInputStream(normLogContent.getBytes(StandardCharsets.UTF_8));
		String normLogHDFSID = hdfs.uploadFile(normLogInput, NORMLOG_PATH_PREFIX);
		if (normLogHDFSID == null) {
			return false;
		}

		// Insert the normalized-log record.
		NormLog normLog = new NormLog();
		normLog.setFormat(rawLog.getFormat());
		normLog.setIsshared(false);
		normLog.setHdfsid(normLogHDFSID);
		normLog.setName(normLogName);
		normLog.setCreatorid(uid);
		if (normLogDao.insertSelective(normLog) != 1) {
			return false;
		}

		// Insert the raw-log -> normalized-log link.
		RawLogToNormLog toNormLog = new RawLogToNormLog();
		toNormLog.setNormalizedlogid(normLog.getId());
		toNormLog.setNormalizedlogname(normLog.getName());
		toNormLog.setRawlogid(rawLog.getId());
		toNormLog.setRawlogname(rawLog.getName());
		return rawLogToNormLogDao.insertSelective(toNormLog) == 1;
	}

	/**
	 * Streams the raw log through the normalization pipeline and returns the
	 * normalized content: one header line of item names, then one record per
	 * non-empty input line, tab-separated, CRLF line endings.
	 *
	 * <p>NOTE(review): the caller-supplied target separators/null token are
	 * ignored here, exactly as in the original code — the output is always
	 * tab-separated with {@code " "} for null values. Confirm before wiring
	 * them through.
	 */
	private String buildNormalizedContent(InputStream input, String formats, String timeNames,
			String targetTimeName, String dataNames, String oriitemSeparator,
			String orinameValSeparator, String orinulVal) {
		final String nulVal = " ";
		final String separator = "\t";

		StringBuilder buffer = new StringBuilder();
		// Number of records written so far; the header is emitted with the first one.
		int written = 0;
		if (input != null) {
			FileReadLine readLine = new FileReadLine(input);
			LogConfiguration lc = new LogConfiguration(formats, timeNames, targetTimeName,
					dataNames, oriitemSeparator, orinameValSeparator, orinulVal);
			while (readLine.hasNext()) {
				String line = readLine.getLine();
				if (line.equals("")) {
					continue;
				}

				// Parse the line into name/value items.
				TempLine tempLine;
				if (lc.getNameValSeparator() == null) {
					tempLine = new TempLine(line, lc.getItemSeparator(), lc.getNulVal());
				} else {
					tempLine = new TempLine(line, lc.getNameValSeparator(), lc.getItemSeparator(),
							lc.getNulVal());
				}

				// Step 1: apply the configured format conversion to every item.
				FormatInfo[] formatInfos = lc.getFormatInfos();
				Map<String, String> itemMap = tempLine.getItemMap();
				for (FormatInfo info : formatInfos) {
					String value = itemMap.get(info.getItemNameOrIndex());
					value = info.formatTransform(value, itemMap.get(info.getFormatTypeNameOrIndex()));
					tempLine.modifyValue(info.getItemNameOrIndex(), value);
				}

				// Step 2: merge the time items into the single target time item.
				tempLine.mergeTimeItems(lc.getTargetTimeName(), lc.getTimeItems());

				// Step 3: rename/merge the remaining data items.
				tempLine.renameORmerge(lc.getRenameORmergeItems());

				// Step 4: append to the output (header first, once).
				if (written == 0) {
					buffer.append(tempLine.generateItemNamesLine(separator)).append("\r\n");
				}
				buffer.append(tempLine.generateNewLine(separator, nulVal)).append("\r\n");
				written++;
			}
		}
		return buffer.toString();
	}

	/**
	 * Derives the normalized-log file name by inserting "_norm" before the
	 * extension, e.g. "access.log" -> "access_norm.log".
	 *
	 * <p>Fix: the previous split-based code read {@code arr[1]} unconditionally
	 * and threw ArrayIndexOutOfBoundsException for names without a dot; it also
	 * truncated names containing more than one dot.
	 */
	private static String deriveNormLogName(String rawLogName) {
		int dot = rawLogName.lastIndexOf('.');
		if (dot < 0) {
			return rawLogName + "_norm";
		}
		return rawLogName.substring(0, dot) + "_norm" + rawLogName.substring(dot);
	}

	/**
	 * Loads one raw log decorated with its creator's nickname, a formatted
	 * creation time, and the ids/names of its derived normalized and event logs.
	 */
	@Override
	public Log selectByPK(Integer logId) {
		RawLog rawlog = rawlogDao.selectByPrimaryKey(logId);
		// Resolve the uploader's display name.
		rawlog.setCreator(userDao.selectByPrimaryKey(rawlog.getCreatorid()).getNickname());

		// Convert the creation Date into a display string.
		DateFormat format = new SimpleDateFormat("yyyy年MM月dd日 HH:mm");
		rawlog.setCreateTime(format);

		Integer rawLogId = rawlog.getId();
		// Fix: the link must be looked up by RAW log id (as searchByName does);
		// the original called selectByNormLogId with a raw-log id.
		RawLogToNormLog toNormLog = rawLogToNormLogDao.selectByRawLogId(rawLogId);
		if (toNormLog != null) {
			rawlog.setToNormLogId(toNormLog.getNormalizedlogid());
			rawlog.setToNormLogName(toNormLog.getNormalizedlogname());
		}

		// Attach the derived event log, if any.
		RawLogToEventLog toEventLog = rawLogToEventLogDao.selectByRawLogId(rawLogId);
		if (toEventLog != null) {
			rawlog.setToEventLogId(toEventLog.getEventlogid());
			rawlog.setToEventLogName(toEventLog.getEventlogname());
		}

		return rawlog;
	}

	/**
	 * Pages through the user's raw logs, optionally filtered by name, decorating
	 * each row like {@link #selectByPK(Integer)} does.
	 *
	 * @param pageNum  1-based page number
	 * @param pageSize rows per page
	 * @param info     name filter; null lists all logs visible to the user
	 * @param uid      current user id
	 */
	@Override
	public PageInfo<RawLog> searchByName(int pageNum, int pageSize, String info, Integer uid) {
		PageHelper.startPage(pageNum, pageSize);
		List<RawLog> logList = (info == null)
				? rawlogDao.selectAll(uid)
				: rawlogDao.selectByName(info, uid);

		DateFormat format = new SimpleDateFormat("yyyy年MM月dd日 HH:mm");
		for (RawLog rawlog : logList) {
			// Uploader nickname; "-" when the user record no longer exists.
			User u = userDao.selectByPrimaryKey(rawlog.getCreatorid());
			rawlog.setCreator(u != null ? u.getNickname() : "-");

			// Convert the creation Date into a display string.
			rawlog.setCreateTime(format);

			Integer rawLogId = rawlog.getId();
			// Attach the derived normalized log, if any.
			RawLogToNormLog toNormLog = rawLogToNormLogDao.selectByRawLogId(rawLogId);
			if (toNormLog != null) {
				rawlog.setToNormLogId(toNormLog.getNormalizedlogid());
				rawlog.setToNormLogName(toNormLog.getNormalizedlogname());
			}

			// Attach the derived event log, if any.
			RawLogToEventLog toEventLog = rawLogToEventLogDao.selectByRawLogId(rawLogId);
			if (toEventLog != null) {
				rawlog.setToEventLogId(toEventLog.getEventlogid());
				rawlog.setToEventLogName(toEventLog.getEventlogname());
			}
		}
		return new PageInfo<RawLog>(logList);
	}

	/**
	 * Deletes several raw logs owned by the user, aborting on the first
	 * ownership or HDFS failure.
	 *
	 * @return true only when every requested log was removed
	 */
	@Override
	public boolean deleteBatch(List<Integer> logIds, Integer uid) throws Exception {
		// Only logs that still exist and are visible to this user.
		List<RawLog> rawloglist = rawlogDao.selectByPKList(logIds, uid);

		for (RawLog rawlog : rawloglist) {
			logger.debug("deleteBatch: deleting rawLog " + rawlog.getName());
			// Fix: ownership was checked with Integer '==' identity comparison.
			if (!isOwnedBy(rawlog, uid)) {
				return false;
			}
			if (!hdfs.deleteFile(RAWLOG_PATH_PREFIX + rawlog.getHdfsid(), false)) {
				// Abort remaining deletions on the first HDFS failure.
				return false;
			}
			Integer rawlogId = rawlog.getId();
			rawlogDao.deleteByPrimaryKey(rawlogId);
			rawLogToEventLogDao.deleteByRawLogId(rawlogId);
			rawLogToNormLogDao.deleteByRawLogId(rawlogId);
		}
		return true;
	}

	/** Looks up the raw logs whose ids are in {@code rawlogIds} and are visible to {@code uid}. */
	@Override
	public List<RawLog> searchByPKList(List<Integer> rawlogIds, Integer uid) {
		return rawlogDao.selectByPKList(rawlogIds, uid);
	}

	/**
	 * Marks a log as shared. Idempotent: already-shared logs report success.
	 *
	 * @return false when the log is missing or not owned by {@code uid}
	 */
	@Override
	public boolean shareLog(Integer logid, Integer uid) {
		RawLog log = rawlogDao.selectByPrimaryKey(logid);
		// Fix: ownership was checked with Integer '==' identity comparison;
		// the null check also prevents an NPE for a missing log.
		if (!isOwnedBy(log, uid)) {
			return false;
		}
		if (!log.getIsshared()) {
			log.setIsshared(true);
			rawlogDao.updateByPrimaryKeySelective(log);
		}
		return true;
	}

	/**
	 * Clears a log's shared flag. Idempotent: already-private logs report success.
	 *
	 * @return false when the log is missing or not owned by {@code uid}
	 */
	@Override
	public boolean unshareLog(Integer logid, Integer uid) {
		RawLog log = rawlogDao.selectByPrimaryKey(logid);
		// Fix: ownership was checked with Integer '==' identity comparison.
		if (!isOwnedBy(log, uid)) {
			return false;
		}
		if (log.getIsshared()) {
			log.setIsshared(false);
			rawlogDao.updateByPrimaryKeySelective(log);
		}
		return true;
	}

	/** Not implemented yet; always reports failure. */
	@Override
	public boolean deleteFake(Integer logid) {
		return false;
	}

	/** Not implemented yet; always reports success (preserved behavior). */
	@Override
	public boolean deleteFakeBatch(List<Integer> logIds, Integer uid) {
		return true;
	}

	/** Not implemented yet; always reports failure. */
	@Override
	public boolean cleanLog(Integer uid) {
		// TODO implement log cleaning
		return false;
	}

}
