package com.hollycrm.hollysqm.job.index;

import static java.io.File.separator;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.SolrInputDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.dangdang.ddframe.job.api.JobExecutionMultipleShardingContext;
import com.dangdang.ddframe.job.plugin.job.type.simple.AbstractSimpleElasticJob;
import com.hollycrm.hollysqm.bean.FtpConfigurBean;
import com.hollycrm.hollysqm.bean.XmlFtpSourceBean;
import com.hollycrm.hollysqm.core.index.factory.IndexSearchFactory;
import com.hollycrm.hollysqm.core.index.service.IndexSearchService;
import com.hollycrm.hollysqm.core.util.DocToBeanUtil;
import com.hollycrm.hollysqm.core.vo.V8DocBean;
import com.hollycrm.hollysqm.core.voice.bean.ParseResult;
import com.hollycrm.hollysqm.core.voice.parsing.IVoiceParse;
import com.hollycrm.hollysqm.core.voice.parsing.factory.VoiceParserFactory;
import com.hollycrm.hollysqm.entities.TblJobIndex;
import com.hollycrm.hollysqm.entities.TblVocCustcontinfo;
import com.hollycrm.hollysqm.job.index.dao.CustcontinfoJobDao;
import com.hollycrm.hollysqm.job.index.dao.IndexJobDao;
import com.hollycrm.hollysqm.util.Constant;
import com.hollycrm.hollysqm.util.ftp.FTPManager;
import com.hollycrm.hollysqm.util.ftp.FTPServerInterface;

/**
 * 建立V8索引Job
 * 第一步，读取指定时间段的接触记录
 * 第二步，通过ID+时间，下载XML文件
 * 第三步，解析XML文件，抽取文本，组合对象
 * 第四步，提交文档对象
 * @author jianglong
 * @date 2017年2月23日 下午4:25:44
 */
@Service
public class CreateV8IndexJob extends AbstractSimpleElasticJob {

	private final Logger log = LoggerFactory.getLogger(getClass());
	/**
	 * Reads the job checkpoint rows (tbl_job_index).
	 */
	@Autowired
	private IndexJobDao indexJobDao;
	/**
	 * Loads the contact record matching a downloaded XML file.
	 */
	@Autowired
	private CustcontinfoJobDao custcontinfoJobDao;
	/**
	 * Supplies the Solr index service (V8/I8).
	 */
	@Autowired
	private IndexSearchFactory indexSearchFactory;

	/**
	 * FTP download configuration source.
	 */
	@Autowired
	private XmlFtpSourceBean ftpSourceBean;
	/**
	 * Local directory the XML files are downloaded into.
	 */
	@Value("${ftp.download1.savePath}")
	private String saveLocalPath;
	/**
	 * FTP download service.
	 */
	private final FTPManager ftpManager = new FTPManager();

	/** Hour slot currently being processed, taken from tbl_job_index. */
	private String exeTime = null;

	// Production runs on Linux and the FTP paths are slash-separated anyway,
	// so a fixed "/" is used instead of File.separator.
	private final static String SLASH = "/";

	/**
	 * Job entry point: reads the checkpoint, downloads the hour's XML files,
	 * builds and commits the Solr documents, then cleans up the local directory
	 * and hands the result (SUCCESS/FAIL) to the listener via the job parameter.
	 */
	@Override
	public void process(JobExecutionMultipleShardingContext shardingContext) {
		log.info("开始执行CreateV8IndexJob...");
		TblJobIndex jobIndex;
		try {
			List<TblJobIndex> indexList = indexJobDao.getJobIndexList(Constant.CREATE_V8_INDEX_JOB);
			if (indexList == null || indexList.isEmpty()) {
				throw new RuntimeException("tbl_job_index 表中数据不可以为空！");
			}
			jobIndex = indexList.get(0);
		} catch (Exception e) {
			// FIX: the original swallowed this exception (printStackTrace) and then
			// dereferenced the still-null jobIndex, turning a data problem into an NPE.
			log.error("读取tbl_job_index失败", e);
			shardingContext.setJobParameter(Constant.FAIL);
			return;
		}
		if (StringUtils.equals(jobIndex.getStatus(), "0")) { // "0": previous run has not finished yet
			log.info("上一次Job任务还未结束，不能执行本次操作,详情请查看Tab_Job_Index表...");
			// FIX: must stop here — the original only logged and then ran anyway,
			// defeating the overlap guard the status flag is meant to provide.
			// NOTE(review): jobParameter is deliberately left unset for a skipped
			// run — confirm what the listener expects in this case.
			return;
		}
		exeTime = jobIndex.getExeTime();
		String areaCode = jobIndex.getAreaCode();
		String hours = jobIndex.getExeTime();
		String jobParameter = Constant.SUCCESS;
		try {
			ftpXmlDown(hours);
			commitIndex(areaCode, hours);
		} catch (Exception e) {
			log.error("", e);
			jobParameter = Constant.FAIL;
		} finally {
			try {
				// Remove the hour's downloaded XML directory.
				FileUtils.deleteDirectory(new File(saveLocalPath + SLASH + hours));
			} catch (IOException ioe) {
				// FIX: log the path with the same separator used to build it,
				// and keep the cause instead of dropping it.
				log.error("删除本地目录失败！" + saveLocalPath + SLASH + hours, ioe);
			}
			// Hand the outcome to the job listener.
			shardingContext.setJobParameter(jobParameter);
		}
	}

	/**
	 * Downloads every XML file of the current hour slot from the remote FTP
	 * server into {@code saveLocalPath}/{@code exeTime}.
	 *
	 * @param hours hour slot being processed (used in log messages; the remote
	 *              directory itself is derived from {@code exeTime})
	 * @throws Exception when the FTP connection, the remote directory, the file
	 *                   listing, or every single download fails
	 */
	public void ftpXmlDown(String hours) throws Exception {
		int downOkCount = 0;
		int downFailCount = 0;
		FTPServerInterface ftp = null;
		try {
			FtpConfigurBean ftpConfigurBean = ftpSourceBean.getFtpConfigurList().get(0);
			ftp = ftpManager.getFtpConn(ftpConfigurBean); // acquire the FTP connection
			if (ftp == null) {
				throw new Exception("FTP对象不可用...");
			}
			String donwDir = ftpConfigurBean.getParentPath() + SLASH + exeTime;
			// The remote hour directory must already exist. (The original's
			// redundant catch-and-rethrow and its early ftp.close() were removed;
			// the finally block below closes the connection exactly once.)
			if (!ftp.isExistsDir(donwDir)) {
				throw new Exception("下载失败，FTP服务器上未生成对目录:" + donwDir);
			}
			List<String> listFiles = ftp.listFiles(donwDir);
			if (listFiles == null || listFiles.isEmpty()) {
				throw new RuntimeException("FTP目录已创建，但未获取XML内容！");
			}
			for (String fileName : listFiles) {
				String remotePath = donwDir + SLASH + fileName;
				String localPath = saveLocalPath + SLASH + exeTime + SLASH + fileName;
				try {
					ftp.download(remotePath, localPath); // download one XML file
					log.debug("下载：" + remotePath + "\t" + localPath);
					downOkCount++;
				} catch (Exception e) {
					log.error("下载：" + remotePath + "\t" + localPath, e);
					downFailCount++;
				}
			}
			log.info("共成功从FTP服务中下载XML数：" + downOkCount);
			// If every download failed, the run as a whole has failed.
			if (downFailCount >= listFiles.size()) {
				throw new Exception("下载失败...");
			}
		} catch (Exception e) {
			log.error("FTP下载" + hours + "小时XML失败");
			throw e;
		} finally {
			try {
				if (ftp != null) {
					ftp.close();
				}
			} catch (Exception e) {
				log.warn("FTP close,", e);
			}
		}
	}

	/**
	 * Parses every downloaded XML file of the given hour, merges the extracted
	 * text into the matching contact record and commits the resulting documents
	 * to Solr in a single batch.
	 *
	 * @param areaCode area code from the checkpoint row (currently unused here)
	 * @param hours    hour slot whose local XML directory is processed
	 * @throws Exception when the local directory is missing or empty
	 */
	public void commitIndex(String areaCode, String hours) throws Exception {
		Collection<File> files = FileUtils.listFiles(new File(saveLocalPath + SLASH + hours),
				FileFilterUtils.suffixFileFilter("xml"), null);
		if (files == null || files.isEmpty()) {
			throw new Exception("未获取" + saveLocalPath + SLASH + hours + "目录或目录中没有文件...");
		}
		List<SolrInputDocument> documents = new ArrayList<>();

		// ---------------测试数据------------------
		// FIXME(review): test scaffolding — a randomly chosen test inspector account
		// overwrites the real userCode of every record. Remove before production use.
		final int size = 10; // note: only the first 10 of the 12 accounts are ever picked
		Random r = new Random();
		List<String> userList = new ArrayList<>();
		for (int n = 1; n <= 12; n++) {
			userList.add(String.valueOf(70000 + n)); // "70001" .. "70012"
		}
		// ---------------测试数据------------------

		// Obtain the index service for the V8 source (factory also supports I8).
		IndexSearchService indexSearchService = indexSearchFactory.getNewIndexService(Constant.V8);
		for (File file : files) {
			String xmlName = file.getName();
			log.debug("解析XML文件:" + xmlName);
			ParseResult xmlBean = null;
			// try-with-resources replaces the original's manual close in finally.
			try (InputStream tempIs = new FileInputStream(file)) {
				IVoiceParse xmlParser = VoiceParserFactory.getInstance().defaultParser(tempIs, false);
				xmlBean = xmlParser.parse();
			} catch (Exception e) {
				// FIX: the original logged 'tempFile', a variable that was never
				// assigned — the message always showed [null] instead of the file.
				log.error("创建索引-解析xml出错[" + xmlName + "]", e);
			}
			if (xmlBean == null) { // no parse result — skip this file
				continue;
			}
			String custcontinfoId = xmlName.substring(0, xmlName.indexOf('.')); // file name is the record id
			TblVocCustcontinfo data = custcontinfoJobDao.getCustcontinfo(custcontinfoId);
			if (data == null) {
				// FIX: a missing contact record used to NPE and abort the whole batch.
				log.error("未获取接触记录[" + custcontinfoId + "]");
				continue;
			}

			// ----------测试账号-------------
			data.setUserCode(userList.get(r.nextInt(size))); // randomly assign a test inspector
			// ----------测试账号-------------

			// Copy the parse result into the contact record.
			data.setTxtContent(xmlBean.getAllText());
			data.setTxtContentAgent(xmlBean.getAgentText());
			data.setTxtContentUser(xmlBean.getUserText());
			data.setSilenceLength(xmlBean.getSilence());
			data.setRecoinfoLength(xmlBean.getDuration());
			data.setQualityStatus("0"); // "0" = not yet inspected, "1" = inspected
			V8DocBean docBean = new V8DocBean(data);
			documents.add(DocToBeanUtil.beanToAnnDoc(docBean));
			log.info("solr add doc:" + data.getCustcontinfoId() + "\t" + data.getCaller() + "\t" + data.getAcceptTime());
		}
		try {
			// Commit the whole batch of documents to the index.
			indexSearchService.addDocs(documents);
		} catch (Exception e) {
			log.error("批量添加" + exeTime + "下的索引失败！", e);
		}
	}

	/**
	 * After the ASR upgrade at the Hubei site the 85 standby machine is also in
	 * service: recordings of odd hours (1, 3, 5, 7, 9, ...) are uploaded to 85
	 * and even hours (0, 2, 4, 6, 8, ...) to 84, so the converted XML must be
	 * fetched from the FTP server matching the hour.
	 *
	 * @param hours hour of day as a decimal string
	 * @return "ftp1" for even hours, "ftp2" for odd hours
	 */
	public String getFtpOperator(String hours) {
		return (Integer.parseInt(hours) % 2 == 0) ? "ftp1" : "ftp2";
	}

}
