package com.hollycrm.hollysqm.job.index;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.SolrInputDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.dangdang.ddframe.job.api.JobExecutionMultipleShardingContext;
import com.dangdang.ddframe.job.plugin.job.type.dataflow.AbstractBatchThroughputDataFlowElasticJob;
import com.hollycrm.hollysqm.bean.FtpConfigurBean;
import com.hollycrm.hollysqm.bean.XmlFtpSourceBean;
import com.hollycrm.hollysqm.core.index.factory.IndexSearchFactory;
import com.hollycrm.hollysqm.core.index.service.IndexSearchService;
import com.hollycrm.hollysqm.core.util.DocToBeanUtil;
import com.hollycrm.hollysqm.core.vo.V8DocBean;
import com.hollycrm.hollysqm.core.voice.bean.ParseResult;
import com.hollycrm.hollysqm.core.voice.parsing.IVoiceParse;
import com.hollycrm.hollysqm.core.voice.parsing.factory.VoiceParserFactory;
import com.hollycrm.hollysqm.entities.TblJobIndex;
import com.hollycrm.hollysqm.entities.TblVocCustcontinfo;
import com.hollycrm.hollysqm.job.index.dao.CustcontinfoJobDao;
import com.hollycrm.hollysqm.job.index.dao.IndexJobDao;
import com.hollycrm.hollysqm.util.Constant;
import com.hollycrm.hollysqm.util.ftp.FTPServerInterface;
import com.hollycrm.hollysqm.util.ftp.FTPManager;

/**
 * Job that builds the V8 voice index ("Bak" suggests a backup copy of CreateV8IndexJob).
 * Steps per run:
 *   1. Read the contact records for the scheduled time slice (one hour).
 *   2. Download one XML file per record (named by record id) from FTP.
 *   3. Parse each XML file, extract the transcript text, and fill the record.
 *   4. Convert the records to Solr documents and submit them to the index.
 * @author jianglong
 * @date 2017年2月23日 下午4:25:44
 */
@Service
public class CreateV8IndexJobBak extends AbstractBatchThroughputDataFlowElasticJob<TblVocCustcontinfo>  {
	
	private final Logger log = LoggerFactory.getLogger(getClass());
	/**
	 * Minute/second suffixes appended to the hourly slice to form the query range
	 * (slice + "0000" .. slice + "5959").
	 */
	private static final String START_TIME="0000";	
	private static final String END_TIME="5959";
	/**
	 * DAO for the job bookkeeping table (tbl_job_index): supplies the time slice
	 * to process and the running/finished status flag.
	 */
	@Autowired
	private IndexJobDao indexJobDao;
	/**
	 * DAO for the contact records to be indexed.
	 */
	@Autowired
	private CustcontinfoJobDao custcontinfoJobDao;
	/**
	 * Factory producing the Solr index service (V8 or I8 flavor).
	 */
	@Autowired
	private IndexSearchFactory indexSearchFactory;
	
	/**
	 * Source of the FTP download configuration.
	 */
	@Autowired
	private XmlFtpSourceBean ftpSourceBean;
	/**
	 * Local directory under which the downloaded XML files are stored.
	 */
	@Value("${ftp.download1.savePath}")
	private String saveLocalPath;	
	/**
	 * FTP connection manager.
	 */
	private FTPManager ftpManager = new FTPManager();
	
	/**
	 * Time slice (e.g. "2017022408") of the run in flight; written by
	 * {@link #fetchData} and read by {@link #processData}.
	 * NOTE(review): shared mutable state — assumes fetchData/processData of one
	 * trigger never overlap with another trigger on this bean instance; confirm
	 * against the elastic-job threading model.
	 */
	private String exeTime = null;
	
	// Production runs on Linux, so a literal slash is used instead of File.separator.
	private final static String SLASH= "/";
	
	/**
	 * Step 1+2: load the contact records for the current time slice and download
	 * the matching XML files from FTP to the local staging directory.
	 *
	 * @param shardingContext sharding context; its job parameter is set to
	 *        {@code Constant.SUCCESS}/{@code Constant.FAIL} to signal the outcome
	 * @return the records whose XML was (at least partially) downloaded, or
	 *         {@code null} when nothing should be processed
	 */
	@Override
	public List<TblVocCustcontinfo> fetchData(JobExecutionMultipleShardingContext shardingContext) {
		log.info("开始执行CreateV8IndexJob...");
		List<TblVocCustcontinfo> list = null;
		try{
			List<TblJobIndex> indexList = indexJobDao.getJobIndexList(Constant.CREATE_V8_INDEX_JOB);
			if (indexList == null || indexList.isEmpty()){
				throw new RuntimeException("tbl_job_index 表中数据不可以为空！");
			}
			TblJobIndex jobIndex = indexList.get(0);
			if (StringUtils.equals(jobIndex.getStatus(),"0")){
				// "0" means the previous run has not finished yet; skip to avoid overlap.
				log.info("上一次Job任务还未结束，不能执行本次操作,详情请查看Tab_Job_Index表...");
				return null;
			}
			exeTime = jobIndex.getExeTime();
			log.info("V8Job执行时间：{},JobParameter:{}", exeTime, shardingContext.getJobParameter());
			// Mark the job as running so no other trigger starts in parallel.
			indexJobDao.updateJobStatus(Constant.CREATE_V8_INDEX_JOB);
			list = custcontinfoJobDao.getCustcontinfoList(exeTime + START_TIME, exeTime + END_TIME);
		}catch(Exception e){
			log.error("Job执行异常...", e);
			shardingContext.setJobParameter(Constant.FAIL);
			return null;
		}
		log.info("{}至{}共查询出：{}", exeTime + START_TIME, exeTime + END_TIME, (list == null ? 0 : list.size()));
		log.info("ftp本地存储目录：{}", saveLocalPath);
		String jobParameter = Constant.SUCCESS;
		if (list != null && !list.isEmpty()){
			FtpConfigurBean ftpConfigurBean = ftpSourceBean.getFtpConfigurList().get(0);
			FTPServerInterface ftp = ftpManager.getFtpConn(ftpConfigurBean);
			if (ftp == null){
				log.error("FTP对象不可用...");
				shardingContext.setJobParameter(Constant.FAIL);
				return null;
			}
			String downDir = ftpConfigurBean.getParentPath() + SLASH + exeTime;
			int downFailCount = 0;
			try{
				// The remote per-slice directory must exist before anything is downloaded.
				if (!ftp.isExistsDir(downDir)){
					throw new Exception("下载失败，FTP服务器上未生成对目录:" + downDir);
				}
				for (TblVocCustcontinfo info : list){
					String xmlFileName = info.getCustcontinfoId() + ".xml";
					String remote = downDir + SLASH + xmlFileName;
					String local = saveLocalPath + SLASH + exeTime + SLASH + xmlFileName;
					try{
						ftp.download(remote, local);
						log.debug("下载：{}\t{}", remote, local);
					}catch(Exception e){
						// A single failed file does not abort the run; count it instead.
						log.error("下载：" + remote + "\t" + local, e);
						downFailCount++;
					}
				}
			}catch(Exception e){
				log.error(e.getMessage(), e);
				shardingContext.setJobParameter(Constant.FAIL);
				return null;
			}finally{
				// Always release the FTP connection, even on unexpected failures.
				closeQuietly(ftp);
			}
			// If every single download failed, treat the whole run as failed.
			if (downFailCount >= list.size()){
				jobParameter = Constant.FAIL;
				list = null;
			}
		}else{
			jobParameter = Constant.FAIL;
		}
		shardingContext.setJobParameter(jobParameter);
		return list;
	}
	
	/**
	 * Closes an FTP connection, logging (not throwing) any failure.
	 *
	 * @param ftp connection to close; {@code null} is tolerated
	 */
	private void closeQuietly(FTPServerInterface ftp) {
		if (ftp == null) {
			return;
		}
		try {
			ftp.close();
		} catch (Exception e) {
			log.warn("close FTP connection failed", e);
		}
	}
	
	/**
	 * Step 3+4: parse each downloaded XML, copy the transcript fields onto the
	 * record, and submit the resulting Solr documents in one batch. The local
	 * staging directory for this slice is deleted afterwards either way.
	 *
	 * @param dataList records returned by {@link #fetchData}
	 * @return number of successfully processed records (parse failures are
	 *         subtracted, so the value can go negative; 0 when the batch commit
	 *         itself fails)
	 */
	@Override
	public int processData(JobExecutionMultipleShardingContext shardingContext, List<TblVocCustcontinfo> dataList) {
		int proCount = 0;
		if (dataList == null || dataList.isEmpty()){
			return proCount;
		}
		List<SolrInputDocument> documents = new ArrayList<>(dataList.size());
		
		// ---------------测试数据------------------
		// Test scaffolding: randomly assigns one of these hard-coded inspector
		// accounts to every record. Remove before production use.
		int size = 10;
		// NOTE(review): only the first 10 of the 12 accounts below can ever be
		// drawn because size is 10 — confirm whether userList.size() was intended.
		Random r = new Random();
		List<String> userList = new ArrayList<>();
		for (int u = 70001; u <= 70012; u++){
			userList.add(String.valueOf(u));
		}
		// ---------------测试数据------------------
		
		// Index service selected by the V8/I8 flag.
		IndexSearchService indexSearchService = indexSearchFactory.getNewIndexService(Constant.V8);
		for (TblVocCustcontinfo data : dataList){
			String tempFile = saveLocalPath + SLASH + exeTime + SLASH + data.getCustcontinfoId() + ".xml";
			log.debug("voc xml parsing ");
			ParseResult xmlBean = null;
			InputStream tempIs = null;
			try {
				tempIs = new FileInputStream(tempFile);
				IVoiceParse xmlParser = VoiceParserFactory.getInstance().defaultParser(tempIs, false);
				xmlBean = xmlParser.parse();
			} catch (Exception e) {
				xmlBean = null;
				log.error("创建索引-解析xml出错[" + tempFile + "]", e);
			} finally {
				if (tempIs != null) {
					try {
						tempIs.close();
					} catch (Exception ignored) {
						// best-effort close; the whole slice directory is deleted below anyway
					}
				}
			}
			if (xmlBean == null){
				// Parse failed: skip the record and count it against the total.
				proCount--;
				continue;
			}
			
			// ----------测试账号-------------
			data.setUserCode(userList.get(r.nextInt(size)));//随机分配（任意选择）一个质检员
			// ----------测试账号-------------
			
			// Copy the parsed transcript fields onto the record.
			data.setTxtContent(xmlBean.getAllText());
			data.setTxtContentAgent(xmlBean.getAgentText());
			data.setTxtContentUser(xmlBean.getUserText());
			data.setSilenceLength(xmlBean.getSilence());
			data.setRecoinfoLength(xmlBean.getDuration());
			data.setQualityStatus("0");//0未质检，1已质检
			V8DocBean docBean = new V8DocBean(data);
			documents.add(DocToBeanUtil.beanToAnnDoc(docBean));
			log.info("solr add doc:{}\t{}\t{}", data.getCustcontinfoId(), data.getCaller(), data.getAcceptTime());
			proCount++;
		}
		try{
			// Commit the whole batch to the index in one call.
			indexSearchService.addDocs(documents);
		}catch(Exception e){
			proCount = 0;
			log.error("批量添加" + exeTime + "下的索引失败！", e);
		}finally{
			// Clean up the local staging directory regardless of the commit outcome.
			try{
				FileUtils.deleteDirectory(new File(saveLocalPath + SLASH + exeTime + SLASH));
			}catch(IOException ioe){
				log.error("删除目录失败，dir:{}", saveLocalPath + SLASH + exeTime + SLASH, ioe);
			}
		}
		// proCount reports how many records were processed successfully.
		return proCount;
	}
	
	/**
	 * One-shot (non-streaming) processing: fetchData runs once per trigger.
	 */
	@Override
	public boolean isStreamingProcess() {
		return false;
	}
}
