package cn.wx.read.kafka.service;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.csvreader.CsvReader;

import cn.hutool.core.lang.Singleton;
import cn.wx.common.utils.TypeConversion;
import cn.wx.read.core.entity.Org;
import cn.wx.read.core.entity.ScholarFos;
import cn.wx.read.core.entity.qo.OrgQ;
import cn.wx.read.core.entity.qo.ScholarFosQ;
import cn.wx.read.core.service.OrgService;
import cn.wx.read.core.service.ScholarFosService;
import cn.wx.read.es.entity.EsExpert;
import cn.wx.read.es.service.EsExpertService;
import cn.wx.read.kafka.pool.ExecutorPool;
import cn.wx.read.kafka.pool.Gone;
import cn.wx.read.neo4j.entity.Neo4jExpert;
import cn.wx.read.neo4j.service.Neo4jImportDataService;

/**
 * Expert CSV parsing and import service.
 * Author: 张剑 (Zhang Jian)
 */
@Service
public class ConsumeExpertService implements InitializingBean {
	
	Logger logger = LoggerFactory.getLogger(ConsumeExpertService.class);
	
	// Writes expert nodes into Neo4j in batches.
	@Autowired
	Neo4jImportDataService neo4jImportDataService;
	
	// Writes expert documents into Elasticsearch in batches.
	@Autowired
	EsExpertService esExpertService;
	
	// Source of the field-of-study name -> id cache (fMap), loaded in afterPropertiesSet().
	@Autowired
    ScholarFosService scholarFosService;
	
	// Source of the organisation name -> id cache (oMap), loaded in afterPropertiesSet().
	@Autowired
	OrgService orgService;
	
	// Shared pool used both to split CSVs into shards and to run the shard workers.
	ExecutorPool executorPool = Singleton.get(ExecutorPool.class);
	
	// Shard-file name prefixes; also used as the dataSource discriminator
	// in ThreadMultiReader.run(). Shard files are "<prefix><index>.csv".
	private final static String zoneName_expert = "ZoneExpert_";
	private final static String zoneName_ghindex = "ZoneExpertGHIndex_";
	private final static String zoneName_patents = "ZoneExpertPatents_";
	private final static String zoneName_news = "ZoneExpertNews_";
	
	/**
	 * Splits the source CSV into shard files, then distributes the shard
	 * index ranges over the executor pool and waits for completion.
	 *
	 * @param filePath   directory containing the source file and shards
	 * @param fileName   source CSV file name
	 * @param gone       import target / batch-size configuration
	 * @param dataSource one of the zoneName_* prefixes, selects the parser
	 */
	public void consumeZone(String filePath, String fileName, Gone gone, String dataSource) {

		// Number of shard files produced from the source CSV.
		Integer size = executorPool.zoneCsv(filePath, fileName, dataSource);

		executorPool.initPool();

		// Shards per worker task; 0 means there are fewer shards than pool threads.
		int sz = size / ExecutorPool.tc;

		if (sz == 0) {
			// Fewer shards than threads: one single-shard task each.
			for (int i = 0; i < size; i++) {
				executorPool.execute(new ThreadMultiReader(filePath, i, i, gone, dataSource));
			}
		} else {
			int e = 0;
			for (int i = 0; i < size; i++) {
				if (e == sz) {
					// Dispatch the inclusive shard range [i - e, i].
					executorPool.execute(new ThreadMultiReader(filePath, i - e, i, gone, dataSource));
					e = 0;
				} else {
					e++;
				}
			}
			if (e > 0) {
				// Remaining tail: the last e shards, i.e. [size - e, size - 1].
				// BUGFIX: was "size - e + 1", which silently skipped one shard
				// whenever a tail remained (e.g. size=10, sz=3 left shard 8 unread).
				int a = size - e;
				int b = size - 1;
				executorPool.execute(new ThreadMultiReader(filePath, a, b, gone, dataSource));
			}
		}

		// Blocks until all submitted shard tasks have finished.
		executorPool.poolShutdown();
	}

	/**
	 * Worker that processes an inclusive range of shard files [s_, e_] for
	 * one data source. Non-static on purpose: it calls the enclosing
	 * service's consume* methods.
	 */
	class ThreadMultiReader implements Runnable {

		// First shard index (inclusive).
		private Integer s_;
		
		// Last shard index (inclusive).
		private Integer e_;
		
		// Directory holding the shard files.
		private String fp_;
		
		// Import target / batch-size configuration.
		private Gone gone_;
		
		// Which data set the shards belong to; one of the zoneName_* constants.
		private String dataSource_;
		
		public ThreadMultiReader(String fp_, Integer s_, Integer e_, Gone gone_, String dataSource_) {
			this.s_ = s_;
			this.e_ = e_;
			this.fp_ = fp_;
			this.gone_ = gone_;
			this.dataSource_ = dataSource_;
		}
		
		@Override
		public void run() {
			
			// Process every shard assigned to this worker.
			for (int i = s_; i <= e_; i++) {
				
				switch (dataSource_) {
				case zoneName_expert:
					consumeExpert(fp_ + zoneName_expert + i + ".csv", gone_);
					break;
				case zoneName_ghindex:
					// BUGFIX: all three non-expert branches previously built the
					// file name with zoneName_expert, so the wrong shard files
					// were read for h/g-index, patents and news imports.
					consumeExpertHGIndex(fp_ + zoneName_ghindex + i + ".csv", gone_);
					break;
				case zoneName_patents:
					consumeExpertPatents(fp_ + zoneName_patents + i + ".csv", gone_);
					break;
				case zoneName_news:
					consumeExpertNews(fp_ + zoneName_news + i + ".csv", gone_);
					break;
				default:
					// Unknown data source: nothing to do.
					break;
				}
				
			}
			
		}

		public Integer getS_() {
			return s_;
		}

		public void setS_(Integer s_) {
			this.s_ = s_;
		}

		public Integer getE_() {
			return e_;
		}

		public void setE_(Integer e_) {
			this.e_ = e_;
		}

		public String getFp_() {
			return fp_;
		}

		public void setFp_(String fp_) {
			this.fp_ = fp_;
		}

		public Gone getGone_() {
			return gone_;
		}

		public void setGone_(Gone gone_) {
			this.gone_ = gone_;
		}
		
	}
	
	/**
	 * Parses one expert shard CSV and bulk-imports the rows into Neo4j or
	 * Elasticsearch depending on {@code go}.
	 *
	 * @param filePath full path of the shard file
	 * @param go       import target and batch size (go.getIc() rows per batch)
	 */
	public void consumeExpert(String filePath, Gone go) {
		
		// Pending batches for the two targets, filled in parallel per row.
		List<Neo4jExpert> nes = new ArrayList<>();
		List<EsExpert> eses = new ArrayList<>();
		
		CsvReader csvReader = null;
		try {
			csvReader = executorPool.getCsvReader(filePath);
			csvReader.readHeaders();
			
			while (csvReader.readRecord()) {
				
				// Skip blank lines in the shard file.
				String line = csvReader.getRawRecord();
				if (StringUtils.isBlank(line)) {
					continue;
				}
				
				// Raw columns from the shard file.
				String fId = csvReader.get("nId");
				String name = csvReader.get("name");
				String arrayFos = csvReader.get("arrayFos");
				Integer citations = TypeConversion.converInteger(csvReader.get("citations"));
				String curOrg = csvReader.get("curOrg");
				String language = csvReader.get("language");
				String country = csvReader.get("country");
				
				List<String> fieldNames = null;
				List<Long> fieldfIds = null;
				Integer diversity = 0;
				Long curOrgfId = 0L;
				
				if (StringUtils.isNotBlank(arrayFos)) {
					// "___" is an escaped space in the export format.
					arrayFos = arrayFos.replaceAll("___", " ");
					fieldNames = Arrays.asList(arrayFos.split(","));
					diversity = fieldNames.size();
					
					fieldfIds = new ArrayList<>(fieldNames.size());
					for (String fn : fieldNames) {
						// NOTE(review): fMap.get may return null for unknown
						// field names; nulls are kept in the list as before.
						fieldfIds.add(fMap.get(fn));
					}
				}
				
				if (StringUtils.isNotBlank(curOrg)) {
					// May be null when the organisation is not in the cache.
					curOrgfId = oMap.get(curOrg);
				}
				
				Neo4jExpert ne = new Neo4jExpert();
				ne.setfId(fId);
				ne.setName(name);
				nes.add(ne);
				
				EsExpert ese = new EsExpert();
				ese.setId(fId);
				ese.setfId(fId);
				ese.setName(name);
				ese.setFieldfIds(fieldfIds);
				ese.setFoSets(fieldNames);
				ese.setAuthorsOrg(StringUtils.isBlank(curOrg) ? null : curOrg);
				ese.setCurOrgfId(curOrgfId);
				ese.setCountry(StringUtils.isBlank(country) ? null : country);
				ese.setLanguage(StringUtils.isBlank(language) ? null : language);
				ese.setCitations(citations);
				ese.setDiversity(diversity);
				eses.add(ese);
				
				// Flush a full batch (go.getIc() rows per batch).
				if (nes.size() == go.getIc()) {
					importExpertBatch(go, nes, eses);
				}
			}
			
			// Flush the final, partial batch.
			if (nes.size() > 0) {
				importExpertBatch(go, nes, eses);
			}
		} catch (IOException e) {
			// Was printStackTrace(); log with context instead.
			logger.error("解析专家文件失败：" + filePath, e);
		} finally {
			// Always release the reader; it was previously leaked when an
			// exception was thrown before the in-try close() call.
			if (csvReader != null) {
				csvReader.close();
			}
		}
	}
	
	/**
	 * Imports one batch into the target selected by {@code go}, logs the
	 * elapsed time, and clears both batch lists. Import failures are logged
	 * (previously swallowed silently) so the file keeps being processed.
	 */
	private void importExpertBatch(Gone go, List<Neo4jExpert> nes, List<EsExpert> eses) {
		try {
			long st = System.currentTimeMillis();
			if (go.equals(Gone.Neo4j)) {
				neo4jImportDataService.importBetchExpert(nes);
			} else if (go.equals(Gone.Elasticsearch)) {
				esExpertService.importBetchExpert(eses);
			}
			long et = System.currentTimeMillis();
			logger.info("一组专家耗时：" + (float) (et - st) / 1000);
		} catch (Exception e) {
			// Best-effort import: record the failure instead of hiding it.
			logger.error("专家批量导入失败", e);
		} finally {
			nes.clear();
			eses.clear();
		}
	}
	
	/**
	 * Parses one h-index/g-index shard CSV and bulk-imports the values into
	 * Elasticsearch.
	 *
	 * @param filePath full path of the shard file
	 * @param go       batch-size configuration (go.getIc() rows per batch)
	 */
	public void consumeExpertHGIndex(String filePath, Gone go) {
		
		List<EsExpert> eses = new ArrayList<>();
		
		CsvReader csvReader = null;
		try {
			csvReader = executorPool.getCsvReader(filePath);
			csvReader.readHeaders();
			
			while (csvReader.readRecord()) {
				
				// Skip blank lines in the shard file.
				String line = csvReader.getRawRecord();
				if (StringUtils.isBlank(line)) {
					continue;
				}
				
				// Raw columns from the shard file.
				String fId = csvReader.get("nId");
				String hIndex = csvReader.get("hIndex");
				String gIndex = csvReader.get("gindex");
				
				EsExpert ese = new EsExpert();
				ese.setId(fId);
				ese.setfId(fId);
				ese.sethIndex(TypeConversion.converInteger(hIndex));
				ese.setgIndex(TypeConversion.converInteger(gIndex));
				eses.add(ese);
				
				// Flush a full batch (go.getIc() rows per batch).
				if (eses.size() == go.getIc()) {
					try {
						long st = System.currentTimeMillis();
						esExpertService.importHGIndex(eses);
						long et = System.currentTimeMillis();
						logger.info("一组专家hindex和gindex耗时：" + (float) (et - st) / 1000);
					} catch (Exception e) {
						// Best-effort import: record the failure instead of hiding it.
						logger.error("专家hindex/gindex批量导入失败", e);
					} finally {
						eses.clear();
					}
				}
			}
			
			// Flush the final, partial batch.
			if (eses.size() > 0) {
				esExpertService.importHGIndex(eses);
			}
		} catch (IOException e) {
			// Was printStackTrace(); log with context instead.
			logger.error("解析专家hindex/gindex文件失败：" + filePath, e);
		} finally {
			// Always release the reader; it was previously leaked on exception.
			if (csvReader != null) {
				csvReader.close();
			}
		}
	}
	
	
	
	/**
	 * Parses one patent-statistics shard CSV and bulk-imports the values
	 * into Elasticsearch.
	 *
	 * @param filePath full path of the shard file
	 * @param go       batch-size configuration (go.getIc() rows per batch)
	 */
	public void consumeExpertPatents(String filePath, Gone go) {
		
		List<EsExpert> eses = new ArrayList<>();
		
		CsvReader csvReader = null;
		try {
			csvReader = executorPool.getCsvReader(filePath);
			csvReader.readHeaders();
			
			while (csvReader.readRecord()) {
				
				// Skip blank lines in the shard file.
				String line = csvReader.getRawRecord();
				if (StringUtils.isBlank(line)) {
					continue;
				}
				
				// Raw columns from the shard file.
				// NOTE(review): the id is read from column "name" here, unlike
				// the other shard types which use "nId"/"id" — confirm the
				// patents export really keys experts by this column.
				String fId = csvReader.get("name");
				String patentsNum = csvReader.get("patent_num");
				String patentsCit = csvReader.get("citation_num");
				String efpData = csvReader.get("area_num");
				String eqWorkData = csvReader.get("company_num");
				String eqWork4Data = csvReader.get("value_num");
				
				EsExpert ese = new EsExpert();
				ese.setId(fId);
				ese.setfId(fId);
				ese.setPatentsNum(TypeConversion.converInteger(patentsNum));
				ese.setPatentsCit(TypeConversion.converInteger(patentsCit));
				ese.setEfpData(TypeConversion.converInteger(efpData));
				ese.setEqWorkData(TypeConversion.converInteger(eqWorkData));
				ese.setEqWork4Data(TypeConversion.converInteger(eqWork4Data));
				eses.add(ese);
				
				// Flush a full batch (go.getIc() rows per batch).
				if (eses.size() == go.getIc()) {
					try {
						long st = System.currentTimeMillis();
						esExpertService.importExpertPatents(eses);
						long et = System.currentTimeMillis();
						// BUGFIX: message previously said "hindex和gindex" (copy-paste).
						logger.info("一组专家专利统计耗时：" + (float) (et - st) / 1000);
					} catch (Exception e) {
						// Best-effort import: record the failure instead of hiding it.
						logger.error("专家专利统计批量导入失败", e);
					} finally {
						eses.clear();
					}
				}
			}
			
			// Flush the final, partial batch.
			if (eses.size() > 0) {
				esExpertService.importExpertPatents(eses);
			}
		} catch (IOException e) {
			// Was printStackTrace(); log with context instead.
			logger.error("解析专家专利统计文件失败：" + filePath, e);
		} finally {
			// Always release the reader; it was previously leaked on exception.
			if (csvReader != null) {
				csvReader.close();
			}
		}
	}
	
	/**
	 * Parses one news/social-statistics shard CSV and bulk-imports the
	 * counts into Elasticsearch.
	 *
	 * @param filePath full path of the shard file
	 * @param go       batch-size configuration (go.getIc() rows per batch)
	 */
	public void consumeExpertNews(String filePath, Gone go) {
		
		List<EsExpert> eses = new ArrayList<>();
		
		CsvReader csvReader = null;
		try {
			csvReader = executorPool.getCsvReader(filePath);
			csvReader.readHeaders();
			
			while (csvReader.readRecord()) {
				
				// Skip blank lines in the shard file.
				String line = csvReader.getRawRecord();
				if (StringUtils.isBlank(line)) {
					continue;
				}
				
				// Raw columns from the shard file.
				String fId = csvReader.get("id");
				String nNum = csvReader.get("newstotal");
				String sNum = csvReader.get("socialtotal");
				
				EsExpert ese = new EsExpert();
				ese.setId(fId);
				ese.setfId(fId);
				ese.setNnum(TypeConversion.converInteger(nNum));
				ese.setSnum(TypeConversion.converInteger(sNum));
				eses.add(ese);
				
				// Flush a full batch (go.getIc() rows per batch).
				if (eses.size() == go.getIc()) {
					try {
						long st = System.currentTimeMillis();
						esExpertService.importExpertNews(eses);
						long et = System.currentTimeMillis();
						// BUGFIX: message previously said "hindex和gindex" (copy-paste).
						logger.info("一组专家新闻社交统计耗时：" + (float) (et - st) / 1000);
					} catch (Exception e) {
						// Best-effort import: record the failure instead of hiding it.
						logger.error("专家新闻社交统计批量导入失败", e);
					} finally {
						eses.clear();
					}
				}
			}
			
			// Flush the final, partial batch.
			if (eses.size() > 0) {
				esExpertService.importExpertNews(eses);
			}
		} catch (IOException e) {
			// Was printStackTrace(); log with context instead.
			logger.error("解析专家新闻社交统计文件失败：" + filePath, e);
		} finally {
			// Always release the reader; it was previously leaked on exception.
			if (csvReader != null) {
				csvReader.close();
			}
		}
	}	
	
	/**
	 * Field-of-study name -> id cache, populated once in afterPropertiesSet()
	 * and read by consumeExpert(). Not modified after startup.
	 */
	private Map<String,Long> fMap = new HashMap<>();
	
	/**
	 * Organisation name -> id cache, populated once in afterPropertiesSet()
	 * and read by consumeExpert(). Not modified after startup.
	 */
	private Map<String,Long> oMap = new HashMap<>();
	
	@Override
	public void afterPropertiesSet() throws Exception {
		
		// Preload the field-of-study name -> id cache used during parsing.
		List<ScholarFos> fosList = scholarFosService.queryList(new ScholarFosQ());
		if (fosList != null) {
			for (ScholarFos fos : fosList) {
				fMap.put(fos.getfName(), fos.getpId());
			}
		}
		
		// Preload the organisation name -> id cache used during parsing.
		List<Org> orgList = orgService.queryList(new OrgQ());
		if (orgList != null) {
			for (Org org : orgList) {
				oMap.put(org.getName(), org.getpId());
			}
		}
	}
}
