package com.hs.hbp.service.impl;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.Version;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.hs.core.dao.IBaseDao;
import com.hs.core.service.impl.BaseSevice;
import com.hs.core.utils.PageParam;
import com.hs.core.utils.PageUtil;
import com.hs.hbp.dao.impl.HbpZhuDao;
import com.hs.hbp.model.HbpZhu;
import com.hs.hbp.service.IHbpZhuService;
import com.hs.hbp.util.HBPConstant;
@Service 
public class HbpZhuService extends BaseSevice implements IHbpZhuService{
    private  HbpZhuDao dataDao;
	/**
	 * Supplies the concrete DAO to the generic {@code BaseSevice} template,
	 * so the inherited CRUD helpers (e.g. {@code saveObject}) operate on
	 * {@code HbpZhu} records.
	 *
	 * @return the Spring-injected {@link HbpZhuDao}
	 */
	@Override
	public IBaseDao getBaseDao() {
		return this.dataDao;
	}
	// Spring injects the concrete DAO here; getBaseDao() then exposes it to
	// the BaseSevice superclass so its generic persistence helpers work.
	@Autowired
	public void setDataDao(HbpZhuDao dataDao) {
		this.dataDao = dataDao;
	}

	/**
	 * Delegates paged querying of HbpZhu records to the DAO.
	 *
	 * @param pageParam paging/filter parameters as defined by the framework
	 * @return the DAO's page result
	 */
	public PageUtil getPageList(PageParam pageParam){
		return this.dataDao.getPageList(pageParam);
	}
   
	
	/**
	 * Not implemented: always returns {@code null}.
	 *
	 * NOTE(review): callers must null-check the result; when this gets a real
	 * implementation consider returning an empty map instead of null.
	 */
	public Map savePreinfo(Map data) {
		
		return null;
	}
	/**
	 * Imports every Chinese CBM export file from the (hard-coded) download
	 * directory, parsing each file record-by-record via parseCnHbp().
	 */
	public void importCnFile(){
		String fileResource = "D:/work/pbh/高血压题录CBM下载-测试" ;
		File resource = new File(fileResource);
		if(resource.isDirectory()){
			File[] onefileList = resource.listFiles();
			// listFiles() returns null on an I/O error even for a directory;
			// the original dereferenced it unconditionally (NPE risk)
			if(onefileList != null){
				for (File onefile : onefileList) {
					parseCnHbp(onefile);
				}
			}
		}
	}
	/**
	 * Imports English bibliography files (English titles) from the
	 * (hard-coded) directory, parsing each file via parseEnEnHbp().
	 */
	public void importEnEnFile(){
		String fileResource = "D:/work/pbh/导入英文英文" ;
		File resource = new File(fileResource);
		if(resource.isDirectory()){
			File[] onefileList = resource.listFiles();
			// listFiles() returns null on an I/O error even for a directory;
			// the original dereferenced it unconditionally (NPE risk)
			if(onefileList != null){
				for (File onefile : onefileList) {
					parseEnEnHbp(onefile);
				}
			}
		}
	}
	
	/**
	 * Imports English bibliography files whose field labels are Chinese from
	 * the (hard-coded) directory, parsing each file via parseEnCnHbp().
	 */
	public void importEnCnFile(){
		String fileResource = "D:/work/pbh/导入英文中文" ;
		File resource = new File(fileResource);
		if(resource.isDirectory()){
			File[] onefileList = resource.listFiles();
			// listFiles() returns null on an I/O error even for a directory;
			// the original dereferenced it unconditionally (NPE risk)
			if(onefileList != null){
				for (File onefile : onefileList) {
					parseEnCnHbp(onefile);
				}
			}
		}
	}
	
	/**
	 * Parses one RIS-style English export file (UTF-8). A record starts at a
	 * "TY  -" line and is complete at the first "ER  -" line; each complete
	 * record is handed to dealOnepartEnEn().
	 *
	 * Note: the literal "/r/n" (forward slashes) is the record-internal line
	 * separator; dealOnepartEnEn() splits on the same literal, so it must not
	 * be changed to "\r\n" here alone.
	 */
	public void parseEnEnHbp(File destfile){
		BufferedReader br = null ;
		try {
			br = new BufferedReader(new InputStreamReader(new FileInputStream(destfile), "UTF-8"));
			String onepart = "";
			int i=1;	// guards against dispatching the same record twice
			String temp = null ;
			while((temp=br.readLine())!=null){
				temp = temp.trim();
				if(temp.indexOf("TY  -")!=-1){
					// new record begins: reset the accumulator
					onepart = "";
					onepart = onepart + temp + "/r/n";
					i=1;
				}else{
					onepart = onepart + temp + "/r/n";
					if(onepart.indexOf("ER  -")!=-1 && i==1){
						dealOnepartEnEn(onepart);
						System.out.println("bbbbbbbbb" + "\r\n"+ onepart);
						i += 1 ;
					}
				}
			}
		} catch (IOException e) {
			// covers FileNotFoundException/UnsupportedEncodingException too
			e.printStackTrace();
		} finally {
			// release the file handle even on read errors
			// (the original never closed the reader: resource leak)
			if(br != null){
				try {
					br.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}
	
	/**
	 * Parses one English-bibliography export whose field labels are Chinese
	 * (GBK encoded). A record starts at a "文献类型:" line and is complete at
	 * the first "索取号:" line; complete records go to dealOnepartEnCn().
	 *
	 * Note: the literal "/r/n" is the record-internal separator that
	 * dealOnepartEnCn() splits on — do not change it here alone.
	 */
	public void parseEnCnHbp(File destfile){
		System.out.println("222");
		BufferedReader br = null ;
		try {
			br = new BufferedReader(new InputStreamReader(new FileInputStream(destfile), "gbk"));
			String onepart = "";
			int i=1;	// guards against dispatching the same record twice
			String temp = null ;
			while((temp=br.readLine())!=null){
				temp = temp.trim();
				System.out.println("temp:" + temp );
				if(temp.indexOf("文献类型:")!=-1){
					// new record begins: reset the accumulator
					onepart = "";
					onepart = onepart + temp + "/r/n";
					i=1;
				}else{
					onepart = onepart + temp + "/r/n";
					if(onepart.indexOf("索取号:")!=-1 && i==1){
						dealOnepartEnCn(onepart);
						System.out.println("bbbbbbbbb" + "\r\n"+ onepart);
						i += 1 ;
					}
				}
			}
		} catch (IOException e) {
			// covers FileNotFoundException/UnsupportedEncodingException too
			e.printStackTrace();
		} finally {
			// release the file handle even on read errors
			// (the original never closed the reader: resource leak)
			if(br != null){
				try {
					br.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}
	
	/**
	 * Parses one Chinese CBM export file (UTF-8). A record starts at a
	 * "【流水号】" line and is complete at the first "【更新日期】" line; each
	 * complete record is converted and persisted by dealOnepart().
	 *
	 * Note: the literal "/r/n" is the record-internal separator that
	 * dealOnepart() splits on — do not change it here alone.
	 */
	public void parseCnHbp(File destfile){
		BufferedReader br = null ;
		try {
			br = new BufferedReader(new InputStreamReader(new FileInputStream(destfile), "UTF-8"));
			String onepart = "";
			int i=1;	// guards against dispatching the same record twice
			String temp = null ;
			while((temp=br.readLine())!=null){
				if(temp.indexOf("【流水号】")!=-1){
					// new record begins: reset the accumulator
					onepart = "";
					onepart = onepart + temp + "/r/n";
					i=1;
				}else{
					onepart = onepart + temp + "/r/n";
					if(onepart.indexOf("【更新日期】")!=-1 && i==1){
						dealOnepart(onepart);
						System.out.println("bbbbbbbbb" + "\r\n"+ onepart);
						i += 1 ;
					}
				}
			}
		} catch (IOException e) {
			// covers FileNotFoundException/UnsupportedEncodingException too
			e.printStackTrace();
		} finally {
			// close in finally so the handle is released even on read errors
			// (the original closed only on the success path and leaked br)
			if(br != null){
				try {
					br.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}
	
	/**
	 * Converts one RIS-formatted English record (tags such as "TY  -",
	 * "T1  -", "KW  -") into an HbpZhu entity and persists it via
	 * saveObject().
	 *
	 * @param onepart a full record accumulated by parseEnEnHbp(), whose lines
	 *                are joined with the literal separator "/r/n"
	 * @return the persisted entity
	 */
	public HbpZhu dealOnepartEnEn(String onepart){
		HbpZhu hbp = new HbpZhu();
		hbp.setImporttype(HBPConstant.HBP_IMPORT_TYPE_ENEN);
		// split on the same literal "/r/n" that parseEnEnHbp used when joining
		String[] partlist = onepart.split("/r/n");
		int k = 1;	// 1-based count of "N1  -" lines seen so far
		int p = 1;	// 1-based count of "KW  -" lines seen so far
		String n11 = "" ;	// 1st N1 value
		String language = "" ;	// 2nd N1 value
		String publishtype = "" ;	// 3rd N1 value
		String askno = "" ;	// 4th N1 value
		for(int i=0; i<partlist.length; i++){
			String part = partlist[i] ;
			int pos = 0 ;
			pos = part.indexOf("  -");
			// value starts 4 chars past the tag separator ("  - " incl. space)
			if(pos != -1 && part.length()>pos+4){
				String content = part.substring(pos+4,part.length()).trim();
				if(part.indexOf("TY  -")!=-1){
					hbp.setDocumenttype(content);
				}else if(part.indexOf("T1  -")!=-1){
					hbp.setEntitle(replaceTitle(content));
				}else if(part.indexOf("N1  -")!=-1){
					// N1 occurs up to four times; stash values positionally and
					// decide their meaning later from the final count in k
					if(k == 1){
						n11 = content;
						k++ ;
					}else if(k == 2){
						language = content;
						k++ ;
					}else if (k == 3){
						publishtype = content;
						k++ ;
					}else if (k == 4){
						askno = content;
						k++ ;
					}
				}else if(part.indexOf("N2  -")!=-1){
					hbp.setAbstract_(content);
				}
				else if(part.indexOf("KW  -")!=-1){
					// keep at most two keyword lines, skipping an exact
					// (case-insensitive) duplicate of the first
					if(p == 1){
						hbp.setEnthemewords(content);
						p++ ;
					}else if(p == 2){
						String themewords = hbp.getEnthemewords();
						if(!themewords.toLowerCase().trim().equals(content.toLowerCase().trim())){
							hbp.setEnthemewords(hbp.getEnthemewords() + ";" + content);
						}
						p++ ;
					}
				}else if(part.indexOf("UR  -")!=-1){
					hbp.setAskno(content);
				}else if(part.indexOf("AU  -")!=-1){
					hbp.setAuthor(content);
				}else if(part.indexOf("JF  -")!=-1){
					hbp.setPeriodicalname(content);
				}else if(part.indexOf("PY  -")!=-1){
					hbp.setPublishyear(content);
				}else if(part.indexOf("VL  -")!=-1){
					hbp.setColume(content);
				}else if(part.indexOf("IS  -")!=-1){
					hbp.setPeriod(content);
				}else if(part.indexOf("SP  -")!=-1){
					hbp.setPageno(content);
				}else if(part.indexOf("CY  -")!=-1){
					hbp.setPublishcity(content);
				}else if(part.indexOf("Y2  -")!=-1){
					hbp.setPublishdate(content);
				}else if(part.indexOf("SN  -")!=-1){
					hbp.setIssn(content);
				}else if(part.indexOf("ER  -")!=-1){
					// end-of-record tag: nothing to store
				}
				System.out.println(k);
				// NOTE(review): this block runs on EVERY remaining line once
				// enough N1 tags were seen, repeatedly overwriting language/
				// publishtype/askno (including an askno already set from
				// "UR  -"). With 4 N1 lines (k==5) the values map to the
				// 2nd/3rd/4th N1; with 3 (k==4) they shift down by one.
				// Looks intentional but verify against sample files before
				// restructuring.
				if(k == 5){
					hbp.setLanguage(language);
					hbp.setPublishtype(publishtype);
					hbp.setAskno(askno);
					System.out.println("索取号：：：" + askno);
				}else if(k == 4){
					hbp.setLanguage(n11);
					hbp.setPublishtype(language);
					hbp.setAskno(publishtype);
					System.out.println("索取号：：：" + publishtype);
				}
			}
			
		}
		this.saveObject(hbp);
		return hbp ;
	}
	
	/**
	 * Converts one Chinese-labelled English record ("文献类型:" … "索取号:")
	 * into an HbpZhu entity and persists it via saveObject().
	 *
	 * Each line looks like "label: value"; the value starts two characters
	 * after the first ':' (assumes a single space after the colon — behaviour
	 * kept from the original parser; TODO confirm against sample files).
	 *
	 * @param onepart a full record accumulated by parseEnCnHbp(), whose lines
	 *                are joined with the literal separator "/r/n"
	 * @return the persisted entity
	 */
	public HbpZhu dealOnepartEnCn(String onepart){
		HbpZhu hbp = new HbpZhu();
		hbp.setImporttype(HBPConstant.HBP_IMPORT_TYPE_ENCN);
		// split on the same literal "/r/n" that parseEnCnHbp used when joining
		String[] partlist = onepart.split("/r/n");
		for(int i=0; i<partlist.length; i++){
			String part = partlist[i] ;
			int pos = part.indexOf(":");
			if(pos != -1 && part.length()>pos+2){
				String content = part.substring(pos+2,part.length()).trim();
				if(part.indexOf("文献类型:")!=-1){
					hbp.setDocumenttype(content);
				}else if(part.indexOf("标题:")!=-1){
					hbp.setEntitle(replaceTitle(content));
				}else if(part.indexOf("摘要:")!=-1){
					hbp.setAbstract_(content);
				}else if(part.indexOf("主题词:")!=-1){
					hbp.setEnthemewords(content);
				}else if(part.indexOf("相关网址:")!=-1){
					hbp.setWebsite(content);
				}else if(part.indexOf("作者地址:")!=-1){
					hbp.setAuthorunit(content);
				}else if(part.indexOf("作者:")!=-1){
					hbp.setAuthor(content);
				}else if(part.indexOf("刊名:")!=-1){
					hbp.setPeriodicalname(content);
				}else if(part.indexOf("出版年:")!=-1){
					hbp.setPublishyear(content);
				}else if(part.indexOf("卷:")!=-1){
					hbp.setColume(content);
				}else if(part.indexOf("出版日期:")!=-1){
					// checked BEFORE "期:" — "出版日期:" contains "期:" as a
					// substring, so the original check order misfiled publish
					// dates as the issue number
					hbp.setPublishdate(content);
				}else if(part.indexOf("期:")!=-1){
					hbp.setPeriod(content);
				}else if(part.indexOf("页码:")!=-1){
					hbp.setPageno(content);
				}else if(part.indexOf("出版地:")!=-1){
					hbp.setPublishcity(content);
				}else if(part.indexOf("ISSN")!=-1){
					hbp.setIssn(content);
				}else if(part.indexOf("语言:")!=-1){
					hbp.setLanguage(content);
				}else if(part.indexOf("出版类型:")!=-1){
					hbp.setPublishtype(content);
				}else if(part.indexOf("索取号:")!=-1){
					System.out.println("索取号：：：" + content);
					hbp.setAskno(content);
				}
			}
		}
		this.saveObject(hbp);
		return hbp ;
	}
	
	/**
	 * Converts one Chinese CBM record (fields bracketed like "【流水号】") into
	 * an HbpZhu entity and persists it via saveObject().
	 *
	 * @param onepart a full record accumulated by parseCnHbp(), whose lines
	 *                are joined with the literal separator "/r/n"
	 * @return the persisted entity
	 */
	public HbpZhu dealOnepart(String onepart){
		HbpZhu hbp = new HbpZhu();
		hbp.setImporttype(HBPConstant.HBP_IMPORT_TYPE_CN);
		hbp.setLanguage("中文");
		// split on the same literal "/r/n" that parseCnHbp used when joining
		String[] partlist = onepart.split("/r/n");
		for(int i=0; i<partlist.length; i++){
			String part = partlist[i] ;
			// value starts 2 chars after the closing "】" bracket
			int pos = part.indexOf("】");
			if(pos != -1 && part.length()>pos+2){
				String content = part.substring(pos+2,part.length()).trim();
				if(part.indexOf("【流水号】")!=-1){
					System.out.println("流水号：：：" + content);
					hbp.setSerialno(content);
				}else if(part.indexOf("【CA】")!=-1){
					hbp.setCa(content);
				}else if(part.indexOf("【分类号】")!=-1){
					hbp.setClassno(content);
				}else if(part.indexOf("【标题】")!=-1){
					hbp.setTitle(content);
				}else if(part.indexOf("【英文标题】")!=-1){
					hbp.setEntitle(content);
				}else if(part.indexOf("【作者】")!=-1){
					hbp.setAuthor(content);
				}else if(part.indexOf("【作者单位】")!=-1){
					hbp.setAuthorunit(content);
				}else if(part.indexOf("【国省市名】")!=-1){
					hbp.setCity(content);
				}else if(part.indexOf("【摘要】")!=-1){
					hbp.setAbstract_(content);
				}else if(part.indexOf("【参文数】")!=-1){
					try {
						hbp.setReferenceno(Long.parseLong(content));
					} catch (NumberFormatException e) {
						// non-numeric reference count: log and keep importing
						// (the original let this abort the whole record batch)
						e.printStackTrace();
					}
				}else if(part.indexOf("【著者文摘】")!=-1){
					hbp.setAuthorabstract(content);
				}else if(part.indexOf("【出处】")!=-1){
					// layout assumed: "<source text> <code> <year>;..." —
					// full text goes to source, first token to sourcecode,
					// the segment before ';' after the space is the year
					int postion = content.indexOf(" ");
					if(postion != -1){
						String source = content.substring(0, postion);
						hbp.setSource(content);
						String sourcecode = content.substring(postion,content.length());
						int pp = sourcecode.indexOf(";");
						// guard: the original threw StringIndexOutOfBounds
						// when no ';' was present (pp == -1)
						if(pp != -1){
							hbp.setPublishyear(sourcecode.substring(0,pp).trim());
						}
						hbp.setSourcecode(source);
					}
				}else if(part.indexOf("【ISSN】")!=-1){
					hbp.setIssn(content);
				}else if(part.indexOf("【国内代码】")!=-1){
					hbp.setCounrtycode(content);
				}else if(part.indexOf("【内部代码】")!=-1){
					hbp.setInnercode(content);
				}else if(part.indexOf("【出版地】")!=-1){
					hbp.setPublishcity(content);
				}else if(part.indexOf("【关键词】")!=-1){
					hbp.setKeywords(content);
				}else if(part.indexOf("【主题词】")!=-1){
					hbp.setThemewords(content);
				}else if(part.indexOf("【特征词】")!=-1){
					hbp.setFeaturewords(content);
				}else if(part.indexOf("【基金】")!=-1){
					hbp.setFund(content);
				}else if(part.indexOf("【文献类型】")!=-1){
					hbp.setDocumenttype(content);
				}else if(part.indexOf("【更新日期】")!=-1){
					hbp.setUpdatedate(content);
				}
			}
		}
		this.saveObject(hbp);
		return hbp ;
	}

	/**
	 * Strips a single leading '[' and a single trailing ']' from a title,
	 * when present (e.g. "[Some title]" -> "Some title").
	 *
	 * @param title raw title text, possibly bracketed
	 * @return the title without the surrounding brackets
	 */
	public String replaceTitle(String title){
		String stripped = title.startsWith("[") ? title.substring(1) : title;
		if(title.endsWith("]")){
			stripped = stripped.substring(0, stripped.length() - 1);
		}
		return stripped;
	}
	
	/**
	 * Builds the Lucene full-text index for the given record maps, one
	 * Document per map, under HBPConstant.INDEX_ZHU_DIR.
	 *
	 * Fixes vs. original: the "publishcity" field was added twice per
	 * document, and writer.close() was invoked both in the try block and in
	 * finally — now the writer is closed exactly once, in finally.
	 *
	 * @param list list of Map rows (keys match the field names below)
	 * @return number of documents in the index after the write (0 on failure)
	 */
	public int createIndex(List list){
		String indexPath = HBPConstant.INDEX_ZHU_DIR;
		int numberindex = 0 ;
		IndexWriter writer = null ;
		Analyzer analyzer = new SmartChineseAnalyzer(Version.LUCENE_33);
		try {
			Directory dir = new SimpleFSDirectory(new File(indexPath));
			IndexWriterConfig indexWriterConfig = new IndexWriterConfig(Version.LUCENE_33,analyzer);
			writer = new IndexWriter(dir,indexWriterConfig);
			for(int i=0; i<list.size(); i++){
				Map map = (Map)list.get(i);
				Document doc = new Document();
				// identifier and exact-match fields are stored unanalyzed
				doc.add(new Field("rid",objectToString(map.get("rid")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				doc.add(new Field("classno",objectToString(map.get("classno")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("title",objectToString(map.get("title")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("entitle",objectToString(map.get("entitle")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("author",objectToString(map.get("author")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("authorunit",objectToString(map.get("authorunit")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("city",objectToString(map.get("city")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("abstract",objectToString(map.get("abstract")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("periodicalname",objectToString(map.get("periodicalname")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("publishyear",objectToString(map.get("publishyear")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				doc.add(new Field("colume",objectToString(map.get("colume")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				doc.add(new Field("pageno",objectToString(map.get("pageno")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				// "publishcity" was added twice in the original; once is enough
				doc.add(new Field("publishcity",objectToString(map.get("publishcity")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("period",objectToString(map.get("period")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				doc.add(new Field("publishdate",objectToString(map.get("publishdate")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				doc.add(new Field("issn",objectToString(map.get("issn")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				doc.add(new Field("counrtycode",objectToString(map.get("counrtycode")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("themewords",objectToString(map.get("themewords")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("enthemewords",objectToString(map.get("enthemewords")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("documenttype",objectToString(map.get("documenttype")),Field.Store.YES,Field.Index.NOT_ANALYZED));
				doc.add(new Field("language",objectToString(map.get("language")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("keywords",objectToString(map.get("keywords")),Field.Store.YES,Field.Index.ANALYZED));
				doc.add(new Field("fund",objectToString(map.get("fund")),Field.Store.YES,Field.Index.ANALYZED));
				writer.addDocument(doc);
			}
			numberindex = writer.numDocs();
			writer.optimize();
		} catch (IOException e) {
			// covers CorruptIndexException / LockObtainFailedException too
			e.printStackTrace();
		} finally {
			if(writer != null){
				try {
					writer.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		return numberindex;
	}
	 
	/**
	 * Full-text search over the index on a single field. Each hit becomes a
	 * Map of its stored display fields (via getHighlightString) with the
	 * searched field replaced by a highlighted HTML fragment.
	 *
	 * Fix vs. original: the IndexSearcher is now closed in finally (it was
	 * never closed, leaking the underlying index reader on every search).
	 *
	 * @param fieldString indexed field to search and highlight
	 * @param searchStr   user query, parsed with the smart-Chinese analyzer
	 * @return list of result maps; never null, empty on error or no hits
	 */
	public List searchData(String fieldString,String searchStr){
		List result = new ArrayList();
		String indexDir = HBPConstant.INDEX_ZHU_DIR ;
		IndexSearcher searcher = null ;
		try {
			SmartChineseAnalyzer analyzer = new SmartChineseAnalyzer(Version.LUCENE_33);
			searcher = new IndexSearcher(FSDirectory.open(new File(indexDir)),true);
			QueryParser parser = new QueryParser(Version.LUCENE_33, fieldString, analyzer);
			Query query = parser.parse(searchStr);
			int top_num = 15 ;	// hard cap on returned hits
			TopDocs collector = searcher.search(query, top_num);
			ScoreDoc[] hits = collector.scoreDocs;
			System.out.println("hits length:::::" + collector.totalHits);
			// formatter/highlighter don't depend on the hit — build them once
			SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter(
					"<font color='red'>", "</font>");
			Highlighter highlighter = new Highlighter(simpleHTMLFormatter, new QueryScorer(query));
			for (int i = 0; i < hits.length; i++) {
				Document document = searcher.doc(hits[i].doc);
				String content = document.get(fieldString);
				if(null != content ){
					// one fragment spanning the whole stored value
					highlighter.setTextFragmenter(new SimpleFragmenter(content.length()));
					TokenStream tokenStream = analyzer.tokenStream(fieldString, new StringReader(content));
					String highLightText = highlighter.getBestFragment(tokenStream,content);
					System.out.println(highLightText);
					Map map = this.getHighlightString(document,fieldString);
					map.put(fieldString, highLightText);
					result.add(map);
				}
			}
		} catch (ParseException e) {
			e.printStackTrace();
		} catch (InvalidTokenOffsetsException e) {
			e.printStackTrace();
		} catch (IOException e) {
			// covers CorruptIndexException too
			e.printStackTrace();
		} finally {
			if(searcher != null){
				try {
					searcher.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		return result ;
	}
	/**
	 * Null-safe conversion of any value to its string form.
	 *
	 * @param obj value to convert; may be null
	 * @return {@code obj.toString()}, or "" when obj is null
	 */
	public String objectToString(Object obj){
		return (obj == null) ? "" : obj.toString();
	}
	
	/**
	 * Copies the stored display fields of a hit document into a map, skipping
	 * the one field that the caller will replace with its highlighted
	 * fragment.
	 *
	 * @param document    the Lucene hit document
	 * @param fieldString the field being highlighted (excluded from the copy)
	 * @return map of field name to stored value for all other display fields
	 */
	public Map getHighlightString(Document document,String fieldString){
		Map map = new HashMap();
		String[] displayFields = {"rid", "title", "author", "themewords",
				"publishcity", "publishyear", "entitle", "periodicalname",
				"enthemewords"};
		for (String field : displayFields) {
			if(!field.equals(fieldString)){
				map.put(field, document.get(field));
			}
		}
		return map;
	}
	/**
	 * Delegates list retrieval to the DAO.
	 *
	 * @param pageParam paging/filter parameters as defined by the framework
	 * @return the DAO's page result
	 */
	public PageUtil getList(PageParam pageParam) {
		 
		return this.dataDao.getList(pageParam);
	}
}
