package com.hs.hbp.action;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.Version;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;

import com.hs.core.db.DBMgr;
import com.hs.core.db.QueryPage;
import com.hs.core.utils.PageParam;
import com.hs.core.utils.PageUtil;
import com.hs.core.utils.SystemUtil;
import com.hs.hbp.model.HbpZhu;
import com.hs.hbp.service.IHbpZhuService;
import com.hs.hbp.util.HBPConstant;
@Controller
public class HbpZhuController extends AbstractAction {

	private static final Logger logger = Logger.getLogger(HbpZhuController.class);

	private IHbpZhuService hbpZhuService;

	@Autowired
	public void setHbpZhuService(IHbpZhuService hbpZhuService) {
		this.hbpZhuService = hbpZhuService;
	}

	/**
	 * Doubles single quotes in a value that is concatenated into a SQL
	 * string literal, so user input cannot break out of the quoted
	 * LIKE pattern (basic SQL-injection mitigation).
	 *
	 * NOTE(review): escaping is a stop-gap — switch to bound parameters
	 * if QueryPage/PageParam support them.
	 *
	 * @param value raw request parameter, may be null
	 * @return value with every ' doubled, or null when value was null
	 */
	private static String escapeSql(String value) {
		return value == null ? null : value.replace("'", "''");
	}

	/**
	 * PBH bibliographic record list (native-SQL variant).
	 *
	 * Builds a filtered SELECT over HBP_ZHU from the optional request
	 * parameters "nianfen" (publish year), "field" and "searchStr",
	 * runs it through QueryPage and stores the results in a local map.
	 *
	 * @param request  current HTTP request
	 * @param response current HTTP response
	 * @return null — no view is rendered (see TODO below)
	 * @throws ServletException never thrown here, kept for the handler contract
	 * @throws IOException never thrown here, kept for the handler contract
	 */
	@RequestMapping("/listHbpZhu1.do")
	public ModelAndView listHbpZhu1(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {

		StringBuffer searchBuffer = new StringBuffer(" select * from HBP_ZHU a where 1=1  ");
		String publishyear = request.getParameter("nianfen");
		String field = request.getParameter("field");
		String q = request.getParameter("searchStr");

		// Echo the (raw) search criteria back to the view.
		request.setAttribute("field", field);
		request.setAttribute("searchStr", q);

		if (StringUtils.isNotBlank(publishyear)) {
			// SECURITY: escape before concatenating into the LIKE pattern.
			searchBuffer.append(" and a.publishyear like '%").append(escapeSql(publishyear)).append("%' ");
			request.setAttribute("nianfen", publishyear);
		}
		if (StringUtils.isNotBlank(field) && StringUtils.isNotBlank(q)) {
			String safeQ = escapeSql(q);
			if ("title".equals(field)) {
				searchBuffer.append(" and (a.title like '%").append(safeQ)
						.append("%' or a.entitle like '%").append(safeQ).append("%') ");
			}
			if ("author".equals(field)) {
				searchBuffer.append(" and a.author like '%").append(safeQ).append("%' ");
			}
			if ("abstract".equals(field)) {
				// "abstract" here actually matches source code / periodical name.
				searchBuffer.append(" and (a.sourcecode like '%").append(safeQ).append("%' or ");
				searchBuffer.append(" a.periodicalname like '%").append(safeQ).append("%') ");
			}
		}
		searchBuffer.append(" order by a.serialno asc");
		QueryPage qp = new QueryPage(request, "/listHbpZhu1.do", searchBuffer.toString());
		ArrayList datalist = (ArrayList) qp.getDataList();
		String pagestr = qp.getPageStr();
		Map data = new HashMap();

		data.put("dataList", datalist);
		// BUGFIX: the pagination string was previously overwritten with the
		// data list ("pagestr" -> datalist); store the page string instead.
		data.put("pagestr", pagestr);
		// TODO(review): "data" is built but never rendered because this
		// handler returns null; confirm whether this endpoint is still used.
		return null;
	}

	/**
	 * PBH bibliographic record list (JPQL variant, the live endpoint).
	 *
	 * Supports filtering by publish year ("nianfen") and by one searchable
	 * field ("title", "author", "chubanwu", "thewords", "abstract"), plus an
	 * optional "onesql" id list restriction. Results are paged via
	 * PageParam/PageUtil and rendered by the "app/list" view.
	 *
	 * @param request  current HTTP request
	 * @param response current HTTP response
	 * @return ModelAndView for "app/list" with "dataList" -> PageUtil
	 * @throws ServletException kept for the handler contract
	 * @throws IOException kept for the handler contract
	 */
	@RequestMapping("/listHbpZhu.do")
	public ModelAndView listHbpZhu(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		PageParam pageParam = new PageParam(request, "listHbpZhu.do");
		PageUtil pageUtil = null;
		StringBuffer searchBuffer = new StringBuffer(" select a from HbpZhu a where 1=1  ");
		String publishyear = request.getParameter("nianfen");
		String field = request.getParameter("field");
		String q = request.getParameter("searchStr");
		String onesql = request.getParameter("onesql");
		logger.debug("onesql:" + onesql);

		request.setAttribute("field", field);
		request.setAttribute("searchStr", q);

		if (StringUtils.isNotBlank(publishyear)) {
			// SECURITY: escape before concatenating into the LIKE pattern.
			searchBuffer.append(" and a.publishyear like '%").append(escapeSql(publishyear)).append("%' ");
			pageParam.getConditionMap().put("nianfen", publishyear);
			request.setAttribute("nianfen", publishyear);
		}
		if (StringUtils.isNotBlank(field)) {
			pageParam.getConditionMap().put("field", field);
			if (StringUtils.isNotBlank(q)) {
				pageParam.getConditionMap().put("searchStr", q);
				String safeQ = escapeSql(q);
				if ("title".equals(field)) {
					searchBuffer.append(" and (a.title like '%").append(safeQ)
							.append("%' or a.entitle like '%").append(safeQ).append("%') ");
				}
				if ("author".equals(field)) {
					searchBuffer.append(" and a.author like '%").append(safeQ).append("%' ");
				}
				if ("chubanwu".equals(field)) {
					// publication: matches source code or periodical name
					searchBuffer.append(" and (a.sourcecode like '%").append(safeQ).append("%' or ");
					searchBuffer.append(" a.periodicalname like '%").append(safeQ).append("%') ");
				}
				if ("thewords".equals(field)) {
					// theme words, Chinese or English
					searchBuffer.append(" and (a.enthemewords like '%").append(safeQ).append("%' or ");
					searchBuffer.append(" a.themewords like '%").append(safeQ).append("%') ");
				}
				if ("abstract".equals(field)) {
					searchBuffer.append(" and a.abstract_ like '%").append(safeQ).append("%' ");
				}
			}
		}
		if (StringUtils.isNotEmpty(onesql)) {
			// SECURITY: "onesql" used to be spliced verbatim into the IN(...)
			// clause — an open SQL-injection hole. Only accept a plain
			// comma-separated list of numeric ids.
			if (onesql.matches("[0-9,\\s]+")) {
				searchBuffer.append(" and a.id in(").append(onesql).append(")");
			} else {
				logger.warn("Rejected suspicious onesql parameter: " + onesql);
			}
		}
		logger.debug("sql:" + searchBuffer.toString());
		Map data = new HashMap();
		pageParam.setSearchSql(searchBuffer.toString());
		pageUtil = hbpZhuService.getList(pageParam);
		data.put("dataList", pageUtil);
		return new ModelAndView("app/list", data);
	}

	/**
	 * Shows the detail view for a single record.
	 *
	 * Loads the HbpZhu entity for the "rid" request parameter (when present)
	 * and forwards to the Chinese or English ("language=en") detail page.
	 *
	 * @param request  current HTTP request
	 * @param response current HTTP response
	 * @return detail view ("app/searchDetail" or "app/searchPubmedDetail")
	 * @throws ServletException kept for the handler contract
	 * @throws IOException kept for the handler contract
	 */
	@RequestMapping("/searchDetail.do")
	public ModelAndView searchDetail(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		// Removed dead locals (PageParam/PageUtil/StringBuffer) that were
		// built here but never used.
		String rid = request.getParameter("rid");
		String language = request.getParameter("language");
		HbpZhu obj = null;
		if (StringUtils.isNotBlank(rid)) {
			// Long.valueOf instead of the deprecated new Long(...)
			obj = (HbpZhu) hbpZhuService.findByID(HbpZhu.class, Long.valueOf(rid));
		}
		request.setAttribute("dataList", obj);
		String returnPage = "app/searchDetail";
		if ("en".equals(language)) {
			returnPage = "app/searchPubmedDetail";
		}
		return new ModelAndView(returnPage);
	}

	/**
	 * PBH record list (static-page variant).
	 *
	 * NOTE(review): the paging call is commented out, so a null PageUtil is
	 * stored in the session and the queried rows ("hbp") are discarded.
	 * Behavior preserved; confirm whether this endpoint is still wired up.
	 *
	 * @param request  current HTTP request
	 * @param response current HTTP response
	 * @return ModelAndView for "app/static/list"
	 * @throws ServletException kept for the handler contract
	 * @throws IOException kept for the handler contract
	 */
	@RequestMapping("/searchHbpZhu.do")
	public ModelAndView searchHbpZhu(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		PageParam pageParam = new PageParam(request, "searchHbpZhu.do");
		PageUtil pageUtil = null;
		StringBuffer searchBuffer = new StringBuffer(" select * from Hbp_Zhu a where rownum<10  ");
		searchBuffer.append(" order by a.serialno");
		List hbp = DBMgr.getDbUtilInst().queryList(searchBuffer.toString(), null);
		Map data = new HashMap();
		pageParam.setSearchSql(searchBuffer.toString());
//		pageUtil = hbpZhuService.getPageList(hbp,pageParam);
//		data.put("dataList", pageUtil);
		request.getSession().setAttribute("dataList", pageUtil);
		return new ModelAndView("app/static/list", data);
	}

	/**
	 * Builds the Lucene index for all HBP_ZHU rows with isindexed=0.
	 *
	 * Processes rows in batches of up to 10000: indexes a batch via
	 * {@link #createIndex(List, IndexWriter)}, marks each row isindexed=1,
	 * and repeats until the unindexed count reaches zero.
	 *
	 * @param request  current HTTP request
	 * @param response current HTTP response
	 * @return ModelAndView for "app/test/testuser"
	 * @throws ServletException kept for the handler contract
	 * @throws IOException on index open/optimize/close failure
	 */
	@RequestMapping("/createZhuIndex.do")
	public ModelAndView createZhuIndex(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		StringBuffer searchBuffer = new StringBuffer(
				" select * from (select * from hbp_zhu a where isindexed=0) where  rownum<=10000 ");
		searchBuffer.append(" order by rid");
		DBMgr dbo = DBMgr.getDbUtilInst();
		int countall = 0;
		String indexPath = HBPConstant.INDEX_ZHU_DIR;
		Analyzer analyzer = new SmartChineseAnalyzer(Version.LUCENE_33);

		IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_33, analyzer);
		config.setMaxBufferedDocs(2000);
		config.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
		config.setWriteLockTimeout(2000);
		config.setMaxThreadStates(20);
		config.setRAMBufferSizeMB(500d);
		Directory dir = FSDirectory.open(new File(indexPath));
		IndexWriter writer = new IndexWriter(dir, config);

		try {
			do {
				List dataList = dbo.queryList(
						"select to_char(count(*)) as countall from hbp_zhu where isindexed=0", null);
				Map data = (Map) dataList.get(0);
				String allnum = (String) data.get("countall");
				logger.debug("共有"+allnum +"条数据未建立索引.");
				countall = Integer.parseInt(allnum);
				List indexData = dbo.queryList(searchBuffer.toString(), null);
				// BUGFIX: if the count stays positive but the fetch returns no
				// rows, the old loop spun forever. Bail out instead.
				if (countall > 0 && indexData.isEmpty()) {
					logger.warn("isindexed=0 rows reported but none fetched; aborting to avoid an endless loop.");
					break;
				}
				createIndex(indexData, writer);
				// Mark each indexed row so the next batch skips it.
				for (int i = 0; i < indexData.size(); i++) {
					Map row = (Map) indexData.get(i);
					dbo.update("update hbp_zhu set isindexed=? where rid=?",
							new Object[] { 1, row.get("rid") });
				}
			} while (countall > 0);
		} finally {
			// BUGFIX: the writer (and its write lock) was leaked when indexing
			// threw; always optimize and release it.
			try {
				writer.optimize();
			} finally {
				writer.close();
			}
		}
		return new ModelAndView("app/test/testuser");
	}

	public static void main(String[] args) throws IOException {
		// Intentionally empty; kept for ad-hoc manual testing.
	}

	/**
	 * Null-safe toString: returns "" for null, obj.toString() otherwise.
	 *
	 * @param obj any object, may be null
	 * @return string form of obj, never null
	 */
	public static String objectToString(Object obj) {
		return obj == null ? "" : obj.toString();
	}

	/**
	 * Adds one Lucene Document per result-set row to the given writer.
	 *
	 * Each row is a Map keyed by (lower-case) column name; missing values
	 * are indexed as empty strings. The writer is committed in the finally
	 * block so partial batches are not lost.
	 *
	 * @param list   rows to index (List of Map, as returned by DBMgr)
	 * @param writer open IndexWriter; committed here but NOT closed
	 * @throws CorruptIndexException on a corrupted index during commit
	 * @throws IOException on I/O failure during commit
	 */
	public static void createIndex(List list, IndexWriter writer) throws CorruptIndexException, IOException {
		logger.debug("index is starting!!!!");
		int count = 0;
		long startTime = new Date().getTime();
		long endTime = 0;
		int numberindex = 0;
		try {

			for (int i = 0; i < list.size(); i++) {

				Map map = (Map) list.get(i);
				Document doc = new Document();
				// primary key — stored only, used later to load the DB row
				doc.add(new Field("rid", objectToString(map.get("rid")), Field.Store.YES, Field.Index.NO));
				// classification number
				doc.add(new Field("classno", objectToString(map.get("classno")), Field.Store.NO, Field.Index.ANALYZED));
				doc.add(new Field("title", objectToString(map.get("title")), Field.Store.YES, Field.Index.ANALYZED));
				// English title
				doc.add(new Field("entitle", objectToString(map.get("entitle")), Field.Store.YES, Field.Index.ANALYZED));
				// author
				doc.add(new Field("author", objectToString(map.get("author")), Field.Store.YES, Field.Index.ANALYZED));
				// author affiliation
				doc.add(new Field("authorunit", objectToString(map.get("authorunit")), Field.Store.YES, Field.Index.ANALYZED));
				// city
				doc.add(new Field("city", objectToString(map.get("city")), Field.Store.NO, Field.Index.NOT_ANALYZED));
				// abstract
				doc.add(new Field("abstract", objectToString(map.get("abstract")), Field.Store.NO, Field.Index.ANALYZED));
				// periodical name
				doc.add(new Field("periodicalname", objectToString(map.get("periodicalname")), Field.Store.YES, Field.Index.ANALYZED));
				doc.add(new Field("soursecode", objectToString(map.get("soursecode")), Field.Store.YES, Field.Index.ANALYZED));

				doc.add(new Field("publishyear", objectToString(map.get("publishyear")), Field.Store.YES, Field.Index.NOT_ANALYZED));
				// volume
				doc.add(new Field("colume", objectToString(map.get("colume")), Field.Store.NO, Field.Index.NOT_ANALYZED));
				doc.add(new Field("pageno", objectToString(map.get("pageno")), Field.Store.YES, Field.Index.NO));
				// BUGFIX: "publishcity" was added twice (here and again below),
				// producing an accidental multi-valued field; add it once.
				doc.add(new Field("publishcity", objectToString(map.get("publishcity")), Field.Store.YES, Field.Index.ANALYZED));
				doc.add(new Field("period", objectToString(map.get("period")), Field.Store.YES, Field.Index.NOT_ANALYZED));
				doc.add(new Field("publishdate", objectToString(map.get("publishdate")), Field.Store.YES, Field.Index.NOT_ANALYZED));
				doc.add(new Field("issn", objectToString(map.get("issn")), Field.Store.YES, Field.Index.NOT_ANALYZED));
				doc.add(new Field("counrtycode", objectToString(map.get("counrtycode")), Field.Store.YES, Field.Index.NOT_ANALYZED));
				doc.add(new Field("themewords", objectToString(map.get("themewords")), Field.Store.YES, Field.Index.ANALYZED));
				doc.add(new Field("enthemewords", objectToString(map.get("enthemewords")), Field.Store.YES, Field.Index.ANALYZED));
				doc.add(new Field("documenttype", objectToString(map.get("documenttype")), Field.Store.YES, Field.Index.NO));
				doc.add(new Field("language", objectToString(map.get("language")), Field.Store.YES, Field.Index.NO));
				doc.add(new Field("keywords", objectToString(map.get("keywords")), Field.Store.YES, Field.Index.ANALYZED));
				doc.add(new Field("fund", objectToString(map.get("fund")), Field.Store.YES, Field.Index.ANALYZED));
				// import type distinguishes data sources (upper-case column key)
				String type = objectToString(map.get("IMPORTTYPE"));

				doc.add(new Field("imptype", type, Field.Store.YES, Field.Index.NOT_ANALYZED));

				writer.addDocument(doc);
				count++;
			}
			numberindex = writer.numDocs();

		} catch (CorruptIndexException e) {
			// BUGFIX: was a swallowed printStackTrace(); log with cause.
			logger.error("Index corrupted while adding documents", e);
		} catch (LockObtainFailedException e) {
			logger.error("Could not obtain the index write lock", e);
		} catch (IOException e) {
			logger.error("I/O error while adding documents", e);
		} finally {
			// BUGFIX: endTime stayed 0 when an exception was thrown; always
			// record it so the elapsed-time log is meaningful.
			endTime = new Date().getTime();
			if (writer != null) {
				writer.commit();
			}
		}
		// BUGFIX: elapsed time was computed as start - end (always negative).
		logger.debug("索引耗时:" + (endTime - startTime) / 1000);
		logger.debug("索引数量:" + numberindex);
	}

	// Legacy shared search state. Retained for compatibility, but
	// searchZhuData() now works on local variables and passes them to
	// highlight() explicitly: a singleton @Controller is shared between
	// request threads, so per-request state in instance fields is a race.
	Highlighter highlighter = null;
	SmartChineseAnalyzer analyzer = null;
	IndexReader indexReader = null;
	SimpleHTMLFormatter shf = null;
	Query query = null;
	BooleanQuery bqType = null;

	/**
	 * Full-text search over the index built by createZhuIndex, with hit
	 * highlighting and manual pagination.
	 *
	 * Queries nine text fields with OR (SHOULD) semantics, highlights the
	 * title/theme-word fields, and fills a PageInfo with paging data for
	 * the "app/static/list" view.
	 *
	 * @param request  current HTTP request ("searchStr", "pagenum", ...)
	 * @param response current HTTP response
	 * @return ModelAndView for "app/static/list" with results and paging info
	 * @throws Exception on query-parse or index-access failure
	 */
	@RequestMapping(value = "/searchZhuData.do")
	public ModelAndView searchZhuData(HttpServletRequest request,
			HttpServletResponse response) throws Exception {
		String indexDir = SystemUtil.getWebappRoot().getAbsolutePath() + "/index";
		Map data = new HashMap();
		String q = request.getParameter("searchStr");
		long start = new Date().getTime(); // start time

		logger.debug("SearchWord:" + q);
		if (StringUtils.isBlank(q)) {
			// Nothing to search for — render the empty list view.
			return new ModelAndView("app/static/list", data);
		}
		String pagenum = request.getParameter("pagenum");
		String language = request.getParameter("language");
		String fieldString = request.getParameter("field");
		data.put("field", fieldString);
		data.put("language", language);

		int pageIndex = 1;
		if (StringUtils.isNotEmpty(pagenum)) {
			pageIndex = Integer.parseInt(pagenum);
		}

		List<Map<String, String>> list = new ArrayList<Map<String, String>>();

		// Search every indexed text field; SHOULD on each clause = OR query.
		String[] fields = new String[] { "title", "abstract", "author", "themewords",
				"publishyear", "publishcity", "entitle", "enthemewords", "soursecode" };
		BooleanClause.Occur[] flags = new BooleanClause.Occur[fields.length];
		for (int i = 0; i < flags.length; i++) {
			flags[i] = BooleanClause.Occur.SHOULD;
		}

		// Build the per-request search objects as LOCALS (thread safety);
		// mirror them into the legacy fields for any external observers.
		SmartChineseAnalyzer localAnalyzer = new SmartChineseAnalyzer(Version.LUCENE_33);
		SimpleHTMLFormatter localFormatter = new SimpleHTMLFormatter("<font class='rh'>", "</font>");
		Query localQuery = MultiFieldQueryParser.parse(Version.LUCENE_33, q, fields, flags, localAnalyzer);
		this.analyzer = localAnalyzer;
		this.shf = localFormatter;
		this.query = localQuery;

		IndexReader reader = IndexReader.open(FSDirectory.open(new File(indexDir)), true);
		this.indexReader = reader;
		IndexSearcher searcher = null;
		try {
			searcher = new IndexSearcher(FSDirectory.open(new File(indexDir)), true);
			// manual pagination
			PageInfo pageInfo = new PageInfo();
			pageInfo.setPageIndex(pageIndex);
			int startRec = pageInfo.getStartRec(); // first record on this page
			int endRec = pageInfo.getEndRec();     // one past the last record

			TopScoreDocCollector collector = TopScoreDocCollector.create(endRec, false);
			searcher.search(localQuery, collector);
			ScoreDoc[] hits = collector.topDocs().scoreDocs;

			// total hit count
			int totalRec = collector.getTotalHits();
			pageInfo.setTotalRec(totalRec);
			logger.debug("记录总数:" + totalRec);
			endRec = endRec > totalRec ? totalRec : endRec;

			for (int i = startRec; i < endRec; i++) {
				Map<String, String> map = new HashMap<String, String>();
				int docid = hits[i].doc;
				Document doc = searcher.doc(docid);
				String title = doc.get("title");
				String author = doc.get("author");
				String imptype = doc.get("imptype");
				String themewords = doc.get("themewords");
				String publishyear = doc.get("publishyear");
				String publishcity = doc.get("publishcity");
				String rid = doc.get("rid");
				map.put("title", highlight(docid, "title", title, language, localQuery, localAnalyzer, localFormatter));
				map.put("themewords", highlight(docid, "themewords", themewords, language, localQuery, localAnalyzer, localFormatter));
				map.put("imptype", imptype);

				String enthemewords = doc.get("enthemewords");
				String entitle = doc.get("entitle");
				String periodicalname = doc.get("periodicalname");
				String soursecode = doc.get("soursecode");

				map.put("enthemewords", highlight(docid, "enthemewords", enthemewords, language, localQuery, localAnalyzer, localFormatter));
				map.put("entitle", highlight(docid, "entitle", entitle, language, localQuery, localAnalyzer, localFormatter));
				map.put("periodicalname", highlight(docid, "periodicalname", periodicalname, language, localQuery, localAnalyzer, localFormatter));
				map.put("soursecode", soursecode);
				map.put("author", author);
				map.put("publishyear", publishyear);
				map.put("rid", rid);
				map.put("publishcity", publishcity);
				// BUGFIX: substring(0,4) threw StringIndexOutOfBoundsException
				// when the rendered score was shorter than four characters.
				String scoreStr = String.valueOf(hits[i].score * 100);
				map.put("score", scoreStr.substring(0, Math.min(4, scoreStr.length())) + "%");
				list.add(map);
			}

			// total page count
			pageInfo.setTotalPage((pageInfo.getTotalRec() % pageInfo.getPageSize() == 0)
					? (pageInfo.getTotalRec() / pageInfo.getPageSize())
					: (pageInfo.getTotalRec() / pageInfo.getPageSize()) + 1);

			int totalPage = pageInfo.getTotalPage();

			// show at most 10 page links, centred around the current page
			int maxPage = 10;
			maxPage = totalPage > maxPage ? maxPage : totalPage;

			// how far back the page-number window may reach
			int pushNum = 5;

			int startPage = pageIndex - pushNum > 0 ? pageIndex - pushNum : 1;
			int endPage = pageIndex + 5 >= totalPage ? totalPage : pageIndex + 5;
			int[] pageNumbers = new int[(endPage + 1 - startPage)];
			for (int i = 1; i <= (endPage + 1 - startPage); i++) {
				pageNumbers[i - 1] = startPage + i - 1;
			}
			pageInfo.setPageNumbers(pageNumbers);
			pageInfo.setPrePage(pageInfo.getPageIndex() - 1);
			pageInfo.setNextPage(pageInfo.getPageIndex() + 1);

			int count = collector.getTotalHits();
			long end = new Date().getTime();
			// BUGFIX: elapsed value is milliseconds but was labelled seconds.
			logger.debug("找到 " + collector.getTotalHits() + " 条数据，花费时间 "
					+ (end - start) + " 毫秒");

			double usetime = (double) (end - start) / 1000;

			data.put("searchStr", q);
			data.put("count", count);
			data.put("usetime", usetime);
			data.put("pageinfo", pageInfo);
			data.put("startitem", startRec + 1);
			data.put("enditem", endRec);

		} catch (NumberFormatException e) {
			// BUGFIX: was a swallowed printStackTrace(); log with cause.
			logger.error("Bad numeric value during search", e);
		} catch (InvalidTokenOffsetsException e) {
			logger.error("Highlighting failed", e);
		} finally {
			// BUGFIX: the searcher and reader were never closed, leaking file
			// handles on every request.
			if (searcher != null) {
				searcher.close();
			}
			reader.close();
		}
		String returnPage = "app/static/list";
		data.put("list", list);
		return new ModelAndView(returnPage, data);
	}

	/**
	 * Returns the content with matched query terms wrapped in the highlight
	 * markup, or the original content when nothing matches (or it is null).
	 *
	 * The query/analyzer/formatter are passed in explicitly so concurrent
	 * requests do not race on shared instance fields.
	 *
	 * @param docid     Lucene document id (currently unused, kept for callers)
	 * @param fieldName field the content came from
	 * @param content   stored field value, may be null
	 * @param language  UI language (currently unused, kept for callers)
	 * @param query     parsed search query used for scoring fragments
	 * @param analyzer  analyzer used to tokenize the content
	 * @param shf       HTML formatter that wraps matched terms
	 * @return highlighted fragment, or the original content
	 * @throws IOException on tokenization failure
	 * @throws InvalidTokenOffsetsException on inconsistent token offsets
	 */
	private String highlight(int docid, String fieldName, String content, String language,
			Query query, Analyzer analyzer, SimpleHTMLFormatter shf)
			throws NumberFormatException, IOException, InvalidTokenOffsetsException {
		String result = content;
		if (null != content) {
			Highlighter hl = new Highlighter(shf, new QueryScorer(query));
			hl.setTextFragmenter(new SimpleFragmenter(200));
			TokenStream tokenStream = analyzer.tokenStream(fieldName, new StringReader(content));
			String highLightText = hl.getBestFragment(tokenStream, content);
			if (StringUtils.isNotBlank(highLightText)) {
				result = highLightText;
			}
		}
		return result;
	}

}
