package com.mijie.fentsi;


import java.io.StringReader;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.ObjectUtils.Null;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.wltea.analyzer.cfg.Configuration;
import org.wltea.analyzer.cfg.DefaultConfig;

import org.wltea.analyzer.dic.Dictionary;
import org.wltea.analyzer.dic.Hit;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.search.TopDocs;
// r12 import org.apache.tools.ant.taskdefs.Sleep;
import org.wltea.analyzer.cfg.Configuration;
import org.wltea.analyzer.lucene.IKAnalyzer;
import java.io.BufferedReader;
import org.wltea.analyzer.cfg.DefaultConfig;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;

import org.wltea.analyzer.cfg.Configuration;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;
import org.wltea.analyzer.dic.Dictionary;
import org.wltea.analyzer.dic.Hit;

import com.mijie.homi.search.util.DbNdsController;
import com.mijie.homi.search.util.Dbcontroller;
import com.mijie.homi.search.util.fileC0;
import com.mijie.homi.search.util.god;
import com.mijie.homi.search.util.god1;
import com.mijie.homi.search.util.listUtil;
import com.mijie.homi.search.util.mycfg;
import com.mijie.homi.search.util.strUtil;

public class fentsiOr {

	/**
	 * Entry point: runs the IK segmentation micro-benchmark and prints elapsed time.
	 * Historical one-off experiments (frequency stats, stop-word filtering, per-topic
	 * segmentation, ...) were invoked from here; see the private driver methods.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		// System.currentTimeMillis() replaces java.util.Date for timing
		// (java.util.Date is legacy; a long is all that is needed here).
		long start = System.currentTimeMillis();

		test4luceneImpAndIkimp();

		System.out.println("--fini");
		long end = System.currentTimeMillis();
		System.out.println("检索完成，用时" + (end - start)
				+ "毫秒");
	}

	/** Micro-benchmark: segments the same sentence 100000 times via the IK implementation. */
	private static void test4luceneImpAndIkimp() {
		final fentsiOr segmenter = new fentsiOr();
		int remaining = 100000;
		while (remaining-- > 0) {
			segmenter.fentsiByIKImp("基于java语言开发的轻量级的中文分词工具包");
		}
	}
	 StringReader sr_cb5;
	 @Deprecated
	public String fentsiByIKImp(String text) {
//		  String text="信件 函件 整机 零件";  
//	        text=	"他说他还爱我，的确，他对我很好。可是他又总喜欢和别的女生聊天。我该不该和他提出分手  欢和好像";
	    	
	       // StringReader
	        sr_cb5=new StringReader(text);  
	        IKSegmenter ik=new IKSegmenter(sr_cb5, true);  
	        
	        Lexeme lex=null; 
	        
	        StringBuilder sb=new StringBuilder();
		
			 
	        try {
				while((lex=ik.next())!=null){  
					sb.append(lex.getLexemeText()).append("|");  
				}
			} catch (IOException e) {
				 
				e.printStackTrace();
			}  
			
		String	rc0=sb.toString();
			// fc.save(rc0,targetTxt);
		
			return rc0;
			
	}
	
	/**
	 * Segments {@code text} with the IK smart segmenter and joins the tokens with
	 * a single space (a trailing space follows the last token).
	 */
	public String segmtByIKImp(String text) {
		sr_cb5 = new StringReader(text);
		final IKSegmenter segmenter = new IKSegmenter(sr_cb5, true);
		final StringBuilder joined = new StringBuilder();
		try {
			for (Lexeme token = segmenter.next(); token != null; token = segmenter.next()) {
				joined.append(token.getLexemeText()).append(" ");
			}
		} catch (IOException e) {
			// Best-effort: keep whatever was segmented before the failure.
			e.printStackTrace();
		}
		return joined.toString();
	}

	/**
	 * One-off driver: segments c:\sincin\sincin.txt into c:\sincin\sincin_fentsi.txt
	 * using the input-method word libraries.
	 * Cleanup: removed dead half-list bookkeeping that was never consumed.
	 */
	private static void fentsi4sincin() {
		String wordLibs = "c:\\wordFromInputmethod_deSingle_addSingword.txt,c:\\word.txt,c:\\word_ext.txt";
		String sourceTxt = "c:\\sincin\\sincin.txt";
		String target = "c:\\sincin\\sincin_fentsi.txt";
		fentsiOr ftc = new fentsiOr();
		ftc.fentsi4bigfile(sourceTxt, target, wordLibs);
	}

	// When true, fentsi(...) uses the IK default lexicon; otherwise the custom one.
	private boolean isUseDefaultWordlib;

	/**
	 * One-off driver: segments the 3/4-character expression file using only the
	 * IK built-in lexicon (wordLibs intentionally empty).
	 * Cleanup: removed dead half-list bookkeeping; typed the raw lists.
	 */
	@SuppressWarnings("unchecked")
	private static void fentsifor3zi4zi() {
		String wordLibs = ""; // empty: rely on the IK default lexicon
		String sourceTxt = "C:\\word\\exp3zi4zi.txt";
		String target = "C:\\word\\exp3zi4zi_fentsiByIklib.txt";
		fentsiOr ftc = new fentsiOr();
		ftc.isUseDefaultWordlib = true;

		fileC0 fc = new fileC0();
		List<String> lines = fc.fileRead2list(sourceTxt, "utf-8");
		List<String> expressions = filte43zi4zi(lines);
		List<String> result = ftc.fentsi(expressions, wordLibs);
		fc.saveList2file(result, target);
	}


	/**
	 * Extracts the second tab-separated column of every line.
	 *
	 * <p>Fix: lines without a second column are skipped instead of throwing
	 * ArrayIndexOutOfBoundsException as the original did.
	 */
	private static List<String> filte43zi4zi(List<String> li2) {
		List<String> li_r = new ArrayList<String>();
		for (String s : li2) {
			String[] cols = s.trim().split("\t");
			if (cols.length > 1) {
				li_r.add(cols[1]);
			}
		}
		return li_r;
	}

	/**
	 * Copies every trimmed line longer than one character from {@code f} to
	 * c:\wordFromInputmethod_deSingle.txt (drops single-character entries).
	 */
	private static void deSingleWord(String f) {
		fileC0 fc = new fileC0();
		List<String> kept = new ArrayList<String>();
		for (String raw : fc.fileRead2list(f)) {
			String word = raw.trim();
			if (word.length() > 1) {
				kept.add(word);
			}
		}
		fc.saveList2file(kept, "c:\\wordFromInputmethod_deSingle.txt");
	}

	/**
	 * Explodes every line of the monosyllable word file into individual characters
	 * and writes the de-duplicated character set to the target file.
	 */
	private static void splitBySingeChar() {
		String f = "c:\\常见的单音节字词 2.txt";
		String target = "c:\\常见的单音节字词 2_deDuli.txt";
		fileC0 fc = new fileC0();
		Set<String> chars = new HashSet<String>();
		for (String line : fc.fileRead2list(f)) {
			for (int i = 0; i < line.length(); i++) {
				chars.add(String.valueOf(line.charAt(i)));
			}
		}
		fc.saveSet2file(chars, target);
	}

	/**
	 * Scratch test for segmentation with the custom (non-IK-default) lexicon:
	 * segments the last sample sentence assigned to {@code s}, applies the
	 * syaocyi rewrite rules, prints the result, then probes a few words for
	 * dictionary membership. The successive reassignments of {@code s} are a
	 * kept history of test sentences — only the final one is segmented.
	 */
	private static void testDisableIktsiku() {
		String s = "他说他还爱我，的确，他对我很好。可是他又总喜欢和别的女生聊天。我该不该和他提出分手";// 欢和好像";
		// ik 他说|他|还|爱我|的确|他|对|我|很好|可是|他|又|总|喜|欢和|别的|女生|聊天|我|该不该|和|他|提出|分手|
		// mycfg
		// 他说|他|还|爱|我|的确|他|对|我|很好|可是|他|又|总|喜欢|和|别的|女生|聊天|我|该不该|和|他|提出|分手|
		s = "你可以接受交往后多久结婚？";
		s = "恋爱多久后你会带对方和自己的家长见面？";
		s = "你会主动把自己的恋人带给自己的家长见吗？";
		s = "当您遇到一位心仪的对象，但您的条件比TA差很多，您还会追TA吗？";
		s = "您怎么看待一对恋人因为现实的原因而分手的？";
		s = "为什么老天爱看玩笑，我爱的人不爱我，爱我的人我不爱她？";
		s = "如果你的恋人提出和你拍床照，你会答应吗？";
		s = "男友有个红颜知己，他跟我说他俩太相似，是不分性别的亲人，你信么";
		s = "两人都处于发展的阶段，还能不能继续走下去？";
		s = "结婚后可以接受你的另一半有蓝颜知己吗？";
		s = "离了婚有儿子的男人可以嫁吗attilax_split结婚后可以接受你的另一半有蓝颜知己吗";

		String wordLibs = "c:\\wordFromInputmethod_deSingle_addSingword.txt,c:\\word.txt,c:\\word_ext.txt";
		// wordLibs=""; with no dictionary everything splits into single chars: 他|说|他|还|爱|我|的|确|他|对|我|很|好|可|是|
		fentsiOr fc = new fentsiOr();
		String r = fc.fentsi_disableDefaultTsiku(s, wordLibs);
		// Apply the "old=new" rewrite rules after segmentation.
		r = fc.syaocyi(r, "c:\\word_syaocyi.txt");
		System.out.println(r);

		// System.out.println("--is exsit :"+fc.isExistWord("欢和"));
		// System.out.println("--is exsit :"+fc.isExistWord("喜"));
		// System.out.println("--is exsit :"+fc.isExistWord("和"));
		// 你|可以|接受|交|往后|多久|结婚|
		// Membership probes against the dictionary built by fentsi_disableDefaultTsiku.
		System.out.println("--is exsit :" + fc.isExistWord("性别"));
		System.out.println("--is exsit :" + fc.isExistWord("两人"));
		System.out.println("--is exsit :" + fc.isExistWord("能不能"));
		System.out.println("--is exsit :" + fc.isExistWord("还"));
		System.out.println("--is exsit :" + fc.isExistWord("儿子"));
	}

	/**
	 * Applies the "old=new" rewrite rules read from {@code pathofSyaocyiWordLib}
	 * to {@code str}; each non-blank rule line has the form {@code old=new} and
	 * is applied as a literal replacement.
	 *
	 * <p>Fixes: the original used regex {@link String#replaceAll} and escaped
	 * only '|', so any other regex metacharacter in a rule corrupted the match —
	 * literal {@link String#replace} needs no escaping. Malformed rule lines
	 * (no '=') are now skipped instead of crashing.
	 */
	private String syaocyi(String str, String pathofSyaocyiWordLib) {
		fileC0 fc = new fileC0();
		for (String line : fc.fileRead2list(pathofSyaocyiWordLib)) {
			line = line.trim();
			if (line.length() == 0)
				continue;
			String[] rule = line.split("=");
			if (rule.length < 2)
				continue; // malformed rule line — skip
			str = str.replace(rule[0].trim(), rule[1].trim());
		}
		return str;
	}

	/**
	 * For every "keywords*" frequency file under c:\exp, writes a stop-word
	 * filtered copy named "filted_<name>.txt" next to it.
	 *
	 * <p>Fix: removed the dead {@code dir.listFiles()} array that was indexed by
	 * the keyword-file count — it could overflow when the directory held fewer
	 * entries than matches, and its element was never used.
	 */
	private static void keywordTonjyi4topicPerTypeFor_filtSyvtsi4tonjyiPinlw() {
		String dirpath = "c:\\exp";
		String targetFilePrefix = "filted";
		String syvtsiLib = "c:\\word_syvtsi.txt";
		List<String> li = fileC0.getFilepathsByPrefix(dirpath, "keywords");
		for (String sourceTxt : li) {
			String filename = fileC0.getFilename(sourceTxt);
			String target = dirpath + "\\" + targetFilePrefix + "_" + filename
					+ ".txt";
			new fentsiOr().filtSyvtsi4tonjyiPinlw(sourceTxt, target, syvtsiLib);
		}
	}

	/**
	 * For every "fentsiOK*" segmentation file under c:\exp, writes a word
	 * frequency table named "keywords_<name>.txt" next to it.
	 *
	 * <p>Fix: removed the dead {@code dir.listFiles()} indexing (potential
	 * out-of-bounds) and an unused fentsiOr instance per iteration.
	 */
	private static void keywordTonjyi4topicPerType() {
		String dirpath = "c:\\exp";
		String targetFilePrefix = "keywords";
		List<String> li = fileC0.getFilepathsByPrefix(dirpath, "fentsiOK");
		for (String sourceTxt : li) {
			String filename = fileC0.getFilename(sourceTxt);
			String target = dirpath + "\\" + targetFilePrefix + "_" + filename
					+ ".txt";
			tonjyiWordPinlw(sourceTxt, target);
		}
	}

	/**
	 * Segments every numerically-named file under c:\exp into a sibling
	 * "fentsiOK_<name>.txt" using the input-method word libraries.
	 *
	 * <p>Fix: {@code File.listFiles()} returns null when the path is missing or
	 * not a directory — fail with a descriptive exception instead of a bare NPE.
	 */
	private static void fentsi4topicAll() {
		String wordLibs = "c:\\wordFromInputmethod_deSingle_addSingword.txt,c:\\word.txt,c:\\word_ext.txt";
		String dirpath = "c:\\exp";
		String targetFilePrefix = "fentsiOK";

		File[] files = new File(dirpath).listFiles();
		if (files == null) {
			throw new IllegalStateException("cannot list directory: " + dirpath);
		}
		for (File file2 : files) {
			String sourceTxt = file2.getAbsolutePath();
			String filename = fileC0.getFilename(sourceTxt);
			if (!god1.isInt(filename))
				continue; // only numerically named topic dumps are segmented
			String target = dirpath + "\\" + targetFilePrefix + "_" + filename
					+ ".txt";
			new fentsiOr().fentsi(sourceTxt, target, wordLibs);
		}
	}

	/** Filters stop words out of the paodin frequency table (fixed paths). */
	private static void filtSyvtsi4WordPinlwTonjyiByPaodin() {
		final String article = "c:\\pinlw_by_paodin.txt";
		final String syvtsiLib = "c:\\word_syvtsi.txt";
		final String target = "c:\\pinlw_by_paodin_filtSyvtsi.txt";
		new fentsiOr().filtSyvtsi4tonjyiPinlw(article, target, syvtsiLib);
	}

	/**
	 * Copies {@code articlePath} to {@code target}, dropping every line whose
	 * first tab-separated column is a stop word from {@code syvtsiLib}.
	 * Progress is printed every 300 lines.
	 */
	public void filtSyvtsi4tonjyiPinlw(String articlePath, String target,
			String syvtsiLib) {
		Set<String> stopWords = getSyvtsiSet(syvtsiLib);
		fileC0 fc = new fileC0();
		List<String> kept = new ArrayList<String>();
		int lineNo = 0;
		for (String line : fc.fileRead2list(articlePath)) {
			lineNo++;
			if (lineNo % 300 == 0)
				System.out.println("---filtSyvtsi4tonjyiPinlw:" + lineNo);
			String word = line.split("\t")[0].trim();
			if (!stopWords.contains(word))
				kept.add(line);
		}
		fc.saveList2file(kept, target);
	}

	/** Loads the stop-word file as a set (utf-8), normalized via clearAdj. */
	private Set<String> getSyvtsiSet(String syvtsiLib) {
		fileC0 fc = new fileC0();
		Set loaded = fc.fileRead2Set(syvtsiLib, "utf-8");
		return fc.clearAdj(loaded);
	}

	/**
	 * Extracts single-character words (first comma-separated column of each line)
	 * from {@code string} and writes them to {@code target}.
	 */
	private static void geneSingleWordFile(String string, String target) {
		fileC0 fc = new fileC0();
		List<String> singles = new ArrayList<String>();
		for (String line : fc.fileRead2list(string)) {
			String word = line.split(",")[0];
			if (word.length() == 1)
				singles.add(word);
		}
		fc.saveList2file(singles, target);
	}

	/** Stop-word-filters the fixed fentsiOK.txt dump into fentsiOK_filted.txt. */
	private static void filtSyvtsiFromFentsiokByIk() {
		new fentsiOr().filtSyvtsi("c:\\fentsiOK.txt", "c:\\fentsiOK_filted.txt",
				"c:\\word_syvtsi.txt");
	}

	/**
	 * Filters stop words from every '|'-separated line of {@code articlePath}
	 * and writes the result to {@code target}. Progress every 300 lines.
	 */
	private void filtSyvtsi(String articlePath, String target, String syvtsiLib) {
		fileC0 fc = new fileC0();
		List<String> filtered = new ArrayList<String>();
		int lineNo = 0;
		for (String line : fc.fileRead2list(articlePath)) {
			lineNo++;
			if (lineNo % 300 == 0)
				System.out.println("---filtSyvtsi:" + lineNo);
			filtered.add(filtSyvtsiSingleLine(line, "\\|", syvtsiLib));
		}
		fc.saveList2file(filtered, target);
	}

	// Lazily-loaded stop-word set shared by filtSyvtsi calls (not thread-safe).
	Set setSyvtsiLib;
	// Dictionary built by iniFentsiWordlib*; consumed by isExistWord(String).
	protected Dictionary dictionaryCad;

	/**
	 * Removes stop words from one segmented line.
	 *
	 * @param line      tokens separated by {@code splitor} (a regex, e.g. "\\|")
	 * @param splitor   regex used to split; list2str strips its backslashes when re-joining
	 * @param syvtsiLib stop-word file path, loaded once and cached in {@link #setSyvtsiLib}
	 * @return surviving tokens re-joined (with a leading separator, as list2str emits)
	 */
	public String filtSyvtsiSingleLine(String line, String splitor,
			String syvtsiLib) {
		if (setSyvtsiLib == null) {
			// First call: load and normalize the stop-word set, then cache it.
			fileC0 fc = new fileC0();
			Set loaded = fc.fileRead2Set(syvtsiLib, "utf-8");
			setSyvtsiLib = fc.clearAdj(loaded);
		}
		List<String> kept = new ArrayList<String>();
		for (String word : line.split(splitor)) {
			if (!setSyvtsiLib.contains(word))
				kept.add(word);
		}
		return list2str(kept, splitor);
	}
	
	
	/**
	 * Removes stop words from an already-split line and de-duplicates the
	 * surviving tokens (first occurrence wins, order preserved).
	 *
	 * @param line_arrFmt  tokens of one line
	 * @param setSyvtsiLib stop words to drop
	 * @return kept tokens joined by a space (leading space included, as list2str emits)
	 */
	public String filtSyvtsiSingleLine(String[] line_arrFmt, Set setSyvtsiLib) {
		Set<String> seen = new HashSet<String>();
		List<String> kept = new ArrayList<String>();
		for (String word : line_arrFmt) {
			// seen.add returns false when the word was already emitted.
			if (!setSyvtsiLib.contains(word) && seen.add(word)) {
				kept.add(word);
			}
		}
		return list2str(kept, " ");
	}

	/**
	 * Joins the words with {@code splitor} after stripping its backslashes (so a
	 * regex like "\\|" joins with a literal '|'). NOTE: the separator is also
	 * emitted before the first word — callers rely on that leading separator.
	 *
	 * <p>Fix: StringBuilder replaces the old O(n^2) += string concatenation.
	 */
	private String list2str(List<String> strLiFilted, String splitor) {
		String sep = splitor.replaceAll("\\\\", "");
		StringBuilder sb = new StringBuilder();
		for (String word : strLiFilted) {
			sb.append(sep).append(word);
		}
		return sb.toString();
	}

	/**
	 * Prefixes every line of {@code path} with its single-character token count
	 * (tab-separated) and writes the result to c:\tonjyiSingleWordCount.txt.
	 * Progress every 100 lines.
	 */
	private static void tonjyiSingleWordCount(String path) {
		fileC0 fc = new fileC0();
		List<String> annotated = new ArrayList<String>();
		int processed = 0;
		for (String line : fc.fileRead2list(path)) {
			processed++;
			if (processed % 100 == 0)
				System.out.println("--now:" + processed);
			annotated.add(getSingleWordCount(line) + "\t" + line);
		}
		fc.saveList2file(annotated, "c:\\tonjyiSingleWordCount.txt", "utf-8");
	}

	/** Counts the '|'-separated tokens of {@code str} whose trimmed length is 1. */
	private static int getSingleWordCount(String str) {
		int singles = 0;
		String[] tokens = str.split("\\|");
		for (int i = 0; i < tokens.length; i++) {
			if (tokens[i].trim().length() == 1)
				singles++;
		}
		return singles;
	}

	/**
	 * Counts word frequencies in {@code path} (line breaks flattened to '|',
	 * then split on '|'), ignoring words of length <= 1, and writes
	 * "word<TAB>count" pairs to c:\map_tab.txt (also echoed to stdout).
	 *
	 * <p>Fix: removed a dead {@code s.split(",")} whose result was immediately
	 * overwritten.
	 */
	private static void tonjyiWordPinlw(String path) {
		fileC0 fc = new fileC0();
		String s = fc.fileRead(path);
		s = s.replaceAll("\r\n", "|");
		String[] a = s.split("\\|");
		Map<String, String> mp = new HashMap<String, String>();
		for (String word : a) {
			word = word.trim();
			if (word.length() <= 1)
				continue;
			if (mp.containsKey(word))
				addCount(mp, word);
			else
				mp.put(word, "1");
		}
		String s2 = saveMap(mp, "c:\\map_tab.txt");
		System.out.println(s2);
	}

	/**
	 * Counts word frequencies in {@code path} (line breaks flattened to '|',
	 * then split on '|'), ignoring words of length <= 1, orders the entries by
	 * value and writes "word<TAB>count" pairs to {@code target}.
	 *
	 * <p>Fix: removed a dead {@code s.split(",")} whose result was immediately
	 * overwritten.
	 */
	private static void tonjyiWordPinlw(String path, String target) {
		fileC0 fc = new fileC0();
		String s = fc.fileRead(path);
		s = s.replaceAll("\r\n", "|");
		String[] a = s.split("\\|");
		Map<String, String> mp = new HashMap<String, String>();
		for (String word : a) {
			word = word.trim();
			if (word.length() <= 1)
				continue;
			if (mp.containsKey(word))
				addCount(mp, word);
			else
				mp.put(word, "1");
		}
		List<Map.Entry<String, String>> mapOrdered = listUtil.orderByValue(mp);
		saveListMap(mapOrdered, target);
	}

	/**
	 * Serializes the ordered entries as "key<TAB>value\r\n" lines, saves them to
	 * {@code target} and returns the serialized text.
	 *
	 * <p>Fix: StringBuilder replaces the old O(n^2) += string concatenation.
	 */
	private static String saveListMap(List<Map.Entry<String, String>> Listmp,
			String target) {
		StringBuilder sb = new StringBuilder();
		for (Entry<String, String> entry : Listmp) {
			sb.append(entry.getKey()).append("\t").append(entry.getValue())
					.append("\r\n");
		}
		String sx = sb.toString();
		new fileC0().save(sx, target);
		return sx;
	}

	/**
	 * Serializes the map as "key<TAB>value\r\n" lines, saves the text to
	 * {@code path} and returns it.
	 *
	 * <p>Fixes: iterates entrySet (one lookup per entry instead of keySet+get)
	 * and builds with StringBuilder instead of O(n^2) += concatenation.
	 */
	private static String saveMap(Map<String, String> mp, String path) {
		StringBuilder sb = new StringBuilder();
		for (Map.Entry<String, String> entry : mp.entrySet()) {
			sb.append(entry.getKey()).append("\t").append(entry.getValue())
					.append("\r\n");
		}
		String sx = sb.toString();
		new fileC0().save(sx, path);
		return sx;
	}

	/** Increments the string-encoded counter stored for {@code word} (key must exist). */
	private static void addCount(Map<String, String> mp, String word) {
		int next = Integer.parseInt(mp.get(word)) + 1;
		mp.put(word, String.valueOf(next));
	}

	/**
	 * Keeps only the space-separated words of {@code s} that appear in the
	 * c:\word.txt lexicon and re-joins them with spaces (leading space included).
	 * NOTE(review): despite the name this is a keep-list filter, not stop-word
	 * removal — it retains matching words.
	 *
	 * @param s space-separated words, e.g. "aaa eee ddd"
	 */
	public String filtSyvtsi(String s) {
		fileC0 fc = new fileC0();
		Set lexicon = fc.fileRead2Set("c:\\word.txt", "utf-8");
		lexicon = fc.clearAdj(lexicon);

		List<String> kept = new ArrayList<String>();
		for (String word : String2List(s)) {
			if (lexicon.contains(word))
				kept.add(word);
		}
		return list2str(kept);
	}

	/**
	 * Keeps only the space-separated words of {@code s} found in the lexicon file
	 * {@code syvtsiLibPath} and re-joins them with spaces (leading space included).
	 *
	 * <p>Fix: {@code syvtsiLibPath} was accepted but ignored — the method always
	 * read the hard-coded c:\word.txt. It now honors the parameter.
	 *
	 * @author attilax aaa ccc ddd &gt;&gt;&gt;
	 */
	public String filteSyvtsi(String s, String syvtsiLibPath) {
		fileC0 fc = new fileC0();
		Set lexicon = fc.fileRead2Set(syvtsiLibPath, "utf-8");
		lexicon = fc.clearAdj(lexicon);

		List<String> kept = new ArrayList<String>();
		for (String word : String2List(s)) {
			if (lexicon.contains(word))
				kept.add(word);
		}
		return list2str(kept);
	}

	/**
	 * Joins the words with single spaces, emitting a leading space before the
	 * first word (callers rely on it; an empty list yields "").
	 *
	 * <p>Fix: StringBuilder replaces the old O(n^2) += string concatenation.
	 */
	private String list2str(List<String> strLiFilted) {
		StringBuilder sb = new StringBuilder();
		for (String word : strLiFilted) {
			sb.append(" ").append(word);
		}
		return sb.toString();
	}

	/** Splits on single spaces and returns the non-empty trimmed tokens. */
	private List<String> String2List(String s) {
		List<String> tokens = new ArrayList<String>();
		for (String part : s.split(" ")) {
			String word = part.trim();
			if (word.length() > 0)
				tokens.add(word);
		}
		return tokens;
	}

	/**
	 * Reads every word file (utf-8), strips the "nnn." index prefix from each
	 * line via clearTsisin and returns the distinct words in first-seen order.
	 */
	@SuppressWarnings("unchecked")
	public List<String> getListFromFile(List<String> filelist) {
		List<String> words = new ArrayList<String>();
		Set<String> seen = new HashSet<String>();
		for (String file : filelist) {
			fileC0 fc = new fileC0();
			List<String> lines = fc.fileRead2list(file, "utf-8");
			for (String line : lines) {
				String word = clearTsisin(line);
				if (word == null)
					continue;
				// seen.add is false when the word was already collected.
				if (seen.add(word)) {
					words.add(word);
				}
			}
		}
		return words;
	}

	/**
	 * Strips a leading "index." prefix from a dictionary line:
	 * "12.word" -&gt; "word"; "word" -&gt; "word"; blank -&gt; null.
	 *
	 * <p>Fix: a line whose split produces no parts (e.g. ".") no longer throws
	 * ArrayIndexOutOfBoundsException — it now yields null like a blank line.
	 */
	private String clearTsisin(String wordNtsisin) {
		wordNtsisin = wordNtsisin.trim();
		if (wordNtsisin.length() == 0)
			return null;
		String[] parts = wordNtsisin.split("\\.");
		if (parts.length == 0)
			return null; // input was only dots
		if (parts.length == 1)
			return parts[0].trim();
		return parts[1].trim();
	}

	/**
	 * Segments {@code string} with the IK analyzer (smart mode) after
	 * initialising the IK default lexicon and injecting a few demo custom words;
	 * tokens are joined by single spaces with a leading space.
	 *
	 * <p>Fixes: a failure in tokenStream() no longer falls through to a
	 * NullPointerException on a null stream; the reader is closed in a finally
	 * block; StringBuilder replaces O(n^2) += concatenation.
	 *
	 * @param string text to segment
	 * @return " tok1 tok2 ..." (empty string when nothing was produced)
	 */
	public String fentsi(String string) {
		Configuration cfg = DefaultConfig.getInstance(); // load default lexicon
		cfg.setUseSmart(true); // smart segmentation mode
		Dictionary.initial(cfg);

		Dictionary dictionary = Dictionary.getSingleton();
		List<String> words = new ArrayList<String>();
		words.add("基础班");
		words.add("高级会计实务");
		words.add("工具包");
		words.add("java语言");
		words.add("轻量级");
		words.add(" 分词");
		dictionary.addWords(words); // register custom words

		Analyzer anal = new IKAnalyzer(true); // true = max word length mode
		StringReader reader = new StringReader(string);
		StringBuilder sb = new StringBuilder();
		try {
			TokenStream ts = anal.tokenStream("", reader);
			CharTermAttribute term = (CharTermAttribute) ts
					.getAttribute(CharTermAttribute.class);
			// NOTE(review): no ts.reset() — mandatory from Lucene 4 onward;
			// the original worked without it, presumably a 3.x-era Lucene.
			while (ts.incrementToken()) {
				sb.append(" ").append(term.toString());
			}
		} catch (Exception e) {
			// Best-effort, as before: report and return what was gathered.
			e.printStackTrace();
		} finally {
			reader.close();
		}
		return sb.toString();
	}

	/** True when {@code s} is a full match in the given dictionary's main dict. */
	public boolean isExistWord(String s, Dictionary dictionary) {
		return dictionary.matchInMainDict(s.toCharArray()).isMatch();
	}

	/**
	 * True when {@code s} is a full match in the cached dictionary.
	 * Requires iniFentsiWordlib*() to have populated {@link #dictionaryCad} first.
	 */
	public boolean isExistWord(String s) {
		return dictionaryCad.matchInMainDict(s.toCharArray()).isMatch();
	}

	/**
	 * Splits the comma-separated path list and delegates to
	 * {@link #getListFromFile(List)}.
	 */
	public List<String> getListFromFile(String string) {
		List<String> paths = new ArrayList<String>(Arrays.asList(string.split(",")));
		return getListFromFile(paths);
	}

	// Guards one-time dictionary initialisation. NOTE(review): not thread-safe.
	public boolean isLoadDic = false;

	// Optional custom token separator for fentsi(String, String); '|' when null.
	String returnSplito;

	/**
	 * Segments {@code string} with the IK analyzer using the default lexicon
	 * plus the word files listed (comma-separated) in {@code wordLibs}. The
	 * dictionary is initialised once, guarded by {@link #isLoadDic}.
	 *
	 * <p>Fixes: removed a long chain of dead sample-text assignments (only the
	 * final {@code text = string} mattered) and an unused fileC0 instance;
	 * StringBuilder replaces O(n^2) += concatenation; the seed word was added
	 * three times — once suffices.
	 *
	 * @param string   text to segment
	 * @param wordLibs comma-separated custom word-library paths
	 * @return tokens joined by {@link #returnSplito} (default "|"), with leading
	 *         and trailing '|' trimmed via strUtil.trimx
	 */
	public String fentsi(String string, String wordLibs) {
		try {
			if (isLoadDic == false) {
				Configuration cfg = DefaultConfig.getInstance(); // default lexicon
				cfg.setUseSmart(true); // smart segmentation
				Dictionary.initial(cfg);

				Dictionary dictionary = Dictionary.getSingleton();
				List<String> words = new ArrayList<String>();
				words.add("犀利"); // seed word
				words.addAll(getListFromFile(wordLibs));
				dictionary.addWords(words); // register custom words
				isLoadDic = true;
			}

			Analyzer anal = new IKAnalyzer(true); // true = max word length mode
			StringReader reader = new StringReader(string);
			TokenStream ts = anal.tokenStream("", reader);
			CharTermAttribute term = (CharTermAttribute) ts
					.getAttribute(CharTermAttribute.class);

			String sep = (this.returnSplito != null) ? this.returnSplito : "|";
			StringBuilder sb = new StringBuilder();
			while (ts.incrementToken()) {
				sb.append(term.toString()).append(sep);
			}
			String rc0 = strUtil.trimx("|", sb.toString());
			reader.close();
			return rc0;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	// Unused segmenter slot kept for compatibility (historical experiment).
	IKSegmenter ikCae;

	/**
	 * Segments {@code string} with the IK analyzer using ONLY the custom word
	 * libraries (mycfg suppresses the IK built-in lexicon). The dictionary is
	 * initialised once, guarded by {@link #isLoadDic}.
	 *
	 * <p>Cleanup: removed dead sample text, an unused fileC0 instance and the
	 * large commented-out IKSegmenter experiment; behavior is unchanged.
	 *
	 * @param string   text to segment
	 * @param wordLibs comma-separated custom word-library paths
	 * @return tokens joined by '|', with a trailing '|'
	 */
	public String fentsi_disableDefaultTsiku(String string, String wordLibs) {
		try {
			if (isLoadDic == false) {
				iniFentsiWordlib(wordLibs);
				System.out.println("--note:ini iniFentsiWordlib");
			}

			Analyzer anal = new IKAnalyzer(true); // true = max word length mode
			StringReader reader = new StringReader(string);
			TokenStream ts = anal.tokenStream("", reader);
			CharTermAttribute term = (CharTermAttribute) ts
					.getAttribute(CharTermAttribute.class);

			StringBuilder sb = new StringBuilder();
			while (ts.incrementToken()) {
				sb.append(term.toString()).append("|");
			}
			reader.close();
			return sb.toString();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Initialises the IK dictionary from mycfg (custom config, measured ~283ms)
	 * and loads the word files listed in {@code wordLibs}; caches the Dictionary
	 * in {@link #dictionaryCad} and sets {@link #isLoadDic}.
	 *
	 * <p>Cleanup: the seed word "犀利" was added three times (once suffices) and
	 * a throwaway fentsiOr was allocated just to call getListFromFile, which is
	 * an instance method of this very class.
	 *
	 * @param wordLibs comma-separated custom word-library paths
	 */
	private void iniFentsiWordlib(String wordLibs) {
		Configuration cfg = new mycfg(); // custom config: IK default lexicon disabled
		cfg.setUseSmart(true); // smart segmentation
		Dictionary.initial(cfg);

		Dictionary dictionary = Dictionary.getSingleton();
		List<String> words = new ArrayList<String>();
		words.add("犀利"); // seed word
		words.addAll(getListFromFile(wordLibs));
		dictionary.addWords(words); // register custom words
		isLoadDic = true;
		this.dictionaryCad = dictionary;
	}
	
	
	/**
	 * Initialises the IK dictionary from mycfg and loads the custom words from
	 * the database (word_main table); caches the Dictionary in
	 * {@link #dictionaryCad} and sets {@link #isLoadDic}.
	 *
	 * <p>Cleanup: the seed word "犀利" was added three times — once suffices.
	 */
	public void iniFentsiWordlibFromdb() {
		Configuration cfg = new mycfg(); // custom config: IK default lexicon disabled
		cfg.setUseSmart(true); // smart segmentation
		Dictionary.initial(cfg);

		Dictionary dictionary = Dictionary.getSingleton();
		List<String> words = new ArrayList<String>();
		words.add("犀利"); // seed word
		words.addAll(getWordlistFromdb());
		dictionary.addWords(words); // register custom words
		isLoadDic = true;
		this.dictionaryCad = dictionary;
	}

	/**
	 * Loads every non-deleted word from the word_main table.
	 *
	 * <p>Fix: the connection is now closed in a finally block — the original
	 * rethrew before reaching {@code c.close()}, leaking the connection on any
	 * SQLException.
	 *
	 * @return words in result-set order
	 */
	protected List<String> getWordlistFromdb() {
		List<String> li = new ArrayList<String>();
		// because db is utf8... so length 15 = 5 chars (hezi)
		String sql = "	select  SQL_NO_CACHE  word  from word_main where del=0 ";

		Dbcontroller c = new DbNdsController();
		try {
			ResultSet rs = c.getrs(sql);
			while (rs.next()) {
				li.add(rs.getString("word"));
			}
		} catch (SQLException e) {
			e.printStackTrace();
			throw new RuntimeException(e);
		} finally {
			c.close();
		}
		return li;
	}

	// Lines of the file/list currently being segmented.
	List<String> lica7_ca25;

	/**
	 * Segments every line of {@code sourceTxt} (utf-8) and writes the results to
	 * {@code targetTxt}. Uses the default IK lexicon when
	 * {@link #isUseDefaultWordlib} is set, the custom-only lexicon otherwise.
	 * Progress every 300 lines.
	 *
	 * <p>Fix: removed a dead {@code fc.fileRead(sourceTxt)} that loaded the
	 * whole file into a string nobody used.
	 */
	@SuppressWarnings("unchecked")
	public void fentsi(String sourceTxt, String targetTxt, String wordLibs) {
		int n = 0;
		List<String> li_r = new ArrayList<String>();
		fileC0 fc = new fileC0();
		lica7_ca25 = fc.fileRead2list(sourceTxt, "utf-8");
		for (String str : lica7_ca25) {
			n++;
			if (n % 300 == 0)
				System.out.println("---alreay proceed :" + n);
			String str_fentsi_oked;
			if (isUseDefaultWordlib == true)
				str_fentsi_oked = fentsi(str, wordLibs);
			else
				str_fentsi_oked = fentsi_disableDefaultTsiku(str, wordLibs);
			li_r.add(str_fentsi_oked);
		}
		fc.saveList2file(li_r, targetTxt, "utf-8");
	}
	
	/**
	 * Segments every string of {@code li} and returns the results in order.
	 * Uses the default IK lexicon when {@link #isUseDefaultWordlib} is set, the
	 * custom-only lexicon otherwise. Progress every 300 items.
	 *
	 * <p>Fix: removed an unused fileC0 instance.
	 */
	public List<String> fentsi(List<String> li, String wordLibs) {
		int n = 0;
		List<String> li_r = new ArrayList<String>();
		lica7_ca25 = li;
		for (String str : lica7_ca25) {
			n++;
			if (n % 300 == 0)
				System.out.println("---alreay proceed :" + n);
			String str_fentsi_oked;
			if (isUseDefaultWordlib == true)
				str_fentsi_oked = fentsi(str, wordLibs);
			else
				str_fentsi_oked = fentsi_disableDefaultTsiku(str, wordLibs);
			li_r.add(str_fentsi_oked);
		}
		return li_r;
	}

	// Result produced by the background thread for the first slice.
	List<String> lica7_half1_result;
	// Flag polled by the main thread; set by the worker when its slice is done.
	// NOTE(review): not volatile — cross-thread visibility is not guaranteed; confirm.
	boolean half1_isfinesh=false;
	// First slice (~55%) of the input lines, consumed by the background thread.
	 List<String> lica7_half1;

	/**
	 * Segments a large file by splitting it into two slices processed
	 * concurrently: a background thread takes the first ~55%, the calling thread
	 * takes the rest, then the caller busy-waits (50ms sleeps) until the worker
	 * finishes and concatenates both results.
	 *
	 * <p>NOTE(review): the final saveList2file call is commented out, so the
	 * combined result is currently discarded — confirm whether that is intended.
	 *
	 * @author attilax 1466519819@qq.com
	 * @param sourceTxt input file (utf-8), one text per line
	 * @param targetTxt output path (currently unused — see note above)
	 * @param wordLibs
	 *            for file line >5w bacause 50w time tooo long ,gujyi
	 *            1min...muset rewrite jeig swefa.
	 * @since cae
	 */
	public void fentsi4bigfile(String sourceTxt, String targetTxt,
			final String wordLibs) {

		// String wordLibs =
		// "c:\\wordFromInputmethod.txt,c:\\word.txt,c:\\ext.txt";
		// String sourceTxt = "c:\\sincin.txt";
		// String targetTxt = "c:\\fentsiOK.txt";
		int n = 0;
		List<String> li_r = new ArrayList<String>();

		fileC0 fc = new fileC0();
		// String text = fc.fileRead(sourceTxt);
		// if(true)return;
		List<String> lica7 = fc.fileRead2list(sourceTxt, "utf-8");
		// Slice boundary: 55% of the lines go to the background thread.
		int mid = lica7.size() / 100;
		int half1_int=mid*55;
		lica7_half1  = lica7.subList(0, half1_int);
		List<String> lica7_half2 = lica7.subList(half1_int, lica7.size());
		List<String> lica7_half2_result;
		// n = fentsiMuilttheard(wordLibs, n, lica7);
//		timeTester t=new timeTester(" cax");
//		fentsiOr ftc2 = new fentsiOr();
//		ftc2.iniFentsiWordlib(wordLibs);
//		t.printUseTime();
//		if(true)
//		return;
		int ax=2;
		if (ax == 2) {
			// Background worker: segments the first slice on its own fentsiOr
			// instance and publishes the result via the shared fields above.
			Thread td=	new Thread(new Runnable() {
				public void run() {

					try {
						fentsiOr ftc = new fentsiOr();
						lica7_half1_result = ftc.fentsiMuilttheardHabinlist(
								lica7_half1, wordLibs, "thd1");
						half1_isfinesh = true;

					} catch (Exception e) {
						e.printStackTrace();
					}
					System.out.println(Thread.currentThread().getName());

				}
			});
			td.setPriority(Thread.MAX_PRIORITY);
			td.setName("--thd ca");
			td.start();
		} else {
			// Dead branch (ax is always 2): single-threaded fallback kept for experiments.
			lica7_half1_result = new ArrayList<String>();

			half1_isfinesh = true;
			lica7_half2 = lica7.subList(0, 500000);
		}
		
		// Main thread segments the second slice while the worker runs.
		fentsiOr ftc = new fentsiOr();
		lica7_half2_result = ftc.fentsiMuilttheardHabinlist(lica7_half2, wordLibs,				"thd_main");	
		
		
		
		// Busy-wait (poll every 50ms) until the background slice is finished.
		int nn = 0;
		while (true) {
			if (half1_isfinesh) {
				break;
			}
			nn++;
			try {
				Thread.sleep(50);
				System.out.println("--sleep 500 ,num:" + nn);
			} catch (InterruptedException e) {

				e.printStackTrace();
				throw new RuntimeException(e);
			}

		}
		
		// Concatenate worker + main results; the save below is disabled —
		// see the NOTE(review) in the javadoc.
		 List<String> li_all=new ArrayList<String>();
		 li_all.addAll(lica7_half1_result);
		 li_all.addAll(lica7_half2_result);
	//	  fc.saveList2file(li_all, targetTxt, "utf-8");

	}

	/**
	 * Segments a whole list in a single analyzer pass: all lines are joined
	 * with a sentinel token, segmented once, then split back apart on the
	 * sentinel. This avoids the per-line analyzer start-up cost of
	 * {@code fentsiMuilttheard}.
	 *
	 * <p>NOTE(review): this assumes the segmenter passes the literal
	 * "asplitor" through unaltered; if the dictionary ever splits that token,
	 * line boundaries are lost — confirm against the word libraries in use.
	 *
	 * @param lica7      input lines
	 * @param wordLibs   comma-separated word-library file paths
	 * @param threadName label used in the progress log output
	 * @return one segmented string per recovered chunk
	 */
	private List<String> fentsiMuilttheardHabinlist(List<String> lica7,
			String wordLibs, String threadName) {
		int n = 0;
		StringBuilder sb = new StringBuilder();
		String splitor = "asplitor";
		for (String str : lica7) {
			n++;
			if (n % 5000 == 0)
				System.out
						.println("--" + threadName + "-alreay proceed :" + n);

			sb.append(str).append(splitor);
		}

		String strAddSplit = sb.toString();
		String str_fentsi_oked = fentsi_disableDefaultTsiku(strAddSplit, wordLibs);
		String[] arr = str_fentsi_oked.split(splitor);
		// Wrap in a real ArrayList: Arrays.asList() returns a fixed-size view
		// that throws UnsupportedOperationException on add/remove, which would
		// surprise any caller that tries to mutate the returned list.
		return new ArrayList<String>(Arrays.asList(arr));
	}
	
	/**
	 * Segments each line individually, one analyzer call per line (slower than
	 * the batched {@code fentsiMuilttheardHabinlist} but with no sentinel-token
	 * assumption).
	 *
	 * @param lica7      input lines
	 * @param wordLibs   comma-separated word-library file paths
	 * @param threadName label used in the progress log output
	 * @return one segmented string per input line, in order
	 */
	private List<String> fentsiMuilttheard(List<String> lica7,
			String wordLibs, String threadName) {
		int n = 0;
		List<String> li_r = new ArrayList<String>();
		for (String str : lica7) {
			n++;
			if (n % 5000 == 0)
				System.out
						.println("--" + threadName + "-alreay proceed :" + n);

			li_r.add(fentsi_disableDefaultTsiku(str, wordLibs));
		}

		return li_r;
	}
	public List<String> s_fentsied_listfmt=new ArrayList<String>();
	/**
	 * Segments {@code string} with the IK analyzer, lazily initializing the
	 * word library from the database on first use.
	 *
	 * <p>Side effect: the individual tokens are also stored (in order) in the
	 * public field {@link #s_fentsied_listfmt}, which is reset on every call.
	 *
	 * @param string text to segment
	 * @return the tokens joined with "|" (a trailing "|" is included)
	 * @throws RuntimeException wrapping any checked exception from the
	 *         analyzer or the dictionary load
	 */
	public String fentsiByDBwordlib(String string) {
		s_fentsied_listfmt = new ArrayList<String>();
		try {
			// Lazily load the custom dictionary from the DB exactly once.
			if (isLoadDic == false) {
				iniFentsiWordlibFromdb();
				System.out.println("--note:ini iniFentsiWordlib ca25");
			}

			// true = max-word-length (coarse-grained) segmentation mode.
			Analyzer anal = new IKAnalyzer(true);
			StringReader reader = new StringReader(string);

			TokenStream ts = anal.tokenStream("", reader);
			CharTermAttribute term = (CharTermAttribute) ts.getAttribute(CharTermAttribute.class);

			// Walk the token stream, collecting both the joined form and the
			// per-token list.
			// NOTE(review): Lucene 4+ requires ts.reset() before
			// incrementToken(); the original omits it, so this code targets
			// Lucene 3.x — confirm before upgrading.
			StringBuilder sb = new StringBuilder();
			while (ts.incrementToken()) {
				String w = term.toString();
				sb.append(w).append("|");
				s_fentsied_listfmt.add(w);
			}
			reader.close();
			return sb.toString();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}
	
	
	
	/**
	 * Segments {@code text} with the IK analyzer via the Lucene TokenStream
	 * API (no dictionary initialization, no side effects on fields).
	 *
	 * @param text text to segment
	 * @return the tokens joined with "|" (a trailing "|" is included)
	 * @throws RuntimeException wrapping any checked exception from the
	 *         analyzer
	 */
	public String fentsiByLuceneImp(String text) {
		try {
			// true = max-word-length (coarse-grained) segmentation mode.
			Analyzer anal = new IKAnalyzer(true);
			StringReader reader = new StringReader(text);
			try {
				TokenStream ts = anal.tokenStream("", reader);
				CharTermAttribute term = (CharTermAttribute) ts.getAttribute(CharTermAttribute.class);

				// NOTE(review): Lucene 4+ requires ts.reset() before
				// incrementToken(); the original omits it, so this code
				// targets Lucene 3.x — confirm before upgrading.
				StringBuilder sb = new StringBuilder();
				while (ts.incrementToken()) {
					sb.append(term.toString()).append("|");
				}
				return sb.toString();
			} finally {
				// Close on all paths; the original leaked the reader when
				// tokenization threw.
				reader.close();
			}
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

}
