package com.huiquan.sphinx;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;

import com.huiquan.analysis.domain.AnalysisSentence;

/**
 * Manual test harness for running queries against a Sphinx search server
 * and for dumping analysis sentences to a local file.
 *
 * @author leo
 * @time May 8, 2017 10:01:08 AM
 */
public class Test1 {

	public static void main(String[] args) throws SphinxException, IOException {
		/*
		 * File file = new File("G:\\ID_List.txt"); InputStreamReader read = new
		 * InputStreamReader(new FileInputStream(file), "UTF-8"); BufferedReader
		 * bf = new BufferedReader(read); String lineTxt = null; List<Long>
		 * idList = new ArrayList<Long>(); while ((lineTxt = bf.readLine()) !=
		 * null) { if (!"".equals(lineTxt)) { idList.add(Long.valueOf(lineTxt));
		 * } } read.close();
		 */
		// TODO Auto-generated method stub
		String sphinxHost = "172.16.0.147";
		int sphinxPort = 9312;
		SphinxClient sphinxClient = new SphinxClient(sphinxHost, sphinxPort);
		// 设置匹配模式
		// SPH_MATCH_ALL, 匹配所有查询词(默认模式)；
		// SPH_MATCH_ANY, 匹配查询词中的任意一个；
		// SPH_MATCH_PHRASE, 将整个查询看作一个词组，要求按顺序完整匹配；
		// SPH_MATCH_BOOLEAN, 将查询看作一个布尔表达式 ；
		// SPH_MATCH_EXTENDED, 将查询看作一个Sphinx内部查询语言的表达式。
		// 这个选项被选项SPH_MATCH_EXTENDED2代替，它提供了更多功能和更佳的性能。保留这个选项是为了与遗留的旧代码兼容——这样即使Sphinx及其组件包括API升级的时候，旧的应用程序代码还能够继续工作。
		// SPH_MATCH_EXTENDED2, 使用第二版的“扩展匹配模式”对查询进行匹配；
		// SPH_MATCH_FULLSCAN,
		// 强制使用下文所述的“完整扫描”模式来对查询进行匹配。注意，在此模式下，所有的查询词都被忽略，尽管过滤器、过滤器范围以及分组仍然起作用，但任何文本匹配都不会发生；
		// sphinxClient.SetMatchMode(SphinxClient.SPH_MATCH_EXTENDED2);
		// 设置排序模式，根据排序属性排序，属性指定 为group_id
		// SPH_SORT_RELEVANCE 模式, 按相关度降序排列（最好的匹配排在最前面）
		// SPH_SORT_ATTR_DESC 模式, 按属性降序排列 （属性值越大的越是排在前面）
		// SPH_SORT_ATTR_ASC 模式, 按属性升序排列（属性值越小的越是排在前面）
		// SPH_SORT_TIME_SEGMENTS 模式, 先按时间段（最近一小时/天/周/月）降序，再按相关度降序
		// SPH_SORT_EXTENDED 模式, 按一种类似SQL的方式将列组合起来，升序或降序排列。
		// SPH_SORT_EXPR 模式，按某个算术表达式排序。
		// sphinxClient.SetSortMode(SphinxClient.SPH_SORT_ATTR_DESC,
		// "gmt_modified");
		// 分页查询的范围
		sphinxClient.SetLimits(0, 200000, 200000);
		// 设置排序模式
		sphinxClient.SetSortMode(SphinxClient.SPH_SORT_ATTR_DESC, "gmt_modified");
		// sphinxClient.SetSelect("sentence");
		// true为！=，false为=
		sphinxClient.SetFilter("label_status", 0, false);
		// 查询
		// SphinxResult sphinxResult =
		// sphinxClient.Query("(@idStr=15761648|15794419|15720683|15696104|15786231|15778040|15663334|15704294|15745276|15655136|15753471|15737058|15753444|15687930|15704314|15712505|15778023|15761640|15655159|15778025|15655156|15786218|15687923|15720691|15737069|15786221|15704269|15663308|15696076|15696073|15728861|15720669|15737026|15786180|15720665|15679702|15753418|15728843|15696082|15679697|15786190|15728819|15712425|15663272|15745208|15802552|15679652|15745211|15712418|15704225|15737022|15761598|15777982|15802558|15736994|15704252|15696055|15745192|15794345|15663285|15761580|15655088|15655089|15745199|15802513|15786129|15704205|15655049|15745175|15802518|15655047|15663237|15786143|15696031|15712415|15802497|15663258|15769734|15663256|15671448|15663255|15802504|15728779|15736973|15777933|15728783|15786127|15786096|15753330|15736948|15704168|15794294|15679590|15655010|15728764|15695999|15745121|15802464|15769698|15761507|15736937|15761513|15687797|15679603|15794257|15736913|15736916|15786069|15654985|15769689|15704132|15802460|15745118|15777858|15753284|15745093|15769672|15802440|15753290|15720532|15753291|15720530|15786063|15728690|15777842|15794226|15654954|15745077|15687720|15712296|15654950|15745080|15728698|15704099|15736893|15736865|15720509|15687740|15728676|15802405|15769639|15777831|15663159|15704118|15704117|15745066|15679539|15704079|15753236|15769620|15786004|15712264|15777816|15794203|15786011|15695900|15720476|15736837|15745031|15663127|15761418|15777802|15671315|15663120|15745522|15688171|15770101|15753718|15655397|15802876|15679969|15794687|15663584|15778303|15802878|15761889|15737315|15671803|15696378|15770085|15786470|15663606|15753706|15671792|15753711|15770068|15770073|15778265|15802843|15745501|15794652|15704543|15655391|15696350|15704538|15671768|15712728|15720920|15761864|15745484|15770060|15753678|15778254|15696336|15720912|15745487|15720879|15671725|15778228|15778231|15663527|15696293|15778236|15802815|15712672|15688127|15720895|15679934|15770
017|15794592|15720889|15704504|15753642|15729067|15655346|15720883|15770028|15778220|15802797|15745453|15753645|15712688|15696271|15712655|15729040|15778192|15802771|15704458|15720841|15761817|15761818|15720836|15761819|15786398|15655325|15712666|15794564|15712664|15720852|15737228|15794573|15679890|15688080|15769970|15671657|15712616|15712615|15761784|15712614|15671651|15679871|15728992|15802721|15737186|15794531|15655291|15696247|15704439|15663477|15712629|15745386|15794539|15712628|15679857|15663437|15745366|15761753|15745373|15671616|15753567|15704415|15704414|15778113|15728967|15778119|15761736|15712598|15737161|15761717|15786293|15663400|15655206|15655203|15679777|15679805|15720761|15761702|15728936|15737128|15786280|15769897|15663411|15704371|15753518|15745297|15679757|15728915|15769876|15696138|15679749|15778075|15769885|1570435000000)",
		// "sentence1");
		// SphinxResult sphinxResult = sphinxClient.Query("(@vocabulary
		// =\"^测试$\")", "vocabulary1");
		SphinxResult sphinxResult = sphinxClient.Query("(@vocabulary=\"^头$\")&(@property=O)", "vocabulary1|vocabulary1_delta");
		
		// 检索后匹配到的内容
		// OutputStreamWriter out = new OutputStreamWriter(new
		// FileOutputStream("G:\\out.txt"), "UTF-8");
		for (SphinxMatch m : sphinxResult.getMatches()) {
			// System.out.println(sphinxClient.UpdateAttributesMVA("vocabulary1|vocabulary1_delta",
			// m.docId,
			// new String[] { "label_status" }, new int[][] { new int[] { -1 }
			// }));
			long[][] value = new long[1][2];
			value[0][0] = m.docId;
			value[0][1] = -1L;
			/*
			 * System.out.println(sphinxClient.UpdateAttributes("vocabulary1",
			 * new String[] { "label_status" }, value));
			 */
			// 将更新写入到硬盘的索引中
			// sphinxClient.FlushAttributes();
			System.out.println(m.getAttrValues().get(3).toString());
			// 得到所到记录Id
			/*
			 * if (!idList.contains(m.getDocId())) {
			 * out.write(String.valueOf(m.getDocId())); out.write("\n"); }
			 */

		}
		// out.flush();
		// out.close();

		System.out.println(sphinxResult.getMatches().length);

	}

	public static void output(List<AnalysisSentence> list) throws IOException {
		OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream("G:\\sentence2.txt"), "UTF-8");
		for (AnalysisSentence a : list) {
			out.write(a.getSentence());
			out.write("\n");
		}
		out.flush();
		out.close();
	}

}
