package esdemo;

import java.net.InetAddress;
import java.net.UnknownHostException;

import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.sort.SortParseElement;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.elasticsearch.index.query.QueryBuilders.*;

/**
 * Examples of the Elasticsearch 2.x TransportClient search APIs: basic search,
 * scroll, multi-search, aggregations and early termination.
 *
 * <p>Each test talks to a live node on localhost:9300 — these are integration
 * examples, not unit tests.
 */
public class SearchApi {
	private TransportClient client;

	@Before
	public void init() throws UnknownHostException {
		client = TransportClient.builder().build()
				.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300));
	}

	/**
	 * Close the transport client so each test run does not leak the client's
	 * worker threads and sockets.
	 */
	@After
	public void tearDown() {
		if (client != null) {
			client.close();
		}
	}

	/**
	 * Basic search: the body of the search request is built using the
	 * SearchSourceBuilder behind the fluent prepareSearch() API.
	 */
	@Test
	public void test01() {
		SearchResponse response = client.prepareSearch("twitter", "index").setTypes("tweet", "type")
				.setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(QueryBuilders.termQuery("user", "kimchy2")) // Query
				// .setPostFilter(QueryBuilders.rangeQuery("age").from(12).to(18))
				// // Filter
				.setFrom(0).setSize(60).setExplain(true).execute().actionGet();

		System.out.println(response);
	}

	/**
	 * Scroll search: keeps a server-side search context alive and pages through
	 * the whole result set in fixed-size batches.
	 */
	@Test
	public void usingSrcolls() {
		QueryBuilder qb = termQuery("user", "kimchy2");
		SearchResponse scrollResp = client.prepareSearch("twitter")
				// Sort by _doc: fastest order for plain scrolling.
				.addSort(SortParseElement.DOC_FIELD_NAME, SortOrder.ASC)
				// How long the search context should be kept alive between pages.
				.setScroll(new TimeValue(60000)).setQuery(qb)
				// Hits returned per shard for each scroll page.
				.setSize(2).execute().actionGet();

		// Scroll until no hits are returned.
		while (true) {
			System.out.println("scroll...............");
			for (SearchHit hit : scrollResp.getHits().getHits()) {
				// Handle the hit...
				System.out.println(hit.getSourceAsString());
			}
			scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(60000)).execute()
					.actionGet();
			// Break condition: no hits are returned.
			if (scrollResp.getHits().getHits().length == 0) {
				break;
			}
		}

		// Release the server-side search context instead of waiting for the
		// keep-alive to expire.
		client.prepareClearScroll().addScrollId(scrollResp.getScrollId()).execute().actionGet();
	}

	/**
	 * Multi-search: executes several search requests in a single round trip and
	 * sums the total hits of the successful responses.
	 */
	@Test
	public void multiSearch() {
		SearchRequestBuilder srb1 = client.prepareSearch().setQuery(QueryBuilders.queryStringQuery("name")).setSize(1);
		SearchRequestBuilder srb2 = client.prepareSearch().setQuery(QueryBuilders.matchQuery("name", "kimchy2"))
				.setSize(1);
		SearchRequestBuilder srb3 = client.prepareSearch().setQuery(termQuery("user", "kimchy2")).setSize(1);

		MultiSearchResponse sr = client.prepareMultiSearch().add(srb1).add(srb2).add(srb3).execute().actionGet();

		// All individual responses come from MultiSearchResponse#getResponses().
		long nbHits = 0;
		for (MultiSearchResponse.Item item : sr.getResponses()) {
			if (item.isFailure()) {
				// A failed sub-request yields a null response; skip it instead
				// of dereferencing null below.
				System.out.println("search failed: " + item.getFailureMessage());
				continue;
			}
			SearchResponse response = item.getResponse();
			nbHits += response.getHits().getTotalHits();
			System.out.println(response);
		}
		System.out.println(nbHits);
	}

	/**
	 * Aggregations: the Elasticsearch equivalent of SQL's GROUP BY — a terms
	 * aggregation on "user" and a per-year date histogram on "birth".
	 */
	@Test
	public void aggregations() {
		SearchResponse sr = client.prepareSearch().setQuery(QueryBuilders.matchAllQuery())
				.addAggregation(AggregationBuilders.terms("agg1").field("user"))
				.addAggregation(
						AggregationBuilders.dateHistogram("agg2").field("birth").interval(DateHistogramInterval.YEAR))
				.execute().actionGet();

		// Get your aggregation results by name.
		Terms agg1 = sr.getAggregations().get("agg1");
		// Terms agg2 = sr.getAggregations().get("agg2");
		System.out.println(agg1);
	}

	/**
	 * Early termination: setTerminateAfter caps the number of documents
	 * collected per shard; once reached, the query stops early and the response
	 * reports it through isTerminatedEarly().
	 */
	@Test
	public void terminateAfter() {
		// Stop collecting after 1 document per shard.
		SearchResponse sr = client.prepareSearch("twitter")
				.setTerminateAfter(1)
				.get();

		if (sr.isTerminatedEarly()) {
			System.out.println("query terminated early");
		}
		System.out.println(sr);
	}
}
