package com.seven.search;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Map;
import java.util.concurrent.ExecutionException;

import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Basic Elasticsearch operations (add/remove nodes, index, search, update,
 * delete, highlight search) implemented on top of the transport client.
 *
 * @author zfh
 * @since 2016-12-29
 */
public class ElasticsearchHandler {

	private static final Logger logger = LoggerFactory.getLogger(ElasticsearchHandler.class);

	/**
	 * Adds a transport node to the shared client.
	 *
	 * @param ipAddress node IP address
	 * @param port      node transport port (typically 9300)
	 */
	public void addNode(String ipAddress, int port) {
		try {
			// Bug fix: the parameters were previously ignored and a hard-coded
			// address (192.168.88.244:9300) was always used.
			ElasticsearchFactory.getInstance()
					.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(ipAddress), port));
		} catch (UnknownHostException e) {
			// Pass the exception itself so the stack trace reaches the log
			// (printStackTrace() only wrote to stderr).
			logger.error("add node {}:{} error", ipAddress, port, e);
		}
	}

	/**
	 * Closes the shared transport client and releases its resources.
	 */
	public void closeSearch() {
		ElasticsearchFactory.getInstance().close();
	}

	/**
	 * Removes a transport node from the shared client.
	 *
	 * @param ipAddress node IP address
	 * @param port      node transport port
	 */
	public void removeNode(String ipAddress, int port) {
		try {
			// Bug fix: use the supplied address instead of the hard-coded one.
			ElasticsearchFactory.getInstance().removeTransportAddress(
					new InetSocketTransportAddress(InetAddress.getByName(ipAddress), port));
		} catch (UnknownHostException e) {
			logger.error("remove node {}:{} error", ipAddress, port, e);
		}
	}

	/**
	 * Indexes a single document.
	 *
	 * @param index index name
	 * @param type  mapping type
	 * @param data  document source (JSON string)
	 * @return {@code true} if a new document was created
	 */
	public boolean addIndex(String index, String type, String data) {
		TransportClient client = ElasticsearchFactory.getInstance();
		IndexResponse response = client.prepareIndex(index, type).setSource(data).get();
		return response.status() == RestStatus.CREATED;
	}

	// ik_max_word is mostly used at index time, while
	// ik_smart is more commonly used to analyze the user's query input.
	// ik_max_word indexing was observed to be faster than ik_smart
	// (result of personal experiments with both analyzers; corrections welcome).
	// Three ways were eventually found to specify the analyzer.
	// Implements IK analysis, pinyin analysis, paged queries and search.
	/**
	 * Builds a mapping that configures the IK analyzer for the text fields of
	 * the given type: {@code analyzer} is used at index time,
	 * {@code search_analyzer} at query time.
	 *
	 * @param indexType mapping type name (comparable to a table in a database)
	 * @return the mapping as an {@link XContentBuilder}, or {@code null} if
	 *         building it failed
	 */
	public XContentBuilder addIkAnalysizMapping(String indexType) {
		XContentBuilder mapping = null;
		try {
			mapping = XContentFactory.jsonBuilder()
					.startObject()
						.startObject(indexType)
							.startObject("properties")
								// NOTE(review): the index-time analyzer is "ik"; per the notes
								// above "ik_max_word" may be intended — confirm plugin config.
								.startObject("product_name").field("type", "text").field("analyzer", "ik").field("search_analyzer", "ik_smart").endObject()
								.startObject("title_sub").field("type", "text").field("analyzer", "ik").field("search_analyzer", "ik_smart").endObject()
								.startObject("title_primary").field("type", "text").field("analyzer", "ik").field("search_analyzer", "ik_smart").endObject()
								.startObject("publisher").field("type", "text").field("analyzer", "ik").field("search_analyzer", "ik_smart").endObject()
								.startObject("author_name").field("type", "text").field("analyzer", "ik").field("search_analyzer", "ik_smart").endObject()
							.endObject()
						.endObject()
					.endObject();
		} catch (IOException e) {
			logger.error("build ik mapping for type {} error", indexType, e);
		}
		return mapping;
	}

	/**
	 * Runs a multi-match query and returns the raw hits. For a single-field
	 * keyword query use {@code QueryBuilders.termQuery(field, keyword)} instead.
	 *
	 * @param index    index name
	 * @param type     mapping type
	 * @param keyWorld search keyword
	 * @param fields   fields to match against
	 * @return the hits, or {@code null} when nothing matched
	 */
	public SearchHits searIndexData(String index, String type, String keyWorld, String... fields) {
		TransportClient client = ElasticsearchFactory.getInstance();
		QueryBuilder bulider = QueryBuilders.multiMatchQuery(keyWorld, fields);

		SearchResponse response = client.prepareSearch(index).setTypes(type).setQuery(bulider).setFetchSource(true)
				.clearRescorers().setRequestCache(false).execute().actionGet();
		SearchHits hits = response.getHits();

		return hits.totalHits() > 0 ? hits : null;
	}

	/**
	 * Partially updates a document using the given field map.
	 *
	 * @param index index name
	 * @param type  mapping type
	 * @param id    document id
	 * @param map   fields to update
	 * @throws IOException declared for backward compatibility with callers
	 */
	public void updateIndexData(String index, String type, String id, Map<String, Object> map) throws IOException {
		TransportClient client = ElasticsearchFactory.getInstance();

		UpdateRequest updateRequest = new UpdateRequest();
		// Refresh immediately so the change is visible to the next search.
		updateRequest.index(index).type(type).id(id).doc(map).setRefreshPolicy(RefreshPolicy.IMMEDIATE);

		try {
			// Bug fix: the request was previously executed twice — once here and
			// once more through a bulk request — applying the update twice.
			client.update(updateRequest).get();
		} catch (InterruptedException e) {
			// Restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
			logger.error("update index {} id {} interrupted", index, id, e);
		} catch (ExecutionException e) {
			logger.error("update index {} id {} error", index, id, e);
		}
	}

	/**
	 * Deletes an entire index if it exists.
	 *
	 * @param index index name
	 * @return {@code true} when the index is absent or was deleted; {@code false}
	 *         when the delete was not acknowledged
	 */
	public boolean deleteIndex(String index) {
		if (!isExistIndex(index)) {
			logger.info("{} not exists", index);
		} else {
			TransportClient client = ElasticsearchFactory.getInstance();

			DeleteIndexResponse dResponse = client.admin().indices().prepareDelete(index).execute().actionGet();

			if (dResponse.isAcknowledged()) {
				logger.info("delete index {}  successfully!", index);
			} else {
				logger.info("Fail to delete index {}", index);
				return false;
			}
		}
		return true;
	}

	/**
	 * Deletes a single document from {@code index/type}.
	 *
	 * @param index index name
	 * @param type  mapping type
	 * @param docId document id
	 * @return {@code true} when the delete returned HTTP 200
	 */
	public boolean deleteIndexsType(String index, String type, String docId) {
		TransportClient client = ElasticsearchFactory.getInstance();

		DeleteResponse dResponse = client.prepareDelete(index, type, docId).execute().actionGet();
		if (dResponse.status() == RestStatus.OK) {
			logger.info("delete index {}  successfully!", index);
		} else {
			// Bug fix: the docId was concatenated into the pattern instead of
			// being passed as an SLF4J placeholder argument.
			logger.info("Fail to delete index {} type {} docId {}", index, type, docId);
			return false;
		}
		return true;
	}

	/**
	 * Checks whether an index exists.
	 *
	 * @param index index name
	 * @return {@code true} if the index exists
	 */
	public boolean isExistIndex(String index) {
		TransportClient client = ElasticsearchFactory.getInstance();

		IndicesExistsRequest inExistsRequest = new IndicesExistsRequest(index);

		IndicesExistsResponse inExistsResponse = client.admin().indices().exists(inExistsRequest).actionGet();
		return inExistsResponse.isExists();
	}

	/**
	 * Searches with highlighting enabled. Highlighted fragments can be read
	 * from each hit via {@code hit.getHighlightFields().get(field).getFragments()}.
	 *
	 * <p>NOTE(review): the index name ("blog") and the highlighted fields
	 * ("title", "content") are hard-coded and independent of {@code fields};
	 * confirm whether they should be parameterized.
	 *
	 * @param keywords search keywords
	 * @param fields   fields the multi-match query runs against
	 * @return the hits with highlight metadata attached
	 */
	public SearchHits searchIndexByHightHits(String keywords, String... fields) {
		TransportClient client = ElasticsearchFactory.getInstance();

		QueryBuilder matcher = QueryBuilders.multiMatchQuery(keywords, fields);

		HighlightBuilder hiBuilder = new HighlightBuilder();
		hiBuilder.preTags("<h2>");
		hiBuilder.postTags("</h2>");
		hiBuilder.field("title");
		hiBuilder.field("content");
		SearchResponse response = client.prepareSearch("blog").setQuery(matcher).highlighter(hiBuilder).execute()
				.actionGet();
		SearchHits searchHits = response.getHits();

		logger.info("共搜到:" + searchHits.getTotalHits() + "条结果!");

		return searchHits;
	}

}
