package com.walker.service.meta;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.WildcardQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

import com.walker.util.ESUtils;

import net.sf.json.JSONObject;

/**
 * Full-text search service for the asset catalogue: multi-field fuzzy
 * matching over metadata objects (tables, columns, data sources, APIs, ...).
 * @author gaolei
 */

@Component
public class AssetCatalogueFullSearchServ {

	@Autowired
	@Qualifier("dmJdbcTemplate")
	private JdbcTemplate gJdbc;

	@Autowired
	ESUtils es;

	@Value("${es.host}")
	private String esHost;

	@Value("${es.port}")
	private String esPort;

	@Value("${es.user}")
	private String esUser;

	@Value("${es.passwd}")
	private String esPasswd;

	// Lazily initialised ES client; created by getESSession(), closed and
	// dropped at the end of a rebuild so the next call re-initialises it.
	private RestHighLevelClient client;

	// Read-routing flag: "_pri" serves queries from the primary index,
	// "_seg" redirects them to the rollback-segment index while the primary
	// index is being rebuilt.
	// NOTE(review): mutable static state on a Spring singleton is not
	// thread-safe; a concurrent rebuild and query may observe a stale route.
	private static String route;

	// The injected @Value fields are not available in a static initializer,
	// so only the routing default is set here; the client is created lazily.
	static {
		route = "_pri";
	}

	// Ordered object types the search results are grouped under; the order
	// fixes the position of each group in the returned list.
	private static final String[] OBJECT_TYPES =
			{ "project", "datasource", "table", "column", "api", "quota", "function" };

	/**
	 * Opens the ES session on first use, reusing an already-open client.
	 */
	private void getESSession() {
		if (null == client) {
			client = es.initSession(esHost, Integer.parseInt(esPort), esUser, esPasswd);
		}
	}

	/**
	 * Full rebuild of the full-text index: loads every collected table and
	 * column from the metadata database, pushes them into ES, then mirrors
	 * the primary index into the rollback segment and routes reads back to
	 * the primary index.
	 *
	 * @throws Exception when loading or indexing fails. (The previous
	 *         implementation swallowed all exceptions in an empty catch,
	 *         hiding failed rebuilds from callers.)
	 */
	public void tableColumnAddEs() throws Exception {
		try {
			List<Map<String, Object>> tableList = gJdbc.queryForList("select 'table' as object_type,db_type,table_id as object_id,table_code as object_code, table_name as object_name,DATE_FORMAT(sync_time, \"%Y-%m-%d %H:%i:%s\") as uptime "
					+ "from t_asset_meta_tab_collect");
			addIndexDocument(tableList, "table");

			List<Map<String, Object>> columnList = gJdbc.queryForList("select 'column' as object_type,c.db_type,c.column_id as object_id,c.column_code as object_code,c.column_name as object_name,DATE_FORMAT(c.sync_time, \"%Y-%m-%d %H:%i:%s\") as uptime "
					+ "from t_asset_meta_col_collect c,t_asset_meta_tab_collect t "
					+ "where c.table_id = t.table_id");
			addIndexDocument(columnList, "column");

			// Copy the fresh primary index into the rollback segment; once the
			// copy succeeds, reads are routed back to the primary index.
			if (syncSegmentFullSearch()) {
				route = "_pri";
			}
		} finally {
			// Close and drop the client so a later call re-initialises it.
			// (The original closed a possibly-null client — NPE — and kept the
			// closed instance cached, breaking every subsequent search.)
			if (client != null) {
				try {
					client.close();
				} finally {
					client = null;
				}
			}
		}
	}

	/**
	 * Replaces all documents of one object type in the full-text index.
	 * While the rebuild runs, reads are routed to the rollback segment.
	 *
	 * @param dataList    rows to index; each row must contain an "object_id"
	 * @param object_type logical type of the rows ("table", "column", ...)
	 * @return {@code true} when the whole batch was written
	 * @throws Exception when clearing the previous batch or the bulk write fails
	 */
	public boolean addIndexDocument(List<Map<String, Object>> dataList, String object_type) throws Exception {
		try {
			// Serve queries from the rollback segment while the primary is rebuilt.
			route = "_seg";
			getESSession();
			if (!multipleDeleteDocument(object_type)) {
				throw new Exception("写入ES错误-写入[" + object_type + "]前,清理历史数据环节错误");
			}

			multipleAddIndexDocument(dataList, object_type);
			return true;
		} catch (Exception e) {
			e.printStackTrace();
			// Keep the original message for callers but preserve the cause chain.
			throw new Exception(e.getMessage(), e);
		}
	}

	/**
	 * Bulk-writes one batch into "meta_full_search" and records the written
	 * ids under the object type in "meta_full_task", so the next rebuild can
	 * delete exactly this batch first.
	 *
	 * @param dataList    rows to index; rows without a usable "object_id" are
	 *                    skipped (logged, best-effort)
	 * @param object_type task-control document id ("table", "column", ...)
	 * @return {@code true} when both bulk writes succeeded
	 * @throws Exception when either bulk write reports failures
	 */
	public boolean multipleAddIndexDocument(List<Map<String, Object>> dataList, String object_type) throws Exception {
		BulkRequest request = new BulkRequest();
		List<String> ids = new ArrayList<>();
		for (Map<String, Object> map : dataList) {
			try {
				String id = map.get("object_id").toString();
				// index creates/overwrites the document; the follow-up update
				// mirrors the original implementation (a no-op for identical
				// content, kept for behavioural compatibility).
				request.add(new IndexRequest("meta_full_search").id(id)
						.source(map));
				request.add(new UpdateRequest("meta_full_search", id)
						.doc(map));
				ids.add(id);
			} catch (Exception e) {
				// Best-effort per row: log the bad row and keep going.
				e.printStackTrace();
				System.err.println(map);
			}
		}

		// Record the ids of this batch in the task-control index first, so a
		// later rebuild can clean them up even if the data write fails midway.
		Map<String, Object> taskMap = new HashMap<>();
		taskMap.put("object_ids", String.join(",", ids));
		taskMap.put("uptime", new Date());
		BulkRequest taskRequest = new BulkRequest();
		taskRequest.add(new IndexRequest("meta_full_task").id(object_type)
				.source(taskMap));
		taskRequest.add(new UpdateRequest("meta_full_task", object_type)
				.doc(taskMap));
		BulkResponse taskResponse = client.bulk(taskRequest, RequestOptions.DEFAULT);

		if (taskResponse.hasFailures()) {
			System.err.println(taskResponse.buildFailureMessage());
			throw new Exception("写入ES错误-写入[" + object_type + "]前,保留写入任务环节错误");
		}

		// An empty bulk request fails ES validation, so skip it for empty input.
		if (request.numberOfActions() > 0) {
			BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
			if (bulkResponse.hasFailures()) {
				System.err.println(bulkResponse.buildFailureMessage());
				throw new Exception("写入ES错误-写入[" + object_type + "]错误");
			}
		}
		return true;
	}

	/**
	 * Bulk-deletes the given document ids from an index.
	 *
	 * @param index target index name
	 * @param ids   document ids to delete; null/empty is treated as success
	 * @return {@code true} when every delete succeeded (or nothing to delete)
	 * @throws IOException on transport errors
	 */
	public boolean multipleDeleteDocument(String index, String[] ids) throws IOException {
		if (ids == null || ids.length == 0) {
			// An empty BulkRequest would fail ES validation.
			return true;
		}
		BulkRequest request = new BulkRequest();
		for (String id : ids) {
			request.add(new DeleteRequest().index(index).id(id));
		}

		BulkResponse response = client.bulk(request, RequestOptions.DEFAULT);
		if (response.hasFailures()) {
			System.err.println(response.buildFailureMessage());
			return false;
		}
		return true;
	}

	/**
	 * Deletes the documents written by the previous rebuild of one object
	 * type, using the comma-separated id list stored in "meta_full_task".
	 *
	 * @param object_type task-control document id ("table", "column", ...)
	 * @return {@code true} when cleanup succeeded or there was nothing to clean
	 * @throws IOException declared for interface compatibility; transport
	 *         errors are currently caught and reported as {@code false}
	 */
	public boolean multipleDeleteDocument(String object_type) throws IOException {
		try {
			GetRequest getRequest = new GetRequest(
					"meta_full_task",
					object_type);
			GetResponse getResponse = client.get(getRequest, RequestOptions.DEFAULT);
			if (getResponse.isSourceEmpty()) {
				// No previous batch recorded for this type.
				return true;
			}
			Object ids = getResponse.getSource().get("object_ids");
			if (null != ids && !ids.equals("")) {
				return multipleDeleteDocument("meta_full_search", ids.toString().split(","));
			}
			return true;
		} catch (Exception e) {
			e.printStackTrace();
			return false;
		}
	}

	/**
	 * Two-phase search: first wildcard matching on db_type / object_code for
	 * technical tokens, then an analysed multi-field match on the whole
	 * keyword; both result sets are grouped by object_type.
	 *
	 * @param index   index to search
	 * @param keyword space-separated search terms
	 * @param size    maximum hits per phase
	 * @return one map per known object type (see {@link #OBJECT_TYPES}),
	 *         each holding its hits and a "records" count
	 * @throws Exception when the search fails
	 */
	public List<Object> getClassDocument(String index, String keyword, int size) throws Exception {
		getESSession();
		try {
			List<SearchHits> hitsList = new ArrayList<SearchHits>();

			// Phase 1: wildcard matching — db types and code-like tokens score
			// via db_type / object_code rather than the analysed name field.
			BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
			for (String part : keyword.split(" ")) {
				String token = part.trim();
				if (token.isEmpty()) {
					// An empty token would become the match-all wildcard "**".
					continue;
				}
				if ("tidb hive mysql hive hbase clickhouse ceph s3 elasticsearch es gbase oracle redis".contains(token.toLowerCase())) {
					boolQueryBuilder.should(new WildcardQueryBuilder("db_type", "*" + token + "*"));
				} else if (token.matches("^[a-zA-Z_/]*")) {
					boolQueryBuilder.should(new WildcardQueryBuilder("object_code", "*" + token + "*"));
				}
			}

			HighlightBuilder highlightBuilder = new HighlightBuilder();
			highlightBuilder.preTags("<span style='color:red'>").postTags("</span>");
			highlightBuilder.field("object_code").field("db_type");

			SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
			searchSourceBuilder.query(boolQueryBuilder);
			searchSourceBuilder.highlighter(highlightBuilder);

			SearchRequest request = new SearchRequest(index);
			request.source(searchSourceBuilder);
			request.source().size(size);

			SearchResponse response = client.search(request, RequestOptions.DEFAULT);
			hitsList.add(response.getHits());

			// Phase 2: analysed multi-field match on the full keyword.
			request.source().query(QueryBuilders.multiMatchQuery(keyword, "db_type", "object_code", "object_name").type("best_fields"))
					.highlighter(new HighlightBuilder().field("object_code").field("object_name").field("db_type")
							.preTags("<span style='color:red'>").postTags("</span>"));
			request.source().size(size);
			response = client.search(request, RequestOptions.DEFAULT);
			hitsList.add(response.getHits());

			return groupHitsByObjectType(hitsList);
		} catch (Exception e) {
			e.printStackTrace();
			throw new Exception(e.getMessage(), e);
		}
	}

	/**
	 * Groups search hits by their "object_type" source field into the fixed
	 * response shape: one map per known type, holding the hit list under the
	 * type name and its size under "records". Hits with an unknown type are
	 * dropped, matching the previous inline switch's default branch.
	 */
	@SuppressWarnings("unchecked")
	private List<Object> groupHitsByObjectType(List<SearchHits> hitsList) {
		Map<String, List<Map<String, Object>>> grouped = new LinkedHashMap<>();
		for (String type : OBJECT_TYPES) {
			grouped.put(type, new ArrayList<>());
		}
		for (SearchHits hits : hitsList) {
			for (SearchHit hit : hits) {
				Map<String, Object> hitMap = JSONObject.fromObject(hit.toString());
				// Internal ES bookkeeping is not part of the API payload.
				hitMap.remove("_index");
				hitMap.remove("_type");
				Map<String, Object> sourceMap = (Map<String, Object>) hitMap.get("_source");
				List<Map<String, Object>> bucket = grouped.get(String.valueOf(sourceMap.get("object_type")));
				if (bucket != null) {
					bucket.add(hitMap);
				}
			}
		}
		List<Object> rowList = new ArrayList<>();
		for (Map.Entry<String, List<Map<String, Object>>> entry : grouped.entrySet()) {
			Map<String, Object> group = new HashMap<>();
			group.put(entry.getKey(), entry.getValue());
			group.put("records", entry.getValue().size());
			rowList.add(group);
		}
		return rowList;
	}

	/**
	 * Single-phase search: analysed multi-field match on db_type,
	 * object_code and object_name, grouped by object_type.
	 *
	 * @param index   index to search
	 * @param keyword search phrase
	 * @param size    maximum number of hits
	 * @return grouped results, same shape as {@link #groupHitsByObjectType}
	 * @throws Exception when the search fails
	 */
	private List<Object> getIndexDocument(String index, String keyword, int size) throws Exception {
		SearchRequest request = new SearchRequest(index);
		request.source().query(QueryBuilders.multiMatchQuery(keyword, "db_type", "object_code", "object_name").type("best_fields"))
				.highlighter(new HighlightBuilder().field("object_code").field("object_name").field("db_type")
						.preTags("<span style='color:red'>").postTags("</span>"));
		request.source().size(size);
		try {
			getESSession();
			SearchResponse response = client.search(request, RequestOptions.DEFAULT);
			List<SearchHits> hitsList = new ArrayList<SearchHits>();
			hitsList.add(response.getHits());
			return groupHitsByObjectType(hitsList);
		} catch (Exception e) {
			throw new Exception(e.getMessage(), e);
		}
	}

	/**
	 * Keyword search over the currently active index: the primary index in
	 * normal operation, the rollback segment while a rebuild is in flight.
	 *
	 * @param keyword search phrase
	 * @return grouped results (up to 10 hits)
	 * @throws Exception when the search fails
	 */
	public List<Object> getIndexDocument(String keyword) throws Exception {
		int size = 10;
		// During a rebuild, queries are served from the rollback segment.
		String index = route.equals("_pri") ? "meta_full_search" : "meta_full_search_seg";
		return getIndexDocument(index, keyword, size);
	}

	/**
	 * Copies every document of the primary index into the rollback-segment
	 * index "meta_full_search_seg".
	 *
	 * NOTE(review): @Async has no effect here — the method is invoked through
	 * {@code this} from tableColumnAddEs(), bypassing the Spring proxy, so it
	 * runs synchronously (which that caller relies on for the boolean result).
	 *
	 * @return {@code true} when the copy succeeded (or there was nothing to copy)
	 * @throws Exception when the search or bulk write fails
	 */
	@Async
	public boolean syncSegmentFullSearch() throws Exception {
		try {
			getESSession();
			SearchRequest request = new SearchRequest("meta_full_search");
			request.source().trackTotalHits(true);
			SearchResponse response = client.search(request, RequestOptions.DEFAULT);
			SearchHits hits = response.getHits();

			// The first query only returns the default page plus the exact
			// total; re-query with size = total to fetch everything at once.
			int total = Math.toIntExact(hits.getTotalHits().value);
			if (total == 0) {
				// Nothing to copy; an empty bulk request would fail validation.
				return true;
			}
			request.source().size(total);
			response = client.search(request, RequestOptions.DEFAULT);

			BulkRequest bulkSeg = new BulkRequest();
			for (SearchHit hit : response.getHits()) {
				bulkSeg.add(new IndexRequest("meta_full_search_seg").id(hit.getId())
						.source(hit.getSourceAsMap()));
			}
			BulkResponse bulkResponse = client.bulk(bulkSeg, RequestOptions.DEFAULT);
			if (bulkResponse.hasFailures()) {
				System.err.println(bulkResponse.buildFailureMessage());
				throw new Exception("ES错误-同步[meta_full_search_seg]错误");
			}
			return true;
		} catch (Exception e) {
			throw new Exception(e.getMessage(), e);
		}
	}

	/**
	 * Synchronises standards and standard sets into the analyser dictionary.
	 * Not implemented yet.
	 */
	public void syncStdDic() {

	}
}
