package com.nwpu.bigdata.dao.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nwpu.bigdata.dao.EsLogDao;
import com.nwpu.bigdata.model.ExceptLog;
import com.nwpu.bigdata.model.FaultKeyExtend;
import org.apache.commons.lang.builder.ReflectionToStringBuilder;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.main.MainResponse;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.support.replication.ReplicationResponse;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.threadpool.ThreadPool;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.ImportResource;
import org.springframework.stereotype.Repository;

import java.io.IOException;
import java.net.ConnectException;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

/**
 * @author carlin
 */
@Repository
@ImportResource(locations = {"classpath:/xmlConfig/spring/applicationContext-util.xml"})
public class EsLogDaoImpl implements EsLogDao {
	/**
	 * Search timeout / scroll window, in seconds.
	 */
	private static final int DURATION = 60;
	/** Initial capacity for the per-query condition map. */
	private static final int INITIAL_CAPACITY = 16;
	// FIX: logger is now final so it cannot be reassigned (SLF4J convention).
	private static final Logger logger = LoggerFactory.getLogger(EsLogDaoImpl.class);

	/** Scroll-context keep-alive shared by every scrolling search. */
	private final Scroll scroll = new Scroll(TimeValue.timeValueMinutes(1L));
	private RestHighLevelClient restHighLevelClient;

	// Collection interval, parsed with Long.parseLong in searchLog.
	// NOTE(review): presumably milliseconds — confirm against the property source.
	@Value("${es.collection.interval}")
	private String colInterval;

	/**
	 * Injects the shared Elasticsearch high-level REST client.
	 *
	 * @param restHighLevelClient client used for all index/search operations
	 */
	@Autowired
	public void setRestHighLevelClient(RestHighLevelClient restHighLevelClient) {
		this.restHighLevelClient = restHighLevelClient;
	}

	/**
	 * Asynchronously indexes a single {@link ExceptLog} document with
	 * {@code OpType.CREATE} (fails with CONFLICT if the id already exists).
	 * Results and shard failures are only logged; the call does not block.
	 *
	 * @param index target index name
	 * @param type document type
	 * @param exceptLog document to serialize (via fastjson) and index
	 */
	@Override
	public void createIndex(String index, String type, ExceptLog exceptLog) {

		String logJson = JSONObject.toJSONString(exceptLog);
		IndexRequest indexRequest = new IndexRequest(index, type).source(logJson, XContentType.JSON)
				.opType(DocWriteRequest.OpType.CREATE);

		restHighLevelClient.indexAsync(indexRequest, new ActionListener<IndexResponse>() {

			@Override
			public void onResponse(IndexResponse response) {
				if (response.getResult() == DocWriteResponse.Result.CREATED) {
					logger.info("[index: {} type: {}  id:{}]  first insert successful! ", response.getIndex(),
							response.getType(), response.getId());
				} else if (response.getResult() == DocWriteResponse.Result.UPDATED) {
					logger.info("[index: {} type: {}  id:{}]  update insert successful! ", response.getIndex(),
							response.getType(), response.getId());
				}

				// Report writes acknowledged by fewer shards than expected.
				ReplicationResponse.ShardInfo shardInfo = response.getShardInfo();
				if (shardInfo.getTotal() != shardInfo.getSuccessful()) {
					// FIX: the old format string had an unmatched "[" and no closing bracket.
					logger.info("[shards total: {} successful: {} failed: {}]", shardInfo.getTotal(),
							shardInfo.getSuccessful(), shardInfo.getFailed());
				}

				if (shardInfo.getFailed() > 0) {
					for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
						logger.warn(failure.reason());
					}
				}
			}

			@Override
			public void onFailure(Exception e) {
				if (e instanceof ElasticsearchException
						&& ((ElasticsearchException) e).status() == RestStatus.CONFLICT) {
					// Duplicate id is expected with OpType.CREATE; warn only.
					logger.warn(e.getMessage());
				} else {
					// FIX: previously a non-CONFLICT ElasticsearchException was silently
					// swallowed; log every other failure with its stack trace.
					logger.error("failed to index document into [{}/{}]", index, type, e);
				}
			}
		});
	}

	/**
	 * Scroll-searches {@code index/type} for documents whose "log" field matches
	 * any of the comma-separated keywords in {@code faultKeyExtend}, restricted
	 * to the last {@code colInterval} milliseconds, newest first.
	 *
	 * @param index index to search
	 * @param type document type
	 * @param faultKeyExtend keyword/user context; keyword is comma-separated
	 * @param includes source fields to fetch, or null for all
	 * @param excludes source fields to omit, or null for none
	 * @return matched logs; empty list when nothing matched or on failure
	 */
	@Override
	public List<ExceptLog> searchLog(String index, String type, FaultKeyExtend faultKeyExtend, String[] includes,
			String[] excludes) {
		ArrayList<ExceptLog> exceptLogs = new ArrayList<>();
		Map<String, String[]> conditions = new ConcurrentHashMap<>(INITIAL_CAPACITY);
		String[] keywords = faultKeyExtend.getKeyword().split(",");
		conditions.put("log", keywords);

		SearchRequest searchRequest = new SearchRequest(index);
		searchRequest.types(type);
		searchRequest.scroll(scroll);
		SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
		searchSourceBuilder.size(100);
		searchSourceBuilder.query(createBoolQuery(index, type, conditions, null,
				new DateTime(System.currentTimeMillis()), Long.parseLong(colInterval)));
		searchSourceBuilder.fetchSource(includes, excludes);
		searchSourceBuilder.sort(new FieldSortBuilder("@timestamp").order(SortOrder.DESC));
		searchSourceBuilder.timeout(new TimeValue(DURATION, TimeUnit.SECONDS));
		searchRequest.source(searchSourceBuilder);

		try {
			SearchResponse searchResponse = restHighLevelClient.search(searchRequest);
			String scrollId = searchResponse.getScrollId();
			SearchHit[] searchHits;
			SearchHits hits;
			do {
				hits = searchResponse.getHits();
				searchHits = hits.getHits();
				long totalHits = hits.getTotalHits();
				logger.debug("total hits is {}", totalHits);
				if (searchHits == null || searchHits.length <= 0) {
					logger.debug("the spec search [{},{},{}] result is empty!", index, type,
							faultKeyExtend.getKeyword());
					break;
				}
				logger.debug(
						"the spec search [{},{},{}] result. total:{},  restStatus:{}, "
								+ "the time:{}, isTerminatedEarly:{}, isTimeOut:{}",
						index, type, faultKeyExtend.getKeyword(), totalHits, searchResponse.status().getStatus(),
						searchResponse.getTook().getSeconds(), searchResponse.isTerminatedEarly(),
						searchResponse.isTimedOut());

				int totalShards = searchResponse.getTotalShards();
				int successfulShards = searchResponse.getSuccessfulShards();
				int failedShards = searchResponse.getFailedShards();
				logger.debug(
						"the spec search [{},{},{}] shard summary. totalShard:{},"
								+ " successfulShard:{}, failedShard:{}.",
						index, type, faultKeyExtend, totalShards, successfulShards, failedShards);
				for (ShardSearchFailure failure : searchResponse.getShardFailures()) {
					// FIX: failure.index() is an index name, not a count; label it correctly.
					logger.debug(
							"the spec search [{},{},{}] has a failed shard. index: {}, reason: {}",
							index, type, faultKeyExtend, failure.index(), failure.reason());
				}

				getExceptLog(exceptLogs, searchHits, faultKeyExtend);

				// Pull the next scroll page until Elasticsearch returns no more hits.
				SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
				scrollRequest.scroll(scroll);
				searchResponse = restHighLevelClient.searchScroll(scrollRequest);
				scrollId = searchResponse.getScrollId();
				searchHits = searchResponse.getHits().getHits();
			} while (searchHits != null && searchHits.length > 0);
			clearScrollRequest(scrollId);
		} catch (ConnectException e) {
			// FIX: keep the cause instead of discarding it.
			logger.error("ElasticSearch Cluster Connection Fail! Please Check the Connection!", e);
		} catch (IOException e) {
			// FIX: was e.printStackTrace(), which bypasses the logging framework.
			logger.error("search [{}/{}] failed", index, type, e);
		}
		return exceptLogs;
	}

	/**
	 * Maps raw search hits to {@link ExceptLog} entities.
	 *
	 * @param exceptLogs
	 *            destination list receiving one entity per usable hit
	 * @param searchHits
	 *            raw hits from Elasticsearch; hits without a "kubernetes"
	 *            object are skipped
	 * @param faultKeyExtend
	 *            keyword and user context stamped onto every entity
	 */
	private void getExceptLog(ArrayList<ExceptLog> exceptLogs, SearchHit[] searchHits, FaultKeyExtend faultKeyExtend) {
		for (SearchHit hit : searchHits) {
			JSONObject jsonSource = JSON.parseObject(hit.getSourceAsString());
			JSONObject jsonKubernetes = jsonSource.getJSONObject("kubernetes");
			if (jsonKubernetes == null) {
				// Without kubernetes metadata the hit cannot be attributed to a pod.
				continue;
			}

			ExceptLog exceptLog = new ExceptLog();
			exceptLog.setContent(jsonSource.getString("log"));
			exceptLog.setHostip(jsonKubernetes.getString("host"));
			// FIX: the old pattern "yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSSXXX" required
			// exactly nine fractional digits and threw on ordinary millisecond
			// timestamps. OffsetDateTime.parse uses ISO_OFFSET_DATE_TIME, which
			// accepts any fractional precision (and a trailing 'Z').
			String timestamp = jsonSource.getString("@timestamp");
			if (timestamp != null) {
				exceptLog.setLogtime(Date.from(OffsetDateTime.parse(timestamp).toInstant()));
			}
			exceptLog.setSoftware(jsonKubernetes.getString("pod_name"));
			exceptLog.setProcessname(jsonKubernetes.getString("container_name"));

			exceptLog.setUser(faultKeyExtend.getUser().getUsername());
			exceptLog.setFaulttype(faultKeyExtend.getKeyword());
			exceptLogs.add(exceptLog);
		}
	}

	/**
	 * Fetches one document by id and maps it to an {@link ExceptLog}.
	 *
	 * @param index index name
	 * @param type document type
	 * @param id document id
	 * @param includes source fields to fetch, or null for all
	 * @param excludes source fields to omit, or null for none
	 * @param faultKeyExtend keyword/user context stamped onto the entity
	 * @return the mapped entity; fields left null when the document is missing
	 *         metadata, mostly empty on I/O failure
	 */
	@Override
	public ExceptLog getSingleLog(String index, String type, String id, String[] includes, String[] excludes,
			FaultKeyExtend faultKeyExtend) {
		GetRequest request = new GetRequest(index, type, id);
		FetchSourceContext fetchSourceContext = new FetchSourceContext(true, includes, excludes);
		request.fetchSourceContext(fetchSourceContext);
		ExceptLog exceptLog = new ExceptLog();
		try {
			GetResponse getResponse = restHighLevelClient.get(request);
			JSONObject jsonSource = JSON.parseObject(getResponse.getSourceAsString());
			// FIX: guard against a missing document or missing "kubernetes" object
			// (searchLog already skips such hits; this method previously threw NPE).
			if (jsonSource != null) {
				exceptLog.setContent(jsonSource.getString("log"));
				exceptLog.setLogtime(jsonSource.getDate("@timestamp"));
				JSONObject jsonKubernetes = jsonSource.getJSONObject("kubernetes");
				if (jsonKubernetes != null) {
					exceptLog.setHostip(jsonKubernetes.getString("host"));
					exceptLog.setSoftware(jsonKubernetes.getString("pod_name"));
					exceptLog.setProcessname(jsonKubernetes.getString("container_name"));
				}
			}
			exceptLog.setUser(faultKeyExtend.getUser().getUsername());
			exceptLog.setFaulttype(faultKeyExtend.getKeyword());
		} catch (IOException e) {
			// FIX: was printStackTrace(); route through the logger with the cause.
			logger.error("get [{}/{}/{}] failed", index, type, id, e);
		}
		return exceptLog;
	}

	/**
	 * Bulk-indexes the given logs, flushing every 500 actions, and blocks until
	 * all in-flight requests finish (at most 30 seconds).
	 *
	 * @param index target index
	 * @param type document type
	 * @param exceptLogs documents to serialize and index
	 */
	@Override
	public void bulkCreateIndex(String index, String type, ExceptLog[] exceptLogs) {
		// FIX: removed "new ThreadPool(Settings.EMPTY)" — it was never used or
		// shut down, leaking a full thread pool on every invocation.
		BulkProcessor.Listener listener = new BulkProcessor.Listener() {

			@Override
			public void beforeBulk(long executionId, BulkRequest request) {
				logger.info("Executing bulk [{}] with {} requests", executionId, request.numberOfActions());
			}

			@Override
			public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
				logger.error("Failed to execute bulk [{}]", executionId, failure);
			}

			@Override
			public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
				if (response.hasFailures()) {
					logger.warn("Bulk [{}] executed with failures", executionId);
				} else {
					logger.info("Bulk [{}] completed in {} milliseconds", executionId, response.getTook().getMillis());
				}
			}
		};
		BulkProcessor bulkProcessor = BulkProcessor.builder(restHighLevelClient::bulkAsync, listener)
				.setBulkActions(500).build();

		for (ExceptLog exceptLog : exceptLogs) {
			bulkProcessor
					.add(new IndexRequest(index, type).source(JSONObject.toJSONString(exceptLog), XContentType.JSON));
		}

		try {
			boolean terminated = bulkProcessor.awaitClose(30L, TimeUnit.SECONDS);
			if (terminated) {
				logger.info("All bulk requests executed successfully!");
			} else {
				// FIX: the timeout case was silently ignored before.
				logger.warn("Bulk processor did not finish within 30 seconds; requests may still be in flight!");
			}
		} catch (InterruptedException e) {
			// FIX: restore the interrupt flag instead of swallowing it.
			Thread.currentThread().interrupt();
			logger.error("Interrupted while waiting for bulk requests to finish", e);
		}
	}

	/**
	 * Asynchronously releases a server-side scroll context.
	 *
	 * @param scrollId id of the scroll context to free
	 */
	private void clearScrollRequest(String scrollId) {
		ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
		clearScrollRequest.addScrollId(scrollId);
		restHighLevelClient.clearScrollAsync(clearScrollRequest, new ActionListener<ClearScrollResponse>() {
			@Override
			public void onResponse(ClearScrollResponse response) {
				if (response.isSucceeded()) {
					// FIX: "sucessful" typo in the log message.
					logger.info("clear scroll successful ! the clear num is : {}", response.getNumFreed());
				} else {
					logger.info("clear scroll fail ! ");
				}
			}

			@Override
			public void onFailure(Exception e) {
				if (e instanceof ElasticsearchException
						&& ((ElasticsearchException) e).status() == RestStatus.NOT_FOUND) {
					// Scroll context already expired; nothing left to release.
					logger.warn(e.getMessage());
				} else {
					// FIX: non-NOT_FOUND ElasticsearchExceptions were previously swallowed;
					// log every other failure with its stack trace.
					logger.error("failed to clear scroll [{}]", scrollId, e);
				}
			}

		});
	}

	/**
	 * Logs basic cluster information (name, uuid, node name, version).
	 */
	@Override
	public void clusterInfo() {
		try {
			MainResponse response = restHighLevelClient.info();
			ClusterName clusterName = response.getClusterName();
			String clusterUuid = response.getClusterUuid();
			String nodeName = response.getNodeName();
			Version version = response.getVersion();
			logger.info("cluster name: {},  cluster uuid: {} node name: {} version: {}", clusterName.value(),
					clusterUuid, nodeName, version);
		} catch (IOException e) {
			// FIX: include the full stack trace instead of only e.getMessage().
			logger.error("failed to fetch cluster info", e);
		}

	}

	/**
	 * Builds a bool query: one must/terms clause per condition entry, plus a
	 * range filter on "@timestamp" limiting results to [startTime, endTime].
	 *
	 * @param index index name (currently unused here; kept for signature stability)
	 * @param type document type (currently unused here; kept for signature stability)
	 * @param conditions field name -> accepted terms; each entry becomes a terms clause
	 * @param startTime window start; when null it is computed as endTime - interval
	 * @param endTime window end; must be non-null when startTime is null
	 * @param interval fallback window length in milliseconds, used only when startTime is null
	 * @return the assembled bool query
	 */
	private BoolQueryBuilder createBoolQuery(String index, String type, Map<String, String[]> conditions,
			DateTime startTime, DateTime endTime, long interval) {
		if (startTime == null) {
			startTime = new DateTime(endTime.getMillis() - interval);
		}
		// FIX: per-query timing details are diagnostics; log at debug to match
		// the rest of this class instead of flooding the info log on every search.
		logger.debug("current Time : {}", System.currentTimeMillis());
		logger.debug("start Time : {}", startTime.getMillis());
		logger.debug("end Time : {}", endTime.getMillis());
		BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
		for (Entry<String, String[]> mapEntry : conditions.entrySet()) {
			boolQueryBuilder.must(QueryBuilders.termsQuery(mapEntry.getKey(), mapEntry.getValue()));
		}
		boolQueryBuilder.filter(QueryBuilders.rangeQuery("@timestamp").from(startTime).to(endTime));
		return boolQueryBuilder;
	}

	/**
	 * Scroll-searches with caller-supplied term conditions and an explicit time
	 * window, mapping each page of hits to {@link ExceptLog} entities.
	 *
	 * @param index index name (forwarded to the query builder)
	 * @param type document type (forwarded to the query builder)
	 * @param conditions field name -> accepted terms, ANDed together
	 * @param startTime window start; may be null (then derived from endTime)
	 * @param endTime window end
	 * @param faultKeyExtend keyword/user context stamped onto result entities
	 * @return matched logs; empty list when nothing matched or on failure
	 */
	@Override
	public List<ExceptLog> searchLogByUser(String index, String type, Map<String, String[]> conditions,
			DateTime startTime, DateTime endTime, FaultKeyExtend faultKeyExtend) {
		ArrayList<ExceptLog> exceptLogs = new ArrayList<>();

		SearchRequest searchRequest = new SearchRequest();
		searchRequest.scroll(scroll);
		SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
		searchSourceBuilder.size(500);
		searchSourceBuilder.query(createBoolQuery(index, type, conditions, startTime, endTime, 0L));
		searchRequest.source(searchSourceBuilder);

		try {
			SearchResponse searchResponse = restHighLevelClient.search(searchRequest);
			String scrollId = searchResponse.getScrollId();
			SearchHit[] searchHits;
			do {
				searchHits = searchResponse.getHits().getHits();

				if (searchHits == null || searchHits.length <= 0) {
					// FIX: the old message had two placeholders for three arguments,
					// so the conditions dump was silently dropped from the log.
					logger.info("the spec search [{},{},{}] result is empty!", index, type,
							ReflectionToStringBuilder.toString(conditions));
					break;
				}

				getExceptLog(exceptLogs, searchHits, faultKeyExtend);

				// Pull the next scroll page until no more hits remain.
				SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
				scrollRequest.scroll(scroll);
				searchResponse = restHighLevelClient.searchScroll(scrollRequest);
				scrollId = searchResponse.getScrollId();
				searchHits = searchResponse.getHits().getHits();
			} while (searchHits != null && searchHits.length > 0);
			clearScrollRequest(scrollId);
		} catch (IOException e) {
			// FIX: log with the full stack trace, not just e.getMessage().
			logger.error("searchLogByUser [{}/{}] failed", index, type, e);
		}

		return exceptLogs;
	}

}
