/*
 * Copyright 2012 The LoadTestAnalyzer Project
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package org.yantsu.loadtestanalyzer.ltaimporter;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.support.replication.ReplicationType;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.node.Node;
import org.yantsu.loadtestanalyzer.ltaimporter.importers.Importer;
import org.yantsu.loadtestanalyzer.ltaimporter.importers.Importer.Data;


/**
 * Imports a data file for a given agent type into the analyzer database.
 * Exit codes:
 *   0:   all fine
 *   255: illegal parameter
 *   2:   an import for that agent type is currently running
 *   3:   a data record with such a timestamp has already been imported
 * 
 * TODO implement remove lock
 * 
 * @author cthiele
 */
public class Import {
	private static final String INDEX_NAME = "ltadata";
	private static final int BULK_SIZE = 4096; 
	
	private Node node;
	private Client client;
	private String agenttype; 
	
	private String numOfReplicas;
	
	private static class Range {
		public long start;
		public long end;
		
		public Range(long start, long end) {
			this.start = start;
			this.end = end;
		}
	}
	
	private Range[] ranges = new Range[0];
	
	/**
	 * @param args testname agenttype file [host [port [clustername]]]
	 */
	public static void main(String[] args) {
		if(args.length < 3) {
			System.out.println("Usage: java -jar ltaimporter.jar <testname> <agenttype> <file> [<host> [<port> [<clustername>]]]");
			System.exit(255);
		}
		String testname = args[0];
		String agenttype = args[1];
		String fileName = args[2];
		String host = "localhost";
		if(args.length > 3) {
			host = args[3];
		}

		int port = 9300;
		if(args.length > 4) {
			String portStr = args[4];
			try {
				port = Integer.valueOf(portStr);
				if(port < 1 || port > 65535) {
					System.err.println("Parameter port has to be a number between 1 and 65535, was " + portStr);
					System.exit(255);
				}
			} catch(NumberFormatException e) {
				System.err.println("Parameter port has to be a number between 1 and 65535, was " + portStr);
				System.err.println(e.getMessage());
				System.exit(255);
			}
		}

		String clustername = "elasticsearch";
		if(args.length > 5) {
			clustername = args[5];
		}
		
		Import importer = new Import(agenttype, host, port, clustername);
		try {
			importer.importData(testname, fileName);
		} catch(ConcurrentImportException e) {
			System.err.println(e.getMessage());
			System.exit(2);
		} catch(AlreadyImportedException e) {
			System.err.println(e.getMessage());
			System.exit(3);
		} 
		importer.close();
	}

	public Import(String agenttype,
			      String host,
			      int port,
			      String clustername) {
		
		node = nodeBuilder().clusterName(clustername).client(true).node();
		
		client = node.client();
		this.agenttype = agenttype;
	}
	
	private int writeBulk(BulkRequestBuilder bulkRequest) {
		bulkRequest.setReplicationType(ReplicationType.ASYNC);
		BulkResponse bulkResponse = bulkRequest.execute().actionGet();
		if (bulkResponse.hasFailures()) {
			throw new IllegalStateException("Writing data failed: " + bulkResponse.buildFailureMessage());
		}
		return 0;
	}
	
	private void importData(String testname, String fileName) {
		Importer importer = ImporterFactory.createImporter(agenttype);
		String type = "data_" + importer.getClass().getSimpleName();
		updateMapping(type, importer.getMapping());
		String id = getSeriesDocAndLock(testname);
		
		optimizeForBulk();
		
		InputStream in;
		
		long start = -1;
		long end = -1;
		try {
			if("-".equals(fileName)) {
				in = System.in;
			} else {
				in = new FileInputStream(fileName);
			}
			importer.initialize(testname, in);
			
			Data data = importer.next();
			start = data.timestamp;
			
			int bulkCount = 0;
			BulkRequestBuilder bulkRequest = client.prepareBulk();

			while(data != null) {
				XContentBuilder builder = jsonBuilder();
				builder.startObject();
				builder.field("timestamp", data.timestamp);
				builder.field("hostname", data.hostname);	
				builder.field("category", data.category);	
				builder.startObject("values");
				for (Iterator<Entry<String, Double>> i = data.values.entrySet().iterator(); i.hasNext();) {
					Entry<String, Double> entry = i.next();
					builder.field(entry.getKey(), entry.getValue());
				}
				builder.endObject();
				builder.endObject();
				builder.close();
				
				bulkRequest.add(client.prepareIndex(INDEX_NAME, type).setParent(id).setSource(builder));
				if(++bulkCount > BULK_SIZE) {
					bulkCount = writeBulk(bulkRequest);
					bulkRequest = client.prepareBulk();
				}
				
				end = data.timestamp;
				data = importer.next();
			}			
			if(bulkCount != 0) {
				writeBulk(bulkRequest);
			}
		} catch (FileNotFoundException e) {
			throw new IllegalStateException("Datafile not found: " + fileName);
		} catch (IOException e) {
			throw new IllegalStateException("Error writing json doc", e);
		} finally {
			revertBulkOptimization();
			finalizeSeriesDoc(testname, agenttype, start, end);
		}
	}
	
	@SuppressWarnings("unchecked")
	private void finalizeSeriesDoc(String testname, String agenttype, long start, long end) {
		GetResponse response = client.get(Requests.getRequest(INDEX_NAME).id(testname)).actionGet();
		if(response.isExists()) {
			response.getSource().put(agenttype + "_lock", false);
			
			Object ranges = response.getSource().get(agenttype + "_ranges");
			if(ranges == null) {
				ranges = new ArrayList<Map<String, Long>>(1);
				response.getSource().put(agenttype + "_ranges", ranges);
			}
 			try {
				@SuppressWarnings("rawtypes")
				List rangesList = (List)ranges;
				Map<String, Long> range = new HashMap<String, Long>(2);
				range.put("start", start);
				range.put("end", end);
				rangesList.add(range);
			} catch(ClassCastException e) {
				throw new IllegalStateException("Illegal Format of series doc - cannot finalize series data");
			}
			
 			//calculate new overall start timestamp
 			Object startObj = response.getSource().get("start");
 			long oldStart = Long.MAX_VALUE;
 			if(startObj != null) {
 				try {
 					oldStart = Long.parseLong(startObj.toString());
 					if(oldStart > start ) {
 						response.getSource().put("start", start);
 					}
 				} catch(NumberFormatException e) {
 					throw new IllegalStateException("Illegal Format of start param - cannot finalize series data", e);
 				}
 			} else {
 				response.getSource().put("start", start);
 			}
 			
			client.prepareIndex(INDEX_NAME, "series", testname)
					.setVersion(response.getVersion())
					.setSource(response.getSource()).execute().actionGet();
		} else {
			throw new IllegalStateException("Cannot finalize series data - doc not found");
		}
	}

	/**
	 * 
	 * @param testname
	 * @param agenttype
	 * @return doc id
	 */
	private String getSeriesDocAndLock(String testname) {
		GetResponse response = client.get(Requests.getRequest(INDEX_NAME).id(testname)).actionGet();
		if(response.isExists()) {
			Object lock = response.getSource().get(agenttype + "_lock");
			boolean locked = lock != null && Boolean.parseBoolean(lock.toString());
			if(!locked) {
				response.getSource().put(agenttype + "_lock", true);
				client.prepareIndex(INDEX_NAME, "series", testname)
						.setVersion(response.getVersion())
						.setSource(response.getSource()).execute().actionGet();
			} else {
				throw new ConcurrentImportException("An import for the agenttype " + agenttype + " is already runnning");
			}
		} else {
			XContentBuilder builder;
			try {
				builder = jsonBuilder();
				builder.startObject();
					builder.field(agenttype + "_lock", true);
				builder.endObject();
				builder.close();
			} catch (IOException e) {
				throw new IllegalStateException("Error writing json doc", e);
			}
			client.prepareIndex(INDEX_NAME, "series")
					.setSource(builder)
					.setCreate(true)
					.setId(testname).execute().actionGet();
		}
		
		//Get ranges already imported for later check for double import
		Object rangesList = null;
		if(response.getSource() != null) {
			rangesList = response.getSource().get(agenttype + "_ranges"); 
		}
		if(rangesList != null) {
			try {
				@SuppressWarnings("unchecked")
				List<Object> rangeArray = (List<Object>)rangesList;
				ranges = new Range[rangeArray.size()];
				for (int i = 0; i < ranges.length; i++) {
					try {
						@SuppressWarnings("rawtypes")
						Map rangeMap = (Map)rangeArray.get(i);
						Object startObj = rangeMap.get("start");
						Object endObj = rangeMap.get("end");
						if(startObj == null || endObj == null) {
							throw new IllegalStateException("Unable to read series data for test " + testname + " and agenttype " + agenttype + ": format error, missing start or end in range");
						}
						ranges[i] = new Range(Long.valueOf(startObj.toString()), Long.valueOf(endObj.toString()));
					} catch(ClassCastException e) {
						throw new IllegalStateException("Unable to read series data for test " + testname + " and agenttype " + agenttype + ": format error", e);
					}
				}
				
			} catch(ClassCastException e) {
				throw new IllegalStateException("Unable to read series data for test " + testname + " and agenttype " + agenttype + ": format error", e);
			}			
		}
		
		return testname;
	}

	private void updateMapping(String type, String mapping) {
		client.admin().indices().putMapping(Requests.putMappingRequest(INDEX_NAME).type(type).source(mapping)).actionGet();
	}

	public void close() {
		if(client != null) {
			client.close();
		}
		if(node != null) {
			node.close();
		}
	}
	
	private void optimizeForBulk() {
		numOfReplicas = client.admin().cluster().prepareState().execute().actionGet().getState().getMetaData().index(INDEX_NAME).getSettings().get("number_of_replicas");
		Map<String, Object> settings = new HashMap<String, Object>();
		settings.put("refresh_interval", "-1");
		settings.put("number_of_replicas", "0");		
		client.admin().indices().prepareUpdateSettings(INDEX_NAME).setSettings(settings).execute().actionGet();
	}
	
	private void revertBulkOptimization() {
		Map<String, Object> settings = new HashMap<String, Object>();
		settings.put("refresh_interval", "1s");
		settings.put("number_of_replicas", numOfReplicas);		
		client.admin().indices().prepareUpdateSettings(INDEX_NAME).setSettings(settings).execute().actionGet();
		client.admin().indices().prepareOpen(INDEX_NAME).execute().actionGet();
	}
}
