package com.ibm.cps.processors;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

import org.apache.log4j.Logger;
import org.joda.time.DateTime;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.KryoSerializable;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.gson.Gson;
import com.ibm.cps.dft.Complex;
import com.ibm.cps.dft.FFT;
import com.ibm.cps.message.MessageFields;
import com.ibm.cps.message.ParsedDataMessage;
import com.ibm.cps.newmessage.AbstractMetadata;
import com.ibm.cps.newmessage.MetadataInputSchema;
import com.ibm.datamodel.timeseries.TimeSeriesSchema;
import com.ibm.interfaces.ITimeSeriesSchema;
import com.ibm.util.JSONUtility;
import com.ibm.util.RandomUtil;
import com.ibm.util.exception.CPSException;

/*
 * Trickle-compression processor: splits each windowed raw-data message into a
 * low-frequency (LoF) trickle, a spike trickle, and a randomly-projected
 * high-frequency (HiF) trickle, and emits them as one compressed message.
 */

@SuppressWarnings({"unchecked", "rawtypes"})
public class TrickleProcessor extends BasicProcessor implements KryoSerializable{
	/** Schema of the compressed output messages; constructed in {@link #open()}. */
	private ITimeSeriesSchema outputSchema;
	/** Schema of the incoming raw-data messages; used to size the output schema. */
	private ITimeSeriesSchema parentSchema;
	/** Field-name prefixes for the three trickles in the output schema. */		// Hard Code
	private String [] outputFieldName = {"LoF", "Spike", "HiF"};
	// BUGFIX: removed stray second ';'. Made the logger static final so Gson's
	// toString() below does not try to reflect over a log4j Logger instance.
	private static final Logger logger = Logger.getLogger(TrickleProcessor.class);
	
	// Trickle Compression Parameters 
	private int M_LoF = 60;		// keep one LoF sample per M_LoF raw samples
	private int T_Spike = 3;	// spike threshold, in standard deviations of the residual
	private int K_HiF = 30;		// HiF projection compresses N samples down to N / K_HiF
	
	/**
	 * @param tenantId		tenant that owns this processor
	 * @param processorId	unique id of this processor instance
	 * @param source		upstream source id
	 * @param parentSchema	schema of the raw-data messages this processor consumes
	 * @throws CPSException	propagated from the BasicProcessor constructor
	 */
	public TrickleProcessor(String tenantId, String processorId, String source, ITimeSeriesSchema parentSchema) throws CPSException{
		super(tenantId, processorId, source, parentSchema);
		// BUGFIX: this field was never assigned, so constructSchema() and
		// write(Kryo, Output) dereferenced null.
		this.parentSchema = parentSchema;
		logger.info("parent schema for trickle compression is " + parentSchema.getSchemaID());
	}
	
	/** Builds the output schema once the processor is wired up. */
	@Override
	public void open() throws CPSException {
		this.outputSchema = constructSchema();
	}
	
	/**
	 * Pad the input up to the next power-of-two length (repeating the last
	 * sample) so it meets the radix-2 FFT's length requirement.
	 * Assumes data is non-empty — TODO confirm upstream windows are never empty.
	 */
	private Double [] formData(Double [] data){
		int len = data.length;
		int powerIndex = 0;
		while (Math.pow(2, powerIndex) < len){
			powerIndex++;
		}
		
		int newLen = (int) Math.pow(2, powerIndex);
		Double [] newData = new Double [newLen];
		for (int i = 0; i < len; i++){
			newData[i] = data[i];
		}
		// Extend with the last raw sample instead of zeros to avoid an
		// artificial step at the end of the window.
		for (int i = len; i < newLen; i++){
			newData[i] = data[len - 1];
		}
		return newData;
	}
	
	/**
	 * Low-pass filter via FFT: zero the middle (high-frequency) bins and
	 * inverse-transform, returning only the first rawLen real parts.
	 *
	 * NOTE(review): the retained band is computed from the raw length, not the
	 * padded length, and conjugate symmetry of the spectrum is not strictly
	 * preserved — confirm this matches the intended filter design.
	 */
	private Double [] lowPass(Double [] data, int M_LoF){
		int rawLen = data.length;
		Double [] re_Data = formData(data);
		int newLen = re_Data.length;
		
		// Promote to complex and transform to the frequency domain.
		Complex [] com_Data = new Complex[newLen];
		for (int i = 0; i < newLen; i++){
			com_Data[i] = new Complex(re_Data[i], 0);
		}
		Complex [] fre_Data = FFT.fft(com_Data);
		
		// Zero everything outside the lowest rawLen/M_LoF bins at each end.
		int threshold = rawLen / M_LoF;
		for (int i = rawLen - threshold - 1; i >= threshold; i--){
			fre_Data[i] = new Complex(0, 0);
		}
		
		// Inverse FFT; keep only the real parts of the original window.
		com_Data = FFT.ifft(fre_Data);
		Double [] result = new Double [rawLen];
		for (int i = 0; i < rawLen; i++){
			result[i] = com_Data[i].re();
		}
		return result;
	}
	
	/**
	 * Generate a K x N sparse random projection matrix (K = N / K_HiF) with
	 * entries +sqrt(3)/K, -sqrt(3)/K and 0 at probabilities 1/6, 1/6, 2/3.
	 *
	 * NOTE(review): Achlioptas-style sparse projections usually scale entries
	 * by sqrt(3/K), not sqrt(3)/K — confirm the intended normalization.
	 * NOTE(review): Math.random() makes the matrix non-deterministic and it is
	 * not persisted, so exact reconstruction downstream is impossible.
	 */
	private Double [][] projectionMatrixGenerate(int N){
		int K = N / K_HiF;
		Double [][] R = new Double [K][N];
		double l1 = (double) 1 / 6;
		double l2 = (double) 1 / 3;
		for (int i = 0; i < K; i++){
			for (int j = 0; j < N; j++){
				double indicator = Math.random();
				if (indicator < l1){
					R[i][j] = Math.sqrt(3) / K;
				}
				else if (indicator < l2){
					R[i][j] = -1 * (Math.sqrt(3) / K);
				}
				else{
					R[i][j] = (double) 0;
				}
			}
		}
		return R;
	}
	
	/** Matrix-vector product: result = R * inputVector. */
	private Double [] multiple(Double [][] R, Double [] inputVector){
		Double [] result = new Double [R.length];
		for (int i = 0; i < R.length; i++){
			result[i] = (double) 0;
			for (int j = 0; j < inputVector.length; j++){
				result[i] += (R[i][j] * inputVector[j]);
			}
		}
		return result;
	}
	
	/**
	 * Pack the three trickles into one flat array.
	 *
	 * Layout: [len_1, len_2, len_3, v1 zero-padded to len_2, v2, v3 zero-padded
	 * to len_2]. Each trickle occupies a full len_2-sized segment so the result
	 * always has 3 + 3*len_2 entries, matching the schema built in
	 * constructSchema(); the three length headers let a decompressor recover
	 * the true trickle sizes. Assumes len_1 &lt;= len_2 and len_3 &lt;= len_2
	 * (guaranteed by divide(), since M_LoF and K_HiF are &gt;= 1).
	 */
	private Double [] combine(Double [] vector_1, Double [] vector_2, Double [] vector_3){
		int len_1 = vector_1.length;
		int len_2 = vector_2.length;
		int len_3 = vector_3.length;
		// BUGFIX: was "len_1 + len_2 + len_3 + 3", which is smaller than the
		// highest index written below (vector_2 starts at offset len_2 + 3),
		// causing ArrayIndexOutOfBoundsException whenever len_1 + len_3 < len_2.
		Double [] result = new Double[3 * len_2 + 3];
		result[0] = (double) len_1;
		result[1] = (double) len_2;
		result[2] = (double) len_3;
		for (int i = 0; i < len_1; i++){
			result[i + 3] = vector_1[i];
		}
		for (int i = len_1; i < len_2; i++){
			result[i + 3] = (double) 0;
		}
		for (int i = 0; i < len_2; i++){
			result[i + len_2 + 3] = vector_2[i];
		}
		for (int i = 0; i < len_3; i++){
			result[i + len_2 * 2 + 3] = vector_3[i];
		}
		for (int i = len_3; i < len_2; i++){
			result[i + len_2 * 2 + 3] = (double) 0;
		}
		return result;
	}
	
	/**
	 * Split one window of raw samples into the LoF, spike and HiF trickles and
	 * pack them into a single flat array (see {@link #combine}).
	 *
	 * @param rawData one window of raw samples (must be non-empty)
	 * @return three length headers followed by the three trickles, each padded
	 *         to the raw window length
	 */
	protected Double [] divide(Double [] rawData) throws CPSException{
		int rawData_len = rawData.length;
		Double [] spikeTrickle = new Double [rawData_len];
		Double [] hifData = new Double [rawData_len];
		Double [] restData = new Double [rawData_len];
		
		// LoF trickle: low-pass the signal, then keep every M_LoF-th sample.
		// BUGFIX: ceiling division — the loop below writes
		// floor((rawData_len-1)/M_LoF)+1 entries, which overflows a plain
		// rawData_len/M_LoF array whenever rawData_len is not a multiple of M_LoF.
		int lof_len = (rawData_len + M_LoF - 1) / M_LoF;
		Double [] lofTrickle = new Double [lof_len];
		Double [] lofData = lowPass(rawData, M_LoF);
		for (int i = 0; i < rawData_len; i++){
			restData[i] = rawData[i] - lofData[i];
			if (i % M_LoF == 0){
				lofTrickle[i / M_LoF] = lofData[i];
			}
		}
		
		// Spike trickle: residual samples more than T_Spike standard
		// deviations from the residual mean; everything else is HiF signal.
		double mean = 0;
		for (int i = 0; i < rawData_len; i++){
			mean += restData[i];
		}
		mean /= rawData_len;
		double stdDev = 0;
		for (int i = 0; i < rawData_len; i++){
			stdDev += Math.pow((restData[i] - mean), 2);
		}
		stdDev = Math.sqrt(stdDev / rawData_len);
		double threshold = T_Spike * stdDev;
		for (int i = 0; i < rawData_len; i++){
			if (Math.abs(restData[i] - mean) >= threshold){
				spikeTrickle[i] = restData[i] - mean;
				hifData[i] = mean;
			}
			else{
				spikeTrickle[i] = (double) 0;
				hifData[i] = restData[i];
			}
		}
		
		// HiF trickle: compress the remaining signal via random projection.
		Double [][] R = projectionMatrixGenerate(rawData_len);
		Double [] hifTrickle = multiple(R, hifData);
		
		// Append the three trickles behind their length headers.
		return combine(lofTrickle, spikeTrickle, hifTrickle);
	}
	
	/**
	 * Compress one windowed raw-data message into a single trickle message.
	 * The first two values of the incoming message are the time-series key and
	 * the timestamp; the remainder are the raw samples.
	 *
	 * @return a singleton collection holding the compressed message, or null
	 *         if the window contained a null value
	 */
	@Override
	public Collection<ParsedDataMessage> execute(ParsedDataMessage dataMessage) throws CPSException{
		Collection<ParsedDataMessage> retMsgs = new ArrayList<ParsedDataMessage> ();
		ParsedDataMessage message = (ParsedDataMessage) dataMessage;
		
		// Copy the raw samples (everything after key and timestamp) into a
		// double array.
		Comparable [] windowValues = message.getValues();
		Double [] stepdata = new Double [windowValues.length - 2];
		for (int i = 0; i < stepdata.length; i++){
			Comparable v = windowValues[i + 2];
			if (v == null){
				// BUGFIX: the message previously concatenated the array itself
				// (printing only its identity hash) and was missing a space.
				logger.error("TrickleProcessor: received null value in " + Arrays.toString(windowValues));
				return null;
			}
			stepdata[i] = ((Number) v).doubleValue();
		}
		
		Double [] trickleArr = divide(stepdata);
		if (trickleArr == null){
			// Defensive; divide() currently never returns null.
			return null;
		}
		
		// Re-attach key and timestamp in front of the compressed payload.
		int length = trickleArr.length;
		Comparable [] values = new Comparable[length + 2];
		values[0] = message.getTsKey();
		values[1] = message.getTimestamp();
		for (int i = 0; i < length; i++){
			values[i + 2] = trickleArr[i];
		}
		ParsedDataMessage retmesg = outputSchema.buildParsedDataMessage(values);
		retMsgs.add(retmesg);
		
		// BUGFIX: debug output previously went to System.out; route it through
		// the logger and only format it when debug logging is enabled.
		if (logger.isDebugEnabled()){
			logger.debug("==============DFT output============");
			logger.debug(Arrays.toString(outputSchema.getTagNames()));
			logger.debug(Arrays.toString(retmesg.getObjectValues()));
		}
		return retMsgs;
	}
	
	/**
	 * Build the output schema: one "length" column per trickle, followed by a
	 * full-length block of double columns per trickle, where the block length
	 * is the number of raw samples per message (parent tag count minus the key
	 * and timestamp columns). This matches the 3 + 3*len_2 layout produced by
	 * combine().
	 */
	private ITimeSeriesSchema constructSchema() throws CPSException{
		ArrayNode arrayNode = JSONUtility.newArrayNode();
		for (int i = 0; i < outputFieldName.length; i++){
			ObjectNode node = JSONUtility.newObjectNode();
			node.put(MessageFields.NAME, "arr" + i + "length");
			node.put(MessageFields.DATATYPE, "long");
			arrayNode.add(node);
		}
		
		int length = parentSchema.getTagCount() - 2;
		for (int j = 0; j < outputFieldName.length; j++){
			for (int i = 0; i < length; i++){
				ObjectNode node = JSONUtility.newObjectNode();
				node.put(MessageFields.NAME, outputFieldName[j] + "(" + i + ")");
				node.put(MessageFields.DATATYPE, "double");
				arrayNode.add(node);
			}
		}
		String schemaid = RandomUtil.getRandomId();
		return new TimeSeriesSchema(tenantId, schemaid, arrayNode, (new DateTime()).getMillis());
	}
	
	/** No resources to release. */
	@Override
	public void dispose() {
	}

	/**
	 * JSON dump of this processor's state via Gson reflection.
	 * NOTE(review): serializes the schema fields reflectively — confirm Gson
	 * can handle the concrete schema implementation.
	 */
	@Override
	public String toString() {
		Gson gson = new Gson();
		return gson.toJson(this);
	}
	
	/**
	 * Kryo serialization: persists only the two schemas (class tag + object
	 * each). Assumes open() has run, i.e. outputSchema is non-null.
	 */
	@Override
	public void write(Kryo kryo, Output output){
		kryo.writeClass(output, outputSchema.getClass());
		kryo.writeObject(output, outputSchema);
		kryo.writeClass(output, parentSchema.getClass());
		kryo.writeObject(output, parentSchema);
	}
	
	/**
	 * Kryo deserialization counterpart of {@link #write}.
	 * NOTE(review): the compression parameters (M_LoF, T_Spike, K_HiF) are not
	 * serialized and rely on field initializers running when Kryo instantiates
	 * the object — confirm the configured instantiator strategy does so.
	 */
	@Override
	public void read (Kryo kryo, Input input){
		Class<? extends ITimeSeriesSchema> outputSchemaClass = kryo.readClass(input).getType();
		this.outputSchema = kryo.readObject(input, outputSchemaClass);
		Class<? extends ITimeSeriesSchema> parentSchemaClass = kryo.readClass(input).getType();
		this.parentSchema = kryo.readObject(input, parentSchemaClass);
	}
	
	/** @return the output (compressed) schema built in {@link #open()}. */
	@Override
	public ITimeSeriesSchema getTimeSeriesSchema() {
		return outputSchema;
	}

}
