/** To eliminate duplicate tuples from the result **/
/**
 *  added by akzing(zengzh@comp.nus.edu.sg)
 */
package qp.operators;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Vector;

import qp.utils.Attribute;
import qp.utils.Batch;
import qp.utils.Schema;
import qp.utils.Tuple;

/**
 * Duplicate-elimination operator.
 *
 * If the base operator is an Orderby, its (already sorted) output is consumed
 * directly. Otherwise the base output is materialized to a temp file, sorted
 * on the first attribute via ExternalMergeSort, and read back, so that tuples
 * with equal leading attribute values arrive adjacently and duplicates can be
 * detected with a bounded set of candidate tuples (oldTuples).
 */
public class Distinct extends Operator
{

	Operator base;

	int batchsize; // number of tuples per outbatch
	Vector<Tuple> oldTuples; // tuples already emitted that share the current sort-key value
	boolean isfirst; // true until the very first input tuple has been consumed

	int cur; // cursor into the current input batch; resume point across next() calls
	boolean eos; // true once the sorted object stream (or base) is exhausted
	ObjectInputStream SortedObejctStream; // sorted materialization of base's output (non-Orderby case)
	String sortedFile; // name of the sorted temp file, kept so close() can delete it

	/**
	 * The following fields are required during execution of the Distinct
	 * Operator
	 **/

	Batch inbatch;
	Batch outbatch;

	/**
	 * @param base child operator whose output is to be de-duplicated
	 * @param type operator type constant (passed to Operator)
	 */
	public Distinct(Operator base, int type)
	{
		super(type);
		this.base = base;
		this.isfirst = true;
		this.oldTuples = new Vector<Tuple>();
	}

	public void setBase(Operator base)
	{
		this.base = base;
	}

	public Operator getBase()
	{
		return base;
	}

	/**
	 * Opens the connection to the base operator. When the base is not an
	 * Orderby, fully materializes and externally sorts its output on the
	 * first attribute so that duplicates become adjacent.
	 *
	 * @return true on success; false on any I/O or sort failure
	 **/
	public boolean open()
	{
		/** set number of tuples per batch **/
		int tuplesize = schema.getTupleSize();
		if (tuplesize <= 0)
			return false; // guard against division by zero on a degenerate schema
		batchsize = Batch.getPageSize() / tuplesize;

		if (!base.open())
			return false;

		/**
		 * if the base operation is orderby, then just reuse the result of
		 * orderby; else we need to sort on one attribute to find the duplicate
		 * tuples.
		 */
		if (base.getOpType() == OpType.Orderby)
		{
			// base stays open; next() pulls batches from it directly
			return true;
		}

		String unSortedFile = "DistinctTemp.tem";
		ObjectOutputStream oos = null;
		try
		{
			oos = new ObjectOutputStream(new FileOutputStream(unSortedFile));
			Batch tempOutputBatch;
			while ((tempOutputBatch = base.next()) != null)
			{
				oos.writeObject(tempOutputBatch);
			}
		} catch (Exception e)
		{
			System.out.println("Distinct Open():write " + unSortedFile + " error");
			return false;
		} finally
		{
			// close the ObjectOutputStream (not the raw FileOutputStream first):
			// this flushes buffered serialized data and closes the file handle
			if (oos != null)
			{
				try
				{
					oos.close();
				} catch (IOException ignored)
				{
					// best-effort cleanup; data already written or error reported above
				}
			}
		}

		Vector<Attribute> attrVector = new Vector<Attribute>(); // convert attr to Vector

		Attribute attr = base.getSchema().getAttribute(0); // sort on the first attribute

		attrVector.add(attr);
		ExternalMergeSort es = new ExternalMergeSort(unSortedFile, attrVector, base.getSchema());
		try
		{
			sortedFile = es.MergeSort();
		} catch (Exception e)
		{
			System.out.println("Distinct Open():MergeSort error");
			return false;
		}
		if (sortedFile == null)
		{
			// without a sorted file next() cannot run; fail now instead of NPE later
			return false;
		}
		try
		{
			SortedObejctStream = new ObjectInputStream(new FileInputStream(sortedFile));
		} catch (IOException e)
		{
			System.out.println("Distinct Open():read " + sortedFile + " error");
			return false;
		}
		return base.close();
	}

	/**
	 * Returns the next output batch of distinct tuples, or null at end of
	 * stream. Because input arrives sorted on the first attribute, only the
	 * tuples sharing the current key value (oldTuples) need to be compared
	 * against each candidate.
	 */
	public Batch next()
	{
		if (eos)
			return null;
		outbatch = new Batch(batchsize);
		while (true)
		{
			if (cur == 0) // previous input batch fully consumed; fetch the next one
			{
				try
				{
					if (base.getOpType() == OpType.Orderby)
					{
						inbatch = base.next();
						if (inbatch == null)
						{
							eos = true;
							return outbatch;
						}
					} else
					{
						inbatch = (Batch) SortedObejctStream.readObject();
					}
				} catch (IOException io)
				{
					// EOFException (and kin) signals the end of the sorted stream
					eos = true;
					return outbatch;
				} catch (ClassNotFoundException c)
				{
					System.out.println("Distinct next():read SortedObejctStream error");
					System.exit(1);
				}
			}
			if (isfirst) // first ever tuple: unconditionally distinct
			{
				if (inbatch.size() == 0)
				{
					cur = 0; // empty batch; go fetch the next one
					continue;
				}
				Tuple oldTuple = inbatch.elementAt(0);
				isfirst = false;
				cur++;
				oldTuples.add(oldTuple);
				outbatch.add(inbatch.elementAt(0));
				if (outbatch.isFull())
				{
					return outbatch;
				}
			}
			for (int i = cur; i < inbatch.size(); i++)
			{
				int originalsize = oldTuples.size();
				for (int j = 0; j < originalsize; j++)
				{
					// AreTuplesEqual: 0 = full duplicate, -1 = same sort key but
					// different elsewhere, 1 = new sort-key value
					int compare = Tuple.AreTuplesEqual(inbatch.elementAt(i), oldTuples.elementAt(j), schema.getNumCols());
					if (compare == 0) // duplicate: drop it
					{
						break; // skip out of the for loop
					} else if (compare == -1) // same key, distinct tuple
					{
						if (j == (originalsize - 1))
						{
							// survived comparison against every candidate: emit it
							oldTuples.add(inbatch.elementAt(i));
							outbatch.add(inbatch.elementAt(i));
							if (outbatch.isFull())
							{
								cur = i + 1;
								return outbatch;
							}
						}
					} else
					// compare==1: new sort-key value; stale candidates can be evicted
					{
						oldTuples.remove(j);
						j--;
						originalsize--;

						if (j == (originalsize - 1))
						{
							oldTuples.add(inbatch.elementAt(i));
							outbatch.add(inbatch.elementAt(i));
							if (outbatch.isFull())
							{
								cur = i + 1;
								return outbatch;
							}
						}

					}
				}
			}
			cur = 0; // batch exhausted; loop back to read the next one
		}
	}

	/**
	 * Closes the operator: releases the sorted stream, deletes temp files
	 * immediately, and closes the base operator if it was left open
	 * (the Orderby path keeps it open for use in next()).
	 */
	public boolean close()
	{
		if (SortedObejctStream != null)
		{
			try
			{
				SortedObejctStream.close();
			} catch (IOException ignored)
			{
				// best-effort cleanup
			}
		}
		// delete temp files now rather than deferring to JVM exit
		new File("DistinctTemp.tem").delete();
		if (sortedFile != null && sortedFile.length() > 0)
		{
			new File(sortedFile).delete();
		}
		if (base.getOpType() == OpType.Orderby)
		{
			return base.close();
		}
		return true;
	}

	/** Deep-copies this operator (and its base) for plan transformation. */
	public Object clone()
	{
		Operator newbase = (Operator) base.clone();
		Distinct newdistinct = new Distinct(newbase, optype);
		Schema newSchema = newbase.getSchema();
		newdistinct.setSchema(newSchema);
		return newdistinct;
	}
}
