package com.dana1.nos.collections;

import java.io.Serializable;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Set;
import java.util.UUID;

import org.apache.commons.collections.Transformer;
import org.apache.commons.collections.iterators.TransformIterator;
import org.apache.commons.collections.keyvalue.AbstractMapEntry;

import com.dana1.nos.Storage;
import com.dana1.nos.generator.TimeBasedUUIDGenerator;
import com.dana1.nos.util.IteratorOfIterators;
import com.dana1.nos.util.MetaSupport;
import com.dana1.nos.util.MetaSupport.MetaKey;

/**
 * A Map which synchronizes its entries to an underlying storage mechanism using a typical hashing strategy.
 * @author Dana
 *
 * @param <K> the key type; must be Serializable so it can be persisted
 * @param <V> the value type; must be Serializable so it can be persisted
 */
public class TransparentHashMap<K extends Serializable, V extends Serializable> extends AbstractMap<K,V>  {

	/** Unique namespace prefix for this map's buckets in the backing storage. */
	private transient String identifier = TimeBasedUUIDGenerator.timeBasedUUID().toString();

	/** Backing storage; presumably injected externally — no constructor here assigns it (TODO confirm). */
	private transient Storage storage;

	// Meta key linking a HashRecord to the bucket (list) containing it. A list is used rather
	// than a map because every entry in a bucket shares the same hash code anyway.
	private transient MetaKey<LinkedList<HashRecord<K,V>>> BUCKET = new MetaKey<>("Records List");
	// Meta key linking a live key/value object back to its HashRecord.
	private transient MetaKey<HashRecord<K,V>> RECORD = new MetaKey<>("Record of Key");

	/** A single key/value pair; hash buckets are stored as LinkedLists of these. */
	private static class HashRecord<K extends Serializable,V extends Serializable> implements Serializable {
		private static final long serialVersionUID = 1L;
		K key;
		V value;
		public HashRecord(K key, V value) {
			this.key = key;
			this.value = value;
		}
	}

	/** Storage identifier of the bucket that holds entries with this key's hash code. */
	private String bucketIdent(Object key) {
		return identifier + "/" + key.hashCode();
	}

	/**
	 * A live view over all entries, streamed bucket-by-bucket from storage.
	 */
	@Override
	public Set<Entry<K, V>> entrySet() {
		return new AbstractSet<Entry<K,V>>() {
			@SuppressWarnings("unchecked")
			@Override
			public Iterator<Entry<K, V>> iterator() {
				// Iterate every bucket stored under this map's prefix, flattening each
				// bucket's records into a single entry iterator.
				return new IteratorOfIterators<>(new TransformIterator(storage.getByPrefix(LinkedList.class, identifier + "/"), new Transformer(){
					@Override
					public Object transform(Object input) {
						return entriesOf((LinkedList<HashRecord<K, V>>)input);
					}
				}));
			}

			/**
			 * Inefficient implementation of the size operation, which iterates the
			 * whole map in order to count elements.
			 */
			@Override
			public int size() {
				int size = 0;
				for (Iterator<LinkedList> li = storage.getByPrefix(LinkedList.class, identifier + "/"); li.hasNext(); )
					size += li.next().size();
				return size;
			}
		};
	}

	/**
	 * Naive implementation of the size operation which relies on entrySet.size()
	 */
	@Override
	public int size() {
		return super.size();
	}

	/**
	 * Iterates the records of one bucket as Map entries, refreshing the BUCKET/RECORD
	 * metas on each record so later put/remove calls can locate it without a storage scan.
	 * @param records the bucket to iterate, possibly null
	 * @return an entry iterator (empty when the bucket is null)
	 */
	@SuppressWarnings("unchecked")
	private Iterator<Entry<K,V>> entriesOf(final LinkedList<HashRecord<K,V>> records)
	{
		if (records==null)
			return Collections.emptyIterator();
		return new TransformIterator(records.iterator(), new Transformer() {
			@Override
			public Object transform(Object input) {
				HashRecord<K,V> record = (HashRecord<K, V>)input;
				MetaSupport.meta(record,BUCKET,records);
				MetaSupport.meta(record.value,RECORD,record);
				MetaSupport.meta(record.key,RECORD,record);
				return new AbstractMapEntry(record.key,record.value) {
					@Override
					public Object setValue(Object value) {
						// delegate to put so storage and metadata stay consistent
						return put((K)getKey(),(V)value);
					}
				};
			}
		});
	}

	/**
	 * Hash lookup via the Map interface. Overrides AbstractMap.get(Object) — which would
	 * otherwise fall back to a full entrySet scan — and delegates to the typed lookup.
	 * @return the mapped value, or null if absent (or the key cannot be present here)
	 */
	@SuppressWarnings("unchecked")
	@Override
	public V get(Object key) {
		// every key stored by this map is Serializable; anything else cannot be present
		if (!(key instanceof Serializable))
			return null;
		return get((K) key);
	}

	/**
	 * Typed hash lookup: loads only the single bucket matching the key's hash code.
	 * Retained with this signature for source compatibility with existing callers.
	 */
	public V get(K key) {
		for (Iterator<Entry<K,V>> i = entriesOf(storage.get(LinkedList.class, bucketIdent(key))); i.hasNext(); )
		{
			Entry<K,V> candidate = i.next();
			if (candidate.getKey().equals(key))
				return candidate.getValue();
		}
		return null;
	}

	/**
	 * Inserts or updates the mapping for {@code key}, persisting the affected bucket.
	 * @param key non-null key
	 * @param value value to associate with the key
	 * @return the previous value associated with the key, or null
	 * @throws IllegalArgumentException when key is null
	 * @throws ConcurrentModificationException when the live object metadata disagrees
	 *         with the stored bucket contents
	 */
	@SuppressWarnings({ "unchecked" })
	public V put(K key, V value) {
		if (key==null)
			throw new IllegalArgumentException("Null keys are not supported");

		// prefer a record already attached to the live key or value objects
		HashRecord<K,V> hashRecord = MetaSupport.meta(key, RECORD);
		if (hashRecord==null)
			hashRecord = MetaSupport.meta(value,RECORD);
		if (hashRecord == null)
			hashRecord = new HashRecord<K, V>(key, null);

		// warn if attempting to push a live object with a different key
		if (!hashRecord.key.equals(key))
			throw new ConcurrentModificationException("Value is associated with a different key");

		V oldValue = hashRecord.value;
		hashRecord.value = value;

		String ident = bucketIdent(key);

		// find existing bucket, and add record if it doesnt exist
		LinkedList<HashRecord<K,V>> hashBucket = MetaSupport.meta(hashRecord,BUCKET);
		if (hashBucket == null) { // this is an insert case:
			hashBucket = storage.get(LinkedList.class, ident);
			if (hashBucket == null)
				hashBucket = new LinkedList<>();

			// complain if record already in bucket
			for (HashRecord<K,V> record: hashBucket)
				if (record.key.equals(key))
					throw new ConcurrentModificationException("Inserting over existing record");

			// add new record to bucket
			hashBucket.add(hashRecord);
		}

		// push the whole bucket back to storage
		storage.put(ident,hashBucket);

		// update metadata on the live objects
		MetaSupport.meta(hashRecord, BUCKET, hashBucket);
		MetaSupport.meta(key, RECORD, hashRecord);
		MetaSupport.meta(value, RECORD, hashRecord);

		// clean reference to the old value's metadata — but only when it really is a
		// different object; previously re-putting the same value object clobbered the
		// RECORD meta set just above
		if (oldValue!=null && oldValue!=value)
			MetaSupport.meta(oldValue,RECORD,null);

		// return as expected
		return oldValue;
	}

	/**
	 * Removes the mapping for {@code key}, persisting (or deleting) the affected bucket.
	 * Relies on the RECORD meta attached to the live key object, so only key objects
	 * previously registered with this map can be removed.
	 * @param key non-null, Serializable key previously registered with this map
	 * @return the removed value, or null if no record in the bucket matched the key
	 * @throws IllegalArgumentException when key is null, not Serializable, or unknown
	 * @throws ConcurrentModificationException when the metadata disagrees with the key
	 */
	@SuppressWarnings("unchecked")
	@Override
	public V remove(Object key) {
		if (key==null)
			throw new IllegalArgumentException("Null keys are not supported");

		if (!(key instanceof Serializable))
			throw new IllegalArgumentException("Key is not serializable");

		HashRecord<K,V> hashRecord = MetaSupport.meta((K)key, RECORD);
		if (hashRecord == null)
			throw new IllegalArgumentException("Key is not associated with this map");

		// warn if attempting to remove a live object with a different key
		if (!hashRecord.key.equals(key))
			throw new ConcurrentModificationException("Key does not match record");

		String ident = bucketIdent(key);

		// get bucket for record
		LinkedList<HashRecord<K,V>> hashBucket = MetaSupport.meta(hashRecord,BUCKET);
		if (hashBucket == null)
			throw new ConcurrentModificationException("Key not associated with hash bucket");

		// remove record from bucket matching key
		for (Iterator<HashRecord<K,V>> recs = hashBucket.iterator(); recs.hasNext();) {
			HashRecord<K,V> rec = recs.next();
			if (rec.key.equals(key)) {
				recs.remove();
				if (!hashBucket.isEmpty())
					storage.put(ident,hashBucket);
				else
					// delete the bucket under its storage key; previously this passed the
					// bucket object itself (storage.remove(hashBucket)), which does not
					// address the entry that put() stored under 'ident'
					storage.remove(ident);
				MetaSupport.meta((K)key,RECORD, null);
				MetaSupport.meta(rec.key,RECORD, null);
				MetaSupport.meta(rec.value,RECORD, null);
				MetaSupport.meta(rec,BUCKET, null);
				return rec.value;
			}
		}
		return null;
	}
}
