package com.nulldev.util.concurrency.threadIt.v4.sched.impl.CAS;

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import com.nulldev.util.VariableAPI.MathUtil;
import com.nulldev.util.concurrency.threadIt.v4.emapi.ExecutorManager;
import com.nulldev.util.concurrency.threadIt.v4.emapi.IExecutorManager;
import com.nulldev.util.concurrency.threadIt.v4.emapi.extensions.ExecutorFeatureExtensions;
import com.nulldev.util.concurrency.threadIt.v4.emapi.extensions.ExtendedExecutor;
import com.nulldev.util.concurrency.threadIt.v4.emapi.extensions.IFutureTaskFlags;
import com.nulldev.util.concurrency.threadIt.v4.emapi.uft.Executable;
import com.nulldev.util.concurrency.threadIt.v4.emapi.uft.Returnable;
import com.nulldev.util.concurrency.threadIt.v4.sched.ASyncScheduler;
import com.nulldev.util.data.Variables;
import com.nulldev.util.data.Arrays.queues.pbqe.PriorityBlockingQueueEnhanced;
import com.nulldev.util.internal.backport.concurrency9.Lists;

/**
 * This is an experiment but seems stable enough. <br>
 * <br>
 * Differences over CAAS 1.0:
 * <ul>
 * <li>Priority sorting applies to ALL submissions.</li>
 * <li>Larger internal queues.</li>
 * <li>Inherits some L1 optimizations from CAAS 1.0</li>
 * </ul>
 */
public class ContextuallyAwareAsyncScheduler2 implements ASyncScheduler, IFutureTaskFlags {

	/** Single intake queue; {@link #PRIORITY_SORTER} orders every submission. */
	private final PriorityBlockingQueueEnhanced<Runnable> QUEUE;
	/** Fallback batch size when the executor does not advertise a preference. */
	private static final int EXTENDED_BATCH_SIZE = 64;
	/** Initial capacity hint for the intake queue. */
	private static final int QUEUE_SIZE = 384;

	/**
	 * Orders queued work so latency-sensitive tasks drain first. Each task is
	 * mapped onto a total dispatch rank (see {@link #dispatchRank(Runnable)})
	 * and tasks are compared by rank. This keeps the comparator consistent
	 * with the {@link Comparator} contract — the previous form returned -1
	 * from both compare(a,b) and compare(b,a) when both tasks carried the
	 * same flag, which violates antisymmetry and lets the priority heap
	 * order inconsistently.
	 */
	private static final Comparator<Runnable> PRIORITY_SORTER = new Comparator<Runnable>() {

		@Override
		public int compare(final Runnable o1, final Runnable o2) {
			return Integer.compare(dispatchRank(o1), dispatchRank(o2));
		}
	};

	/**
	 * Dispatch rank of a queued task: 0 = high priority, 1 = prefers low
	 * latency, 2 = has a synchronous dependency, 3 = everything else
	 * (plain Runnables, nulls and flag-less executables).
	 */
	private static int dispatchRank(final Runnable r) {
		if (!(r instanceof Executable<?>))
			return 3;
		final Executable<?> e = (Executable<?>) r;
		/* Never defer high priority work. */
		if ((int) e.getFlag(FT_HIGH_PRIORITY) == FT_TRUE)
			return 0;
		/* Then anything that asked for low latency. */
		if ((int) e.getFlag(FT_PREFER_LOW_LATENCY) == FT_TRUE)
			return 1;
		/* Then anything a synchronous caller is blocked on. */
		if ((int) e.getFlag(FT_HAS_SYNCHRONOUS_DEPENDENCY) == FT_TRUE)
			return 2;
		return 3;
	}

	private final IExecutorManager execMan;
	/* Cached executor capability flags, re-derived by refreshExecutorProfile(). */
	private final AtomicBoolean allowL1Batching = new AtomicBoolean(ExecutorManager.ENABLE_BATCH_OPERATIONS);
	private final AtomicBoolean allowL2Batching = new AtomicBoolean(ExecutorManager.ENABLE_BATCH_OPERATIONS);
	private final AtomicBoolean allowL1Optimizations = new AtomicBoolean(ExecutorManager.ENABLE_BATCH_OPERATIONS);
	private final AtomicBoolean executorSupportsBatching = new AtomicBoolean();
	private final AtomicBoolean isExtended = new AtomicBoolean();
	private final AtomicInteger preferredBatchSize = new AtomicInteger(EXTENDED_BATCH_SIZE);

	/**
	 * Creates a scheduler bound to the given executor manager.
	 *
	 * @param execMan the executor manager this scheduler feeds; never null.
	 */
	public ContextuallyAwareAsyncScheduler2(final IExecutorManager execMan) {
		Variables.requireNonNull(execMan, "execMan");
		this.execMan = execMan;
		this.QUEUE = new PriorityBlockingQueueEnhanced<Runnable>(QUEUE_SIZE, PRIORITY_SORTER);
	}

	/**
	 * Re-derives the cached batching/optimization capability flags from the
	 * given executor. This logic was previously duplicated (with the clauses
	 * in a different order) in {@link #executorUpdated(ExecutorService)} and
	 * {@link #run()}; it is now shared so the two cannot drift.
	 */
	private void refreshExecutorProfile(final ExecutorService ex) {
		final boolean extended = ex instanceof ExtendedExecutor;
		this.isExtended.set(extended);
		final boolean nativeBatching = this.execMan.supportsNativeBatching();
		final boolean taskDrain = this.execMan.supportsTaskDrain();
		final boolean waBatching = this.execMan.isWorkingAroundBatching();
		final boolean waLevelled = this.execMan.isWorkingAroundLevelledBatching();
		this.allowL1Batching.set(nativeBatching && !waBatching && !waLevelled
				&& ExecutorManager.ENABLE_BATCH_OPERATIONS && extended);
		this.allowL2Batching.set(nativeBatching && taskDrain && !waLevelled && !waBatching
				&& ExecutorManager.ENABLE_BATCH_OPERATIONS && extended);
		/* Note: L1 optimizations deliberately do not check the
		 * levelled-batching workaround (matches both original call sites). */
		this.allowL1Optimizations.set(nativeBatching && taskDrain && this.execMan.supportsSchedQueueBypass()
				&& !waBatching && ExecutorManager.ENABLE_BATCH_OPERATIONS && extended);
		this.executorSupportsBatching.set(nativeBatching);
		if (extended && ((ExtendedExecutor) ex).supportsFeature(ExecutorFeatureExtensions.EEX_query_executor_large_batches)) {
			this.preferredBatchSize.set(((ExtendedExecutor) ex).executorPreferredBatchSize());
		} else {
			this.preferredBatchSize.set(EXTENDED_BATCH_SIZE);
		}
	}

	@Override
	public void executorUpdated(final ExecutorService ex) {
		Variables.requireNonNull(ex, "ex");
		refreshExecutorProfile(ex);
	}

	/**
	 * Main dispatch loop: takes queued tasks and hands them to the executor,
	 * batching where the executor supports it and the task allows it. Runs
	 * until the executor manager reports it is no longer running. Any
	 * throwable raised during hand-off is logged and the loop continues.
	 */
	@Override
	public void run() {
		refreshExecutorProfile(this.execMan.executor());

		while (this.execMan.isRunning()) {
			try {
				final Runnable x;
				try {
					x = this.QUEUE.take();
				} catch (InterruptedException ignored) {
					/* Interrupts are treated as wake-ups; isRunning() decides on
					 * the next iteration whether we actually stop. */
					continue;
				}

				if (x == null)
					continue;

				final int SIZE = this.QUEUE.unsyncSize();
				final boolean allowBatching = __should_be_batched(x);

				/* NOTE(review): BATCH_SIZE is not declared in this file — it is
				 * presumably an inherited interface constant; confirm its source. */
				if (allowBatching && SIZE >= Math.min(BATCH_SIZE, this.preferredBatchSize.get()) && this.executorSupportsBatching.get()
						&& this.allowL2Batching.get()) {
					/* L2: batch the taken task, then drain a chunk of the queue
					 * straight into the executor. */
					final int batchSize = MathUtil.clamp(1, this.preferredBatchSize.get(), SIZE - 1);
					((ExtendedExecutor) this.execMan.executor()).batch(x);
					((ExtendedExecutor) this.execMan.executor()).executor_drainTasks(this.QUEUE, batchSize);
				} else if (allowBatching && this.allowL1Batching.get() && this.executorSupportsBatching.get()) {
					/* L1: hand the single task to the executor's batcher. */
					((ExtendedExecutor) this.execMan.executor()).batch(x);
				} else {
					/* Plain submission for non-batchable tasks or plain executors. */
					this.execMan.executor().execute(x);
				}
			} catch (Throwable t) {
				/* Keep the scheduler thread alive no matter what hand-off throws. */
				t.printStackTrace();
			}
		}
	}

	/**
	 * Whether a task may be deferred into a batch. Plain Runnables carry no
	 * flags and are always batchable; flagged executables opt out via the
	 * high-priority, low-latency or synchronous-dependency flags.
	 */
	private static boolean __should_be_batched(final Runnable r) {
		if (!(r instanceof Executable<?>))
			return true;

		final int FLAGS = ((Executable<?>) r).getFlags();

		if ((FLAGS & FT_HIGH_PRIORITY) != 0)
			return false;

		if ((FLAGS & FT_PREFER_LOW_LATENCY) != 0)
			return false;

		if ((FLAGS & FT_HAS_SYNCHRONOUS_DEPENDENCY) != 0)
			return false;

		return true;
	}

	/** Enqueues a task; silently ignores null. */
	@Override
	public void submit_async(final Runnable r) {
		if (r == null)
			return;
		this.QUEUE.put(r);
	}

	/**
	 * Enqueues an executable and returns it for chaining.
	 *
	 * @throws NullPointerException if {@code r} is null.
	 */
	@Override
	public <T> Executable<T> submit_async(final Executable<T> r) {
		if (r == null)
			throw new NullPointerException("r == null!");

		r.signalQueued();
		putInQueue(r);
		return r;
	}

	private <T> void putInQueue(final Executable<T> f) {
		/* We already do prioritization in the queue. */
		this.QUEUE.put((Runnable) f);
	}

	/**
	 * Wraps a returnable in a batching-preferred executable and enqueues it.
	 *
	 * @throws NullPointerException if {@code r} is null.
	 * @throws RuntimeException wrapping any task-construction failure.
	 */
	@Override
	public <T> Executable<T> submit_async(final Returnable<T> r) {
		if (r == null)
			throw new NullPointerException("r == null!");
		try {
			final Executable<T> f = IExecutorManager.makeTaskNoAccel(r, IFutureTaskFlags.FT_PREFER_BATCHING);
			f.signalQueued();
			putInQueue(f);
			return f;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Enqueues an executable; always returns {@code true} on success.
	 *
	 * @throws NullPointerException if {@code r} is null.
	 * @throws RuntimeException wrapping any submission failure.
	 */
	@Override
	public <T> boolean attempt_async_submit(final Executable<T> r) {
		if (r == null)
			throw new NullPointerException("r == null!");
		try {
			r.signalQueued();
			putInQueue(r);
			return true;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	private void submit_all_async_batch(final Runnable... r) {
		/* Don't sort here; the priority queue orders on insertion. */
		for (final Runnable task : r)
			this.QUEUE.put(task);
	}

	/**
	 * Bulk submission. Large arrays are split into a chunk that fits now and
	 * a deferred tail; mid-sized arrays are enqueued one by one; tiny arrays
	 * go through a single bulk addAll.
	 *
	 * @throws ArrayStoreException if the queue cannot take any of a large batch.
	 */
	@Override
	public void submit_all_async(final Runnable... r) {
		if (r == null || r.length == 0) {
			return;
		} else if (r.length > EXTENDED_BATCH_SIZE) {
			/* NOTE(review): this caps occupancy against EXTENDED_BATCH_SIZE (64)
			 * rather than QUEUE_SIZE (384) — confirm that is intentional. */
			final int safeToFit = EXTENDED_BATCH_SIZE - this.QUEUE.size();
			if (safeToFit <= 0)
				throw new ArrayStoreException("submit_all_async: Queue is full!");
			final Runnable[] safe = Arrays.copyOfRange(r, 0, safeToFit);
			final Runnable[] late = Arrays.copyOfRange(r, safeToFit, r.length);
			if (this.allowL1Optimizations.get()) {
				submit_all_async_batch(safe);
				/*
				 * L1 Optimization: re-enter for the tail so it is re-split
				 * against up-to-date queue occupancy, allowing finer-grain
				 * batching.
				 */
				submit_all_async(late);
			} else {
				submit_all_async_batch(safe);
				submit_all_async_batch(late);
			}
		} else if (r.length > 8) {
			submit_all_async_batch(r);
		} else {
			/* Tiny submissions: one bulk addAll beats per-item puts. */
			this.QUEUE.addAll(Lists.of(r));
		}
	}

	@Override
	public void clearQueues() {
		this.QUEUE.clear();
	}

	@Override
	public boolean supportsDebugQuery() {
		return true;
	}

	/**
	 * Exposes internal state for diagnostics; returns null for queries this
	 * scheduler does not recognize.
	 */
	@Override
	public Object debug_query(final ASyncSchedulerDebugQuery query) {
		switch (Variables.requireNonNullObject(query, "query")) {
			case ASYNC_SCHED_DEBUG_QUEUE_SIZE:
				return this.QUEUE.size();
			case ASYNC_SCHED_DEBUG_L1_BATCHING_ENABLED:
				return this.allowL1Batching.get();
			case ASYNC_SCHED_DEBUG_L2_BATCHING_ENABLED:
				return this.allowL2Batching.get();
			case ASYNC_SCHED_DEBUG_L1_OPTIMIZATIONS_ENABLED:
				return this.allowL1Optimizations.get();
			case ASYNC_SCHED_DEBUG_EXECUTOR_BATCHING_SUPPORTED:
				return this.executorSupportsBatching.get();
			default:
				return null;
		}
	}

}
