package example.conc;

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * a task queue example:
 * 1. when the queue is full, it cant not accept new tasks.
 * 2. if the task is already in the queue, ignor the request.
 * 3. There will be one worker running to empty the queue.
 *
 * <p/>
 * zhu.tan@gmail.com
 * 18-Nov-2009
 */
/**
 * A bounded background task queue.
 *
 * <ol>
 *   <li>When the queue is full it cannot accept new tasks.</li>
 *   <li>If a task is already in the queue, the duplicate request is ignored.</li>
 *   <li>A single worker thread drains the queue.</li>
 * </ol>
 *
 * <p/>
 * zhu.tan@gmail.com
 * 18-Nov-2009
 */
public class BackgroundTaskQueue {

    static final int CAPACITY = 2;

    // Producers synchronize on the queue so the contains()+add/put/offer
    // duplicate check is atomic. The consumer (Worker) never takes this
    // monitor — it relies on the queue's own internal locking — so it can
    // always drain the queue even while a producer is waiting for space.
    private final BlockingQueue<Request> queue = new LinkedBlockingQueue<Request>(CAPACITY);
    private final Worker worker = new Worker("W1");

    public BackgroundTaskQueue() {
    }

    /** Starts the single worker thread that drains the queue. */
    public void workerStart() {
        worker.start();
    }

    /*
        Once the worker is stopped it cannot be started again
        (a java.lang.Thread may only be started once).
     */
    public void workerStop() {
        worker.setStop();
    }

    /**
     * Adds the request to the queue if it is not already present and space is
     * available immediately.
     *
     * @throws IllegalStateException if the queue is at capacity
     */
    public void accept(Request request) {
        synchronized (queue) {
            if (!queue.contains(request)) {
                queue.add(request);
            }
        }
    }

    /**
     * Adds the request if not already queued, waiting if necessary for space
     * to become available. The wait happens while holding the producer
     * monitor, so other producers block until space frees up; the worker can
     * still drain the queue because it does not use this monitor. If the wait
     * is interrupted the request is dropped and the thread's interrupt status
     * is restored.
     */
    public void acceptUntil(Request request) {
        synchronized (queue) {
            if (!queue.contains(request)) {
                try {
                    queue.put(request);
                } catch (InterruptedException e) {
                    // Never swallow interruption: preserve it for callers.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    /*
        Inserts the specified element into this queue,
        waiting if necessary up to the specified wait time for space to become available.
        Returns true if the request was enqueued; false if it was a duplicate,
        the timeout elapsed, or the wait was interrupted (interrupt status restored).
    */
    public boolean acceptWait4(Request request, long timeInSecond) {
        synchronized (queue) {
            if (!queue.contains(request)) {
                // Primitive boolean: no reason to autobox the flag.
                boolean result = false;
                try {
                    result = queue.offer(request, timeInSecond, TimeUnit.SECONDS);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
                return result;
            } else {
                return false;
            }
        }
    }

    /**
     * Takes the next request, blocking until one is available.
     *
     * @return the next request, or null if the wait was interrupted
     *         (the interrupt status is restored in that case)
     */
    public Request takeUntil() {
        Request result = null;
        try {
            result = queue.take();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return result;
    }

    /**
     * Takes the next request, waiting up to the given number of seconds.
     *
     * @return the next request, or null on timeout or interrupt
     *         (the interrupt status is restored in that case)
     */
    public Request takeWait4(long timeInSecond) {
        Request result = null;
        try {
            result = queue.poll(timeInSecond, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return result;
    }

    /** @return the number of requests currently queued */
    public int getTaskNum() {
        return queue.size();
    }

    /*
         The worker executes requests coming off the queue by spawning each in
         an entirely new thread. It might make more sense to constrain the
         overall number of threads executing concurrently — it is not
         terribly efficient to run large numbers of threads simultaneously;
         using a thread pool would be more appropriate.
    */
    private class Worker extends Thread {
        // Primitive volatile flag; a boxed Boolean adds nothing and invites
        // accidental == comparisons against cached Boolean instances.
        private volatile boolean isStopped = false;

        Worker(String name) {
            super(name);
        }

        @Override
        public void run() {
            while (!isStopped) {
                Request request = takeUntil();
                // takeUntil() returns null when interrupted — skip it instead
                // of spawning a thread that would die with an NPE.
                if (request != null) {
                    execute(request);
                }
            }
        }

        /** Runs the request on a freshly spawned thread. */
        private void execute(final Request request) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    request.execute();
                }
            }).start();
        }

        /**
         * Signals the worker to stop and interrupts it so that a take()
         * blocked on an empty queue wakes up instead of hanging forever.
         */
        private void setStop() {
            isStopped = true;
            interrupt();
        }
    }
}

/**
 * A unit of work accepted by {@link BackgroundTaskQueue} and executed by its
 * worker. Implementations should override equals/hashCode if duplicate
 * suppression by queue membership is to mean anything beyond identity.
 */
@FunctionalInterface
interface Request {
    /** Performs the task. Invoked on a dedicated thread spawned by the worker. */
    void execute();
}
