/*
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is
 *
 * java-concurrency-presentation:
 * concurrency presentation tutorial.
 *
 * The Initial Developer of the Original Code is
 * Marco Antonio Villalobos, Jr.
 * Portions created by the Initial Developer are Copyright (C) 2010
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s): None.
 *
 * ***** END LICENSE BLOCK *****
 */

package presentation.concurrency.indexer.nonblocking;

import presentation.concurrency.indexer.Index;
import presentation.concurrency.indexer.Indexer;
import presentation.concurrency.indexer.Location;
import presentation.concurrency.indexer.Word;
import presentation.concurrency.util.ContextSwitcher;

import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;

/**
 * An example of how to correctly use concurrency: each worker task builds a
 * thread-confined index of its own, and partial results are merged into the
 * shared index under a lock.
 */
public class SafeNonBlockingIndexer extends Indexer {

    /**
     * Per-task word index. Thread-safety strategy: each worker task builds its
     * own private {@code IndexImpl} (thread confinement), so {@link #add} needs
     * no locking; the only instance shared between threads is the final result,
     * which is mutated exclusively through the {@code synchronized}
     * {@link #reduce} method.
     */
    private class IndexImpl implements Index {
        // word text -> per-word statistics; plain HashMap is safe here because
        // add() only runs on thread-confined instances and merges into the
        // shared instance happen under reduce()'s monitor.
        private Map<String, WordImpl> values = new HashMap<String, WordImpl>();
        private long wordCount = 0;
        private ContextSwitcher contextSwitcher = new ContextSwitcher();

        /**
         * Records one occurrence of {@code word} at {@code file}:{@code line}.
         * NOT thread-safe on its own: the read/switch/write sequence below is a
         * classic lost-update window. It is correct here only because each
         * instance is confined to a single worker thread.
         */
        public void add(String word, String file, int line) {
            Word previous = values.get(word);
            if (previous != null) {
                previous.add(file, line);
            } else {
                WordImpl current = new WordImpl();
                current.add(file, line);
                values.put(word, current);
            }
            // Deliberately non-atomic increment with a forced context switch in
            // the middle — presumably to demonstrate that confinement (not
            // atomicity) is what makes this safe. TODO(review): confirm intent
            // against the unsafe sibling example.
            long w = wordCount + 1;
            contextSwitcher.forceSometimes();
            wordCount = w;
        }

        /** Returns a sorted snapshot of the words; the copy isolates callers from later mutation. */
        public Set<Map.Entry<String, Word>> getWords() {
            return new TreeMap<String, Word>(values).entrySet();
        }

        /**
         * Unsynchronized read of {@code wordCount}. Safe for the final result
         * only because execute() awaits the latch, which is counted down after
         * every reduce() — establishing a happens-before edge before the
         * caller reads this value.
         */
        public long getWordCount() {
            return wordCount;
        }

        /**
         * Merges a worker's private index into this one. Synchronized because
         * this is the single point where multiple threads' data meets the
         * shared result instance.
         */
        public synchronized void reduce(IndexImpl index) {
            for (Map.Entry<String,WordImpl> entry: index.values.entrySet()) {
                String word = entry.getKey();
                WordImpl current = entry.getValue();
                WordImpl previous = values.get(word);
                if (previous!=null) {
                    previous.reduce(current);
                } else {
                    values.put(word, current);
                }
            }
            wordCount+=index.wordCount;
        }
    }

    /**
     * Per-word statistics: occurrence count plus the set of locations, in
     * insertion order. Note the asymmetry: {@code count} is an AtomicLong but
     * {@code locations} is a plain LinkedHashSet — correctness still relies on
     * confinement plus the reduce() lock, not on the atomic alone.
     */
    private class WordImpl implements Word {
        private AtomicLong count = new AtomicLong(0);
        private Set<Location> locations = new LinkedHashSet<Location>();

        /** Records one occurrence; called only from thread-confined IndexImpl.add. */
        public void add(String file, int line) {
            locations.add(new Location(file, line));
            count.incrementAndGet();
        }

        public long getCount() {
            return count.get();
        }

        /** Exposes the internal set directly — callers must treat it as read-only. */
        public Set<Location> getLocations() {
            return locations;
        }

        /** Merges another word's stats into this one; invoked under IndexImpl.reduce's monitor. */
        public void reduce(WordImpl word) {
            locations.addAll(word.locations);
            count.addAndGet(word.count.get());
        }
    }

    /**
     * Indexes all files concurrently. Pipeline: a producer thread feeds files
     * into a bounded queue; 25 pool workers each take one file, index it into
     * a private IndexImpl, and return it; a consumer thread drains completed
     * futures and reduces them into the shared result; a latch (one count per
     * file) tells the caller when every partial result has been merged.
     *
     * @param files the files to index
     * @return the merged index over all files
     */
    public Index execute(final File[] files) {

        final int no_of_threads = 25;
        // Capacity files.length means the producer can never block on put.
        final ArrayBlockingQueue<File> fileProducer = new ArrayBlockingQueue<File>(files.length);
        // Counted down once per file, after its partial index has been merged.
        final CountDownLatch latch = new CountDownLatch(files.length);

        Thread fileProducerThread = new Thread() {
            public void run() {
                Collections.addAll(fileProducer, files);
            }
        };
        fileProducerThread.start();

        final ExecutorService pool = Executors.newFixedThreadPool(no_of_threads);
        final CompletionService<IndexImpl> completionService = new ExecutorCompletionService<IndexImpl>(pool);

        // The one instance shared across threads; mutated only via reduce().
        final IndexImpl index = new IndexImpl();

        // One task per file: take a file from the queue, index it into a
        // thread-confined local index, and hand that back via the future.
        for (int i=0;i<files.length;i++) {
            Callable<IndexImpl> fileProcessor = new Callable<IndexImpl>() {
                @Override
                public IndexImpl call() throws Exception {
                    File file = fileProducer.take();
                    IndexImpl local = new IndexImpl();
                    index(local, file);
                    return local;
                }
            };
            completionService.submit(fileProcessor);
        }

        // Consumer: merge each completed partial index into the shared result,
        // counting the latch down once per future (even on task failure, so
        // the caller is never left waiting).
        Thread indexConsumer = new Thread() {
            public void run() {
                try {
                    while(!Thread.currentThread().isInterrupted()) {
                        Future<IndexImpl> future = completionService.take();
                        try {
                            IndexImpl current = future.get();
                            index.reduce(current);
                        } catch (ExecutionException e) {
                            // NOTE(review): a failed file is silently skipped —
                            // the failure should at least be logged.
                        }
                        latch.countDown();
                    }
                } catch (InterruptedException e) {
                    // Expected shutdown path: execute() interrupts this thread
                    // once all results are in (take() would otherwise block
                    // forever waiting for a result that will never come).
                }
            }
        };
        indexConsumer.start();

        try {
            latch.await();
        } catch (InterruptedException e) {
            //this is okay
            // NOTE(review): interrupt status is swallowed without re-asserting
            // Thread.currentThread().interrupt() — verify this is intentional.
        }
        pool.shutdown();
        try {
            pool.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            //this is okay too
        }
        // Unblock the consumer's final completionService.take().
        indexConsumer.interrupt();



        return index;
    }
}