/**
 * File: CollectAndFilter.java
 * Created by: mhaimel
 * Created on: 18 Nov 2009
 * CVS:  $Id: CollectAndFilter.java,v 1.1 2009/11/24 16:24:00 mhaimel Exp $
 */
package uk.ac.ebi.curtain.exec;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.lang.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import uk.ac.ebi.curtain.util.FileHelper;
import uk.ac.ebi.curtain.utils.CurtainUncheckedException;
import uk.ac.ebi.curtain.utils.data.FileType;
import uk.ac.ebi.curtain.utils.data.ReadType;
import uk.ac.ebi.curtain.utils.file.EntryMapper;
import uk.ac.ebi.curtain.utils.file.EntryMapperBean;
import uk.ac.ebi.curtain.utils.file.FileInfo;
import uk.ac.ebi.curtain.utils.file.WriteUtil;
import uk.ac.ebi.curtain.utils.io.impl.FileIO;
import uk.ac.ebi.velvet.model.Supercontig;

/**
 * @author mhaimel
 *
 */
public class CollectAndFilter {
	
	/** Minimum sequence length (bp) an output contig must reach to be kept. */
	private static final Integer MIN_OUTPUT_LENGTH = 60;
	/** Monotonically increasing id used when renaming output contigs. */
	private final AtomicLong contigCnt = new AtomicLong(0);
	private final Log log = LogFactory.getLog(this.getClass());
	/** Root directory holding the numbered bucket sub-directories. */
	private final File baseDir;
	/** Target fasta file for the filtered contigs (a ".ref" sibling is also written). */
	private final File outFile;
	/** First bucket id to process (inclusive). */
	private final AtomicLong start = new AtomicLong(0l);
	/** Last bucket id to process (inclusive). */
	private final AtomicLong end = new AtomicLong(0l);
	private volatile ExecutorService es;
	/** Total worker threads: 2 writers + (parallel - 2) collectors. */
	private final int parallel;
	/** Bucket ids waiting to be collected. */
	private final BlockingQueue<Long> queue = new LinkedBlockingQueue<Long>(4000);
	/** Reference entries destined for the ".ref" output file. */
	private final BlockingQueue<EntryMapperBean> refQueue = new LinkedBlockingQueue<EntryMapperBean>(40000);
	/** Filtered/renamed entries destined for the main output file. */
	private final BlockingQueue<EntryMapperBean> outQueue = new LinkedBlockingQueue<EntryMapperBean>(40000);
	/** Name of the per-bucket result sub-directory containing "contigs.fa" (e.g. "test_&lt;kmer&gt;"). */
	private final String tDir;

	/**
	 * @param tDir    name of the per-bucket result sub-directory
	 * @param outFile fasta file the filtered contigs are written to
	 * @param baseDir root directory containing the bucket sub-directories
	 */
	public CollectAndFilter(String tDir, File outFile, File baseDir) {
		this.tDir = tDir;
		this.outFile = outFile;
		this.baseDir = baseDir;
		// Twice the core count, but never fewer than 5 threads
		// (2 writers + at least 3 collectors, see run()).
		int nProc = Runtime.getRuntime().availableProcessors()*2;
		this.parallel = Math.max(5, nProc);
		setParallel(this.parallel);
	}

	/**
	 * Command line entry point.
	 *
	 * @param args kmer, output fasta file, base bucket directory,
	 *             first bucket id, last bucket id
	 */
	public static void main(String[] args) {
		int i = 0;
		// BUGFIX: five arguments are consumed below, but the guard only
		// required three (and the usage line omitted <start> and <end>),
		// so short invocations died with ArrayIndexOutOfBoundsException.
		if(args.length < 5){
			System.err.println("<Kmer> <output-fa-file> <base-bucket-dir> <start-bucket> <end-bucket>");
			System.exit(1);
		}
		Integer kmer = Integer.valueOf(args[i++]);
		String tDir = "test_"+kmer;
		File outFile = new File(args[i++]);
		File baseDir = new File(args[i++]);
		CollectAndFilter coll = new CollectAndFilter(tDir,outFile,baseDir);
		coll.setStart(Integer.valueOf(args[i++]));
		coll.setEnd(Integer.valueOf(args[i++]));
		StopWatch sw = new StopWatch();
		sw.start();
		coll.run();
		sw.stop();
		System.out.println(sw);
	}

	/**
	 * Starts the writer and collector tasks, feeds every bucket id in
	 * [start, end] into the work queue, then waits until all queues have
	 * drained before interrupting the (otherwise endlessly polling) workers
	 * via {@link ExecutorService#shutdownNow()}.
	 */
	private void run() {
		try{
			// One writer per output file; every remaining thread collects buckets.
			es.execute(new MyWriter(refQueue, new File(outFile.getAbsoluteFile()+".ref")));
			es.execute(new MyWriter(outQueue, outFile));
			for(int i = 2; i < this.parallel; ++i){
				es.execute(new MyCollector(tDir));
			}
			es.shutdown(); // no further submissions; running tasks continue
			try {
				while(start.get() <= end.get()){
					if(start.get()%1000==0){
						log.debug("Submitted " + start.get());
					}
					queue.put(start.getAndIncrement());
				}		
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
			}
			// Poll until all work has been consumed; the workers block on
			// take() forever, so draining queues is the only completion signal.
			while(!(queue.isEmpty() && refQueue.isEmpty() && outQueue.isEmpty()) && !Thread.currentThread().isInterrupted()){
				try {
					TimeUnit.SECONDS.sleep(5);
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt();
				}
			}
		}finally{
			es.shutdownNow(); // interrupts the blocked writers/collectors
		}
	}

	/** Creates the fixed-size thread pool backing all workers. */
	private void setParallel(int i) {
		log.info("Use " + i + " threads for Executor!");
		es = Executors.newFixedThreadPool(i);
	}

	/** Sets the last bucket id to process (inclusive). */
	private void setEnd(Integer end) {
		this.end.set(end);
	}
	/** Sets the first bucket id to process (inclusive). */
	private void setStart(Integer start) {
		this.start.set(start);		
	}

	/**
	 * Drains one entry queue into one output file until interrupted by
	 * {@code shutdownNow()} in {@link #run()}.
	 */
	private class MyWriter implements Runnable{
		
		private final BlockingQueue<EntryMapperBean> myqueue;
		private final File myOutput;

		public MyWriter(BlockingQueue<EntryMapperBean> queue, File output) {
			this.myqueue = queue;
			this.myOutput = output;
		}

		@Override
		public void run() {
			FileInfo oFile = wrapFile(myOutput);
			WriteUtil writer = oFile.getFileType().getWriter();
			PrintWriter out = null;
			try{
				out = new FileIO(myOutput,false).getPrintWriter();
				while(!Thread.currentThread().isInterrupted()){
					EntryMapperBean bean = myqueue.take();
					writer.write(out, bean);
				}
			} catch (IOException e) {
				// NOTE(review): the cause 'e' is dropped here; chain it if
				// CurtainUncheckedException offers a (String, Throwable) ctor.
				throw new CurtainUncheckedException("Problems writing to "+ myOutput);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt(); // expected on shutdown
			} finally{
				FileIO.closeQuietly(out); // flushes buffered output on close
			}			
		}
		
	}


	/** Wraps a file as a fasta contig {@link FileInfo}. */
	private FileInfo wrapFile(File fFile) {
		return new FileInfo(fFile,FileType.fasta,ReadType.Contig);
	}
	
	/**
	 * Takes bucket ids off the work queue and, for each bucket that produced
	 * results, forwards the reference contigs to {@link #refQueue} and the
	 * filtered/renamed output contigs to {@link #outQueue}.
	 */
	private class MyCollector implements Runnable{

		private final String tDir;

		public MyCollector(String tDir) {
			this.tDir = tDir;
		}

		@Override
		public void run() {
			try {
				while(!Thread.currentThread().isInterrupted()){
					Long id = queue.take();
					process(id);
				}
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				// done
			}
		}
		
		/** Resolves the sub-directory of bucket {@code id} below {@code baseDir}. */
		private File buildFile(Long id){
			return FileHelper.getSubDirectory(baseDir, id.intValue());
		}

		/**
		 * Processes one bucket: both the contig dump and the assembly result
		 * must exist and be non-empty, otherwise the bucket is skipped.
		 */
		private void process(Long id) {
			File dir = buildFile(id);
			FileInfo ctgFile = Supercontig.getContigDumpFile(dir);
			File cFile = ctgFile.getFile();
			File tFile = new File(new File(dir,tDir),"contigs.fa");
			if(cFile.exists() && tFile.exists() && cFile.length() > 0 && tFile.length() > 0){
				readContig(ctgFile);
				readOutput(tFile, id);
			}
		}

		/** Streams every reference entry of the bucket into {@link #refQueue}. */
		private void readContig(FileInfo ctgFile) {
			ctgFile.parse(new EntryMapper() {
				@Override
				public void mapEntry(EntryMapperBean bean) {
					try {
						// BUGFIX: put() blocks when the bounded queue is full;
						// add() would throw IllegalStateException and kill the
						// collector thread.
						refQueue.put(bean);
					} catch (InterruptedException e) {
						Thread.currentThread().interrupt();
					}
				}
			});	
		}

		/** Streams entries passing the length filter, renamed, into {@link #outQueue}. */
		private void readOutput(File file, final Long id) {
			FileInfo info = wrapFile(file);
			info.parse(new EntryMapper() {
				@Override
				public void mapEntry(EntryMapperBean bean) {
					if(isValid(bean, MIN_OUTPUT_LENGTH)){
						bean = rebuildId(bean, id);
						try {
							// BUGFIX: blocking put() instead of throwing add(),
							// see readContig().
							outQueue.put(bean);
						} catch (InterruptedException e) {
							Thread.currentThread().interrupt();
						}
					}
				}
			});	
		}
		
		/** Renames an entry to "NODE_&lt;n&gt;_length_&lt;len&gt;_bucket_&lt;id&gt;" with a globally unique n. */
		private EntryMapperBean rebuildId(EntryMapperBean bean, Long id){
			EntryMapperBean nBean = new EntryMapperBean(bean);
			long nId = contigCnt.getAndIncrement();
			int sequLen = bean.getSequenceLength();
			nBean.setId("NODE_"+nId+"_length_"+sequLen+"_bucket_"+id);
			return nBean;
		}

		/** @return true when the entry's sequence is at least {@code minLen} long */
		private boolean isValid(EntryMapperBean bean, Integer minLen) {
			return bean.getSequence().length() >= minLen;
		}
	}
	
}
