/**
 * Copyright (C) 2010, 2011 Neofonie GmbH
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package eu.dicodeproject.blogimport.importer;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dicodeproject.blogimport.exceptions.FileAccessException;
import eu.dicodeproject.blogimport.exceptions.DocumentStoreException;
import eu.dicodeproject.blogimport.exceptions.DocumentStoreNotAccessibleException;
import eu.dicodeproject.blogimport.filereader.FileSource;
import eu.dicodeproject.blogimport.filereader.ImportCounter;
import eu.dicodeproject.blogimport.parser.BlogDocument;
import eu.dicodeproject.blogimport.parser.BlogParser;
import eu.dicodeproject.blogimport.parser.ParsingException;

/**
 * Reads files from a {@link FileSource}, parses the content of the files with a
 * {@link BlogParser} implementation and afterwards imports the parsed documents
 * to HBase by using the {@link HBaseBlogSink}.
 */
public class ImportRunner extends Thread {

	/** The Constant LOG. */
	private static final Logger LOG = LoggerFactory.getLogger(ImportRunner.class);

	/**
	 * Flag signalling the import loop to stop. Volatile because it is written by
	 * {@link #requestStop()} (typically from another thread) and read by the
	 * loop in {@link #run()}; without volatile the loop might never observe the
	 * stop request.
	 */
	private volatile boolean shallStop = false;

	/** The bulksize. 
	 * Choose the appropriate bulksize for your use case. If freshness is an issue, the bulksize should
	 * not be too big - all entries of a bulk will only be written if the bulk is full. If you read a huge
	 * number of files, a small bulksize slows the importer down. 
	 * If you both want freshness and speed, choose a medium value.
	 * */
	private int bulksize = 100;

	/** The source of the files to import. */
	private FileSource source;

	/** The parser turning raw file content into {@link BlogDocument}s. */
	private BlogParser parser;

	/** The sink writing parsed documents to HBase. */
	private HBaseBlogSink hbaseSink;

	/** Minimum delay in milliseconds between two consecutive bulk writes. */
	private int pause = 1000;

	/**
	 * This method should be invoked when the import should be stopped.
	 * Sets the stop flag and shuts the source down so {@link #run()} terminates.
	 */
	public void requestStop() {
		this.shallStop = true;
		this.source.shutdown();
	}

	/**
	 * Main import loop: reads from the {@link FileSource}, parses each file and
	 * stores the documents in bulks of {@link #bulksize}, throttled so that at
	 * least {@link #pause} milliseconds elapse between bulks. A final partial
	 * bulk is flushed when the source is exhausted or a stop was requested.
	 *
	 * @see java.lang.Thread#run()
	 */
	@Override
	public void run() {

		// initialize the source
		this.source.init();

		List<BlogDocument> queue = new ArrayList<BlogDocument>();
		ImportCounter counter = new ImportCounter();
		long lastBulk = 0;

		// TODO: think about shutdown, currently this one is using a hard abort

		while (this.source.hasNext() && !this.shallStop) {

			try {
				BlogDocument doc = this.parser.parse(this.source.next());
				queue.add(doc);

				if (queue.size() == this.bulksize) {
					LOG.debug("Adding a full bulk: bulksize {}", this.bulksize);
					// throttle: only sleep the REMAINDER of the pause since the last bulk
					long sinceLastBulk = System.currentTimeMillis() - lastBulk;
					if (sinceLastBulk < this.pause) {
						try {
							Thread.sleep(this.pause - sinceLastBulk);
						} catch (InterruptedException e) {
							// restore the interrupt status so callers can observe it
							Thread.currentThread().interrupt();
						}
					}
					flushQueue(queue, counter);
					lastBulk = System.currentTimeMillis();
				}
			} catch (ParsingException e) {
				LOG.warn("Cannot parse current document, skipping it.", e);
			} catch (FileAccessException e) {
				LOG.warn("Error while accessing file source, checking if there is more content.", e);
			}
		}
		// flush documents still in the queue: shutdown, or fewer documents left
		// than the configured bulksize
		if (!queue.isEmpty()) {
			flushQueue(queue, counter);
		}
		LOG.info(this.hbaseSink.printIDCounts());
	}

	/**
	 * Stores the queued documents, updates the statistics counter and clears the
	 * queue. On an unrecoverable store error the import is aborted via
	 * {@link #requestStop()}.
	 *
	 * @param queue
	 *            the documents to store; cleared on success
	 * @param counter
	 *            the statistics counter to update
	 */
	private void flushQueue(final List<BlogDocument> queue, final ImportCounter counter) {
		try {
			bulk(queue);
			// counter for some statistics, logged with the defined frequency
			counter.count(queue.size());
			counter.logCounter(this.bulksize > counter.getFrequency());
			queue.clear();
		} catch (DocumentStoreNotAccessibleException e) {
			requestStop();
			LOG.error("Cannot access DocumentStore, aborting.", e);
		} catch (DocumentStoreException e) {
			requestStop();
			LOG.error("Cannot store documents in DocumentStore, aborting.", e);
		}
	}

	/**
	 * Bulk import a list of {@link BlogDocument} objects. Retries forever with a
	 * ten-minute sleep between attempts; note that as a consequence the declared
	 * exceptions are currently never propagated to the caller — they remain in
	 * the signature for compatibility.
	 *
	 * @param queue
	 *            the queue
	 * @throws DocumentStoreNotAccessibleException
	 *             the document store not accessible exception
	 * @throws DocumentStoreException
	 *             the document store exception
	 */
	private void bulk(final List<BlogDocument> queue) throws DocumentStoreNotAccessibleException, DocumentStoreException {
		boolean success = false;
		while (!success) {
			try {
				LOG.debug("storing a bulk of {} documents", queue.size());
				this.hbaseSink.store(queue);
				success = true;
			} catch (DocumentStoreException e) {
				LOG.error("DocumentStore not reachable - sleeping.", e);
			} catch (DocumentStoreNotAccessibleException e) {
				LOG.error("DocumentStore not accessible - sleeping.", e);
			}
			if (!success) {
				try {
					Thread.sleep(10 * 60 * 1000L);
				} catch (InterruptedException e) {
					// Interrupting this sleep is the intended way to trigger an
					// immediate retry, so the interrupt status is deliberately
					// NOT restored here (restoring it would make every following
					// sleep return instantly, turning this into a busy loop).
					LOG.info("Interrupted - retrying import.");
				}
			}
		}
	}

	/**
	 * Sets the source.
	 * 
	 * @param source
	 *            the source
	 */
	public void setSource(final FileSource source) {
		this.source = source;
	}

	/**
	 * Sets the bulksize.
	 * 
	 * @param bulksize
	 *            the bulksize
	 */
	public void setBulksize(final int bulksize) {
		this.bulksize = bulksize;
	}

	/**
	 * Sets the parser.
	 * 
	 * @param parser
	 *            the parser
	 */
	public void setParser(final BlogParser parser) {
		this.parser = parser;
	}

	/**
	 * Sets the sink.
	 * 
	 * @param hbaseSink
	 *            the sink to write documents to
	 */
	public void setSink(final HBaseBlogSink hbaseSink) {
		this.hbaseSink = hbaseSink;
	}

	/**
	 * Gets the source.
	 * 
	 * @return the source
	 */
	public FileSource getSource() {
		return source;
	}

	/**
	 * Sets the minimum delay between two bulk writes.
	 *
	 * @param pause
	 *            the pause in milliseconds
	 */
	public void setPause(int pause) {
		this.pause = pause;
	}
}
