package file;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import java.util.ArrayList;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudBlobDirectory;
import com.microsoft.azure.storage.blob.CloudBlockBlob;

/**
 * InputFormat that reads an OWL ontology file and emits each OWL class in its
 * signature as one record, keyed by its position in the class list.
 * 
 * Based on the algorithm at
 * https://github.com/pyongjoo/MapReduce-Example/blob/master/mysrc/XmlInputFormat.java
 * 
 * @author Brasileiro
 * 
 */
public class OWLInputFormat extends SequenceFileInputFormat<LongWritable, OWLClass> {

	private static final Logger log = LoggerFactory.getLogger(OWLInputFormat.class);

	/**
	 * Creates a record reader over the OWL document backing {@code split}.
	 *
	 * @param split   the file split to read (must be a {@link FileSplit})
	 * @param context task context providing the job configuration
	 * @return a reader emitting one {@link OWLClass} per record
	 * @throws IOException if the ontology cannot be downloaded or parsed
	 */
	@Override
	public RecordReader<LongWritable, OWLClass> createRecordReader(InputSplit split, TaskAttemptContext context)
			throws IOException {
		// FIX: the previous version caught the IOException and returned null,
		// which made the framework fail later with an opaque NullPointerException.
		// The InputFormat contract allows this method to throw IOException.
		return new OwlRecordReader((FileSplit) split, context.getConfiguration());
	}

	/**
	 * RecordReader that loads a whole OWL ontology up front and then iterates
	 * over the classes in its signature, one class per record. Keys are the
	 * 0-based position of the class in the materialized class list.
	 */
	public static class OwlRecordReader extends RecordReader<LongWritable, OWLClass> {

		/**
		 * Configuration key: non-zero means the input lives in Azure blob
		 * storage. NOTE(review): the value is currently ignored by
		 * {@link #readOWL(Path, int)} — the Azure branch is always taken.
		 */
		public static final String IS_AZURE = "isAzure";

		private final long start;
		private final long end;
		private int current;
		private final ArrayList<OWLClass> listOfConcepts;
		private OWLOntology ontology;
		// FIX: allocate the key once instead of on every nextKeyValue() call;
		// previously it was null until the first nextKeyValue().
		private final LongWritable currentKey = new LongWritable();
		private OWLClass currentValue;

		/**
		 * Loads the ontology behind {@code split} and snapshots its classes.
		 *
		 * @param split the input split naming the ontology file
		 * @param conf  job configuration (reads {@link #IS_AZURE})
		 * @throws IOException if the ontology cannot be read or parsed
		 */
		public OwlRecordReader(FileSplit split, Configuration conf) throws IOException {
			int isAzure = conf.getInt(IS_AZURE, 0);

			try {
				ontology = readOWL(split.getPath(), isAzure);
			} catch (Exception e) {
				// FIX: the exception used to be swallowed (printed to stderr),
				// after which getClassesInSignature() threw a NullPointerException.
				// Fail fast instead, preserving the cause.
				throw new IOException("Failed to read ontology file " + split.getPath(), e);
			}

			listOfConcepts = new ArrayList<OWLClass>(ontology.getClassesInSignature());
			start = 0;
			current = 0;
			end = listOfConcepts.size();
		}

		/** Advances to the next class, returning false when exhausted. */
		private boolean next() throws IOException {
			if (current < end) {
				currentKey.set(current);
				currentValue = listOfConcepts.get(current);
				current++;
				return true;
			}
			return false;
		}

		@Override
		public void close() throws IOException {
			// Release the (potentially large) ontology for garbage collection.
			ontology = null;
		}

		@Override
		public float getProgress() throws IOException {
			// FIX: guard against 0/0 -> NaN when the ontology has no classes.
			return end == start ? 1.0f : (current - start) / (float) (end - start);
		}

		@Override
		public LongWritable getCurrentKey() throws IOException, InterruptedException {
			return currentKey;
		}

		@Override
		public OWLClass getCurrentValue() throws IOException, InterruptedException {
			return currentValue;
		}

		@Override
		public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
			// All setup happens in the constructor; nothing to do here.
		}

		@Override
		public boolean nextKeyValue() throws IOException, InterruptedException {
			currentValue = null;
			return next();
		}
	}

	/**
	 * Downloads the ontology named by {@code path} from Azure blob storage
	 * (container "matchercluster", directory SDProject/Input) and parses it
	 * with the OWL API.
	 *
	 * @param path    HDFS path whose file name identifies the blob to fetch
	 * @param isAzure currently unused — the former local-filesystem branch was
	 *                dead (commented-out) code; kept for interface stability
	 * @return the parsed OWL ontology
	 * @throws Exception on storage, I/O, or ontology-parse failure
	 */
	private static OWLOntology readOWL(Path path, int isAzure) throws Exception {
		// SECURITY(review): the storage account key is hard-coded in source.
		// It should be moved into the job Configuration or a credential store
		// and this key rotated, since it has been committed to version control.
		final String storageConnectionString = "DefaultEndpointsProtocol=http;" + "AccountName=testematcher;"
				+ "AccountKey=TqIn51pzE5r8CyAv0QeTTBqtd7LbS7bksSv/C8CMXAzvabqjtobID4qk5rw9yiY55p4mx5pwUC0/I1l7b19Zqw==";

		CloudStorageAccount account = CloudStorageAccount.parse(storageConnectionString);
		CloudBlobClient serviceClient = account.createCloudBlobClient();

		// Container name must be lower case.
		CloudBlobContainer container = serviceClient.getContainerReference("matchercluster");
		CloudBlobDirectory sdproj = container.getDirectoryReference("SDProject").getSubDirectoryReference("Input");
		CloudBlockBlob arq = sdproj.getBlockBlobReference(path.getName());

		// FIX: use a unique temp file instead of the fixed name "TEMPONTO.owl";
		// concurrent tasks on the same node would otherwise clobber each other.
		File destinationFile = File.createTempFile("TEMPONTO", ".owl");
		try {
			arq.downloadToFile(destinationFile.getAbsolutePath());

			// Parse the downloaded document with a fresh ontology manager.
			OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
			IRI docIRI = IRI.create(destinationFile);
			return manager.loadOntologyFromOntologyDocument(docIRI);
		} finally {
			// FIX: delete even when parsing throws (cleanup used to be skipped
			// on error), and surface a failed delete instead of ignoring it.
			if (!destinationFile.delete()) {
				log.warn("Could not delete temporary ontology file {}", destinationFile);
			}
		}
	}

}