package file;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudBlobDirectory;
import com.microsoft.azure.storage.blob.CloudBlockBlob;

import file.FileSD;
import file.OWLInputFormat;

/**
 * Based on the algorithm from
 * https://github.com/pyongjoo/MapReduce-Example/blob/master/mysrc/XmlInputFormat.java
 * 
 * @author Brasileiro
 * 
 */
public class OWLInputFormat extends
		SequenceFileInputFormat<Text, FileSD> {

	private static final Logger log = LoggerFactory
			.getLogger(OWLInputFormat.class);

	/**
	 * Creates a record reader that turns an OWL ontology file into one record
	 * per OWL class (concept).
	 *
	 * @param split   file split pointing at the ontology document
	 * @param context task context providing the job {@link Configuration}
	 * @return a new {@link OwlRecordReader} for the given split
	 * @throws IOException if the ontology cannot be downloaded or parsed
	 */
	@Override
	public RecordReader<Text, FileSD> createRecordReader(
			InputSplit split, TaskAttemptContext context) throws IOException {
		try {
			return new OwlRecordReader((FileSplit) split,
					context.getConfiguration());
		} catch (IOException ioe) {
			// Propagate instead of returning null: a null reader would only
			// resurface later as an opaque NullPointerException inside the
			// framework, far away from the root cause.
			log.warn("Error while creating OwlRecordReader", ioe);
			throw ioe;
		}
	}

	/**
	 * RecordReader that iterates over the OWL classes (concepts) of a single
	 * ontology file, emitting each concept once per comparison in which the
	 * ontology participates, keyed as "comparison.concept.dataset".
	 */
	public static class OwlRecordReader extends
			RecordReader<Text, FileSD> {

		/** Configuration key listing the source ontology file names. */
		private static final String SOURCE = "owl.source";
		/** Configuration key listing the target ontology file names. */
		private static final String TARGET = "owl.target";

		private final long start;
		private final long end;
		// Index of the concept currently being emitted.
		private int current;
		private ArrayList<OWLClass> listOfConcepts;
		private OWLOntology ontology;
		private Text currentKey;
		private FileSD currentValue;

		// Positions of this file within the configured source / target lists,
		// i.e. the ids of the comparisons this ontology takes part in.
		ArrayList<Integer> indexPairSource;
		ArrayList<Integer> indexPairTarget;
		private int currentSourceIndex;
		private final int endSourceIndex;
		private int currentTargetIndex;
		private final int endTargetIndex;

		/**
		 * Loads the ontology referenced by the split and prepares the
		 * iteration state.
		 *
		 * @param split split whose path names the ontology file
		 * @param conf  job configuration holding {@value #SOURCE} and
		 *              {@value #TARGET}
		 * @throws IOException if the ontology cannot be downloaded or parsed
		 */
		public OwlRecordReader(FileSplit split, Configuration conf)
				throws IOException {
			// Configuration.getStrings returns null when the key is absent;
			// normalize to an empty list so the index lookups below are safe.
			String[] sourceNames = conf.getStrings(SOURCE);
			String[] targetNames = conf.getStrings(TARGET);
			List<String> ontologySource = Arrays.asList(
					sourceNames == null ? new String[0] : sourceNames);
			List<String> ontologyTarget = Arrays.asList(
					targetNames == null ? new String[0] : targetNames);

			String[] pathFile = split.getPath().toString().split("/");
			String nameFile = pathFile[pathFile.length - 1];

			indexPairSource = getArrayIndex(ontologySource, nameFile);
			indexPairTarget = getArrayIndex(ontologyTarget, nameFile);

			try {
				ontology = readOWL(split.getPath());
			} catch (Exception e) {
				// Fail fast with context: leaving ontology null would only
				// cause an uninformative NullPointerException just below.
				throw new IOException("Error reading ontology file "
						+ nameFile, e);
			}

			listOfConcepts = new ArrayList<OWLClass>(
					ontology.getClassesInSignature());
			start = 0;
			current = 0;
			end = listOfConcepts.size() - 1;

			currentSourceIndex = 0;
			endSourceIndex = indexPairSource.size();
			currentTargetIndex = 0;
			endTargetIndex = indexPairTarget.size();
		}

		/**
		 * Returns every position at which {@code nameFile} occurs in the
		 * configured ontology list (one position per comparison).
		 */
		private ArrayList<Integer> getArrayIndex(List<String> ontologyNames,
				String nameFile) {
			ArrayList<Integer> out = new ArrayList<Integer>();
			for (int i = 0; i < ontologyNames.size(); i++) {
				if (nameFile.equals(ontologyNames.get(i))) {
					out.add(i);
				}
			}
			return out;
		}

		/**
		 * Builds the next key/value pair. Keys have the form
		 * "comparison.concept.dataset": dataset is 1 when this ontology is
		 * the source of the comparison and 2 when it is the target. When the
		 * same ontology is source in one comparison and target in another,
		 * {@code current} is only advanced after every source and target key
		 * has been emitted, so the concept is replicated under each key.
		 *
		 * @return true while there are concepts left to emit
		 * @throws IOException declared to honour the RecordReader contract
		 */
		private boolean next() throws IOException {
			// NOTE(review): "current < end" (with end == size - 1) never
			// emits the last concept of the list — confirm whether the
			// off-by-one is intentional; it also interacts with the
			// listOfConcepts.remove(current) below, so it is preserved as-is.
			if (current < end) {
				boolean isSource = false;
				if (currentSourceIndex < endSourceIndex) {
					currentKey.set(indexPairSource.get(currentSourceIndex)
							+ "." + current + "." + 1);
					currentSourceIndex++;
					isSource = true;
				} else {
					if (currentTargetIndex < endTargetIndex) {
						currentKey.set(indexPairTarget.get(currentTargetIndex)
								+ "." + current + "." + 2);
						currentTargetIndex++;
					}
				}

				currentValue = new FileSD(
						new Text(getLabel(listOfConcepts.get(current), ontology)),
						new Text(listOfConcepts.get(current).getIRI().getShortForm()),
						new IntWritable(), listOfConcepts.get(current));
				if (currentValue.getConcept().isOWLThing()) {
					// owl:Thing carries no domain information: drop it and
					// roll back the index consumed above before retrying.
					listOfConcepts.remove(current);
					if (isSource) {
						currentSourceIndex--;
					} else {
						currentTargetIndex--;
					}
					return next();
				}
				log.debug("Key: {}", currentKey);
				if (currentSourceIndex == endSourceIndex
						&& currentTargetIndex == endTargetIndex) {
					// Every comparison key for this concept was emitted:
					// reset and advance to the next concept.
					currentSourceIndex = 0;
					currentTargetIndex = 0;
					current++;
				}

				return true;
			}
			return false;
		}

		/** Releases the loaded ontology so it can be garbage-collected. */
		@Override
		public void close() throws IOException {
			ontology = null;
		}

		/** Fraction of concepts already consumed, in [0, 1]. */
		@Override
		public float getProgress() throws IOException {
			if (end <= start) {
				// 0 or 1 concept: avoid the 0/0 (NaN) division.
				return 1.0f;
			}
			return (current - start) / (float) (end - start);
		}

		@Override
		public Text getCurrentKey() throws IOException,
				InterruptedException {
			return currentKey;
		}

		@Override
		public FileSD getCurrentValue() throws IOException,
				InterruptedException {
			return currentValue;
		}

		/** All initialization is done in the constructor. */
		@Override
		public void initialize(InputSplit split, TaskAttemptContext context)
				throws IOException, InterruptedException {
		}

		/** Advances to the next record, allocating a fresh key each call. */
		@Override
		public boolean nextKeyValue() throws IOException, InterruptedException {
			currentKey = new Text();
			currentValue = null;
			return next();
		}
	}

	/**
	 * Resolves a human-readable label for {@code owlClass}: the last
	 * label-like annotation literal found in the ontology, falling back to
	 * the short form of the class IRI when no such annotation exists.
	 */
	private static String getLabel(OWLClass owlClass, OWLOntology ontology) {
		OWLLiteral val = null;
		for (OWLAnnotationAssertionAxiom annotation : ontology
				.getAnnotationAssertionAxioms(owlClass.getIRI())) {
			if (annotation.getValue() instanceof OWLLiteral
					&& annotation.getProperty().getIRI().getShortForm()
							.contains("label")) {
				val = (OWLLiteral) annotation.getValue();
			}
		}
		if (val == null) {
			return owlClass.getIRI().getShortForm();
		}
		return val.getLiteral();
	}

	/**
	 * Downloads the ontology blob from Azure Blob Storage to a local temp
	 * file and loads it with the OWL API.
	 *
	 * <p>SECURITY NOTE(review): the storage connection string below embeds
	 * the account key in source code. It should be moved to the job
	 * configuration / a credential store and the key rotated.
	 *
	 * @param path path whose file name identifies the blob to download
	 * @return the loaded OWL ontology
	 * @throws Exception if the blob cannot be downloaded or parsed
	 */
	private static OWLOntology readOWL(Path path) throws Exception {
		final String storageConnectionString = "DefaultEndpointsProtocol=http;"
				+ "AccountName=testematcher;"
				+ "AccountKey=TqIn51pzE5r8CyAv0QeTTBqtd7LbS7bksSv/C8CMXAzvabqjtobID4qk5rw9yiY55p4mx5pwUC0/I1l7b19Zqw==";

		CloudStorageAccount account = CloudStorageAccount
				.parse(storageConnectionString);
		CloudBlobClient serviceClient = account.createCloudBlobClient();

		// Container name must be lower case.
		CloudBlobContainer container = serviceClient
				.getContainerReference("superclusterlqd");
		CloudBlobDirectory sdproj = container.getDirectoryReference("SDProject")
				.getSubDirectoryReference("Input");
		CloudBlockBlob arq = sdproj.getBlockBlobReference(path.getName());

		// Use a unique temp file: the previous fixed name ("TEMPONTO.owl")
		// collides when two tasks run concurrently on the same node.
		File destinationFile = File.createTempFile("TEMPONTO", ".owl");
		try {
			arq.downloadToFile(destinationFile.getAbsolutePath());

			OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
			IRI docIRI = IRI.create(destinationFile);
			return manager.loadOntologyFromOntologyDocument(docIRI);
		} finally {
			// Best-effort cleanup of the downloaded copy, even on failure.
			destinationFile.delete();
		}
	}

}