package comparator;

import java.io.File;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import matchers.LinguisticMatchersFacade;
import matchers.StructureNeighborhood;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;

import uk.ac.shef.wit.simmetrics.similaritymetrics.CosineSimilarity;
import uk.ac.shef.wit.simmetrics.similaritymetrics.DiceSimilarity;
import uk.ac.shef.wit.simmetrics.similaritymetrics.JaccardSimilarity;
import uk.ac.shef.wit.simmetrics.similaritymetrics.JaroWinkler;
import uk.ac.shef.wit.simmetrics.similaritymetrics.Levenshtein;
import uk.ac.shef.wit.simmetrics.similaritymetrics.QGramsDistance;
import uk.ac.shef.wit.simmetrics.similaritymetrics.Soundex;

import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudBlobDirectory;
import com.microsoft.azure.storage.blob.CloudBlockBlob;

import file.FileSD;

public class BDM {

	public static HashMap<String, Integer> blockingkeyIndexer = new HashMap<String, Integer>();
	public static final String ONTOLOGY_SOURCE = "owl.source";
	public static final String ONTOLOGY_TARGET = "owl.target";
	public static final String THRESHOLD = "threshold";
	private static final String REPLICATIONS = "numberOfReplications";

	/**
	 * Mapper that routes OWL classes to reduce tasks for pairwise comparison.
	 * A concept whose IRI namespace matches the source ontology is emitted once
	 * under its own record key (tagged with dataset id 1); a concept from the
	 * target ontology is replicated under every key in [0, numberOfReplications)
	 * (tagged with dataset id 2) so each reducer sees the full target set.
	 */
	public static class Map extends Mapper<LongWritable, OWLClass, Text, FileSD> {

		private Text word = new Text();
		private Text k = new Text();

		@Override
		public void map(LongWritable key, OWLClass value, Context context) throws IOException, InterruptedException {
			// Bug fix: the original used replaceAll(".owl", ""), which treats "."
			// as a regex wildcard, so ANY character followed by "owl" was stripped
			// (e.g. "xowl" -> ""). replace() performs a literal substitution.
			String source = context.getConfiguration().get(ONTOLOGY_SOURCE).replace(".owl", "");
			String target = context.getConfiguration().get(ONTOLOGY_TARGET).replace(".owl", "");
			int numberOfReplications = Integer.parseInt(context.getConfiguration().get(REPLICATIONS));

			if (value.getIRI().getNamespace().contains(source)) {
				// Source concept: one record, keyed by this mapper record's offset.
				k.set(key + "");
				word.set(value.getIRI().getFragment());
				context.write(k, new FileSD(word, new IntWritable(1), value));
			} else if (value.getIRI().getNamespace().contains(target)) {
				// Target concept: replicate to every reducer partition.
				// (Per-record debug printing removed from this hot loop.)
				for (int i = 0; i < numberOfReplications; i++) {
					k.set(i + "");
					word.set(value.getIRI().getFragment());
					context.write(k, new FileSD(word, new IntWritable(2), value));
				}
			}
		}

	}

	/**
	 * Reducer that compares one source concept against the replicated target
	 * concepts, combining seven linguistic string metrics with a structural
	 * neighborhood similarity, and emits the single best match at or above the
	 * configured threshold.
	 */
	public static class Reduce extends Reducer<Text, FileSD, Text, Text> {
		private Text result = new Text();
		private Text k = new Text();
		// Sequential id used as the output key; persists across reduce() calls.
		private int log = 1;
		public static final String IS_AZURE = "isAzure";
		// Running count of pairwise comparisons performed by this reducer instance.
		public int cont = 0;

		/**
		 * Expects exactly one value tagged with dataset id 1 (the source concept)
		 * plus the replicated target concepts; writes "source - target = score"
		 * for the best-scoring pair, or logs a diagnostic when the key received
		 * no source/target values.
		 */
		@Override
		public void reduce(Text key, Iterable<FileSD> values, Context context) throws IOException, InterruptedException {
			Double threshold = Double.valueOf(context.getConfiguration().get(THRESHOLD));
			int isAzure = context.getConfiguration().getInt(IS_AZURE, 0);
			Path source = new Path("Ontologies/" + context.getConfiguration().get(ONTOLOGY_SOURCE));
			Path target = new Path("Ontologies/" + context.getConfiguration().get(ONTOLOGY_TARGET));

			OWLOntology s = null;
			OWLOntology t = null;
			try {
				s = readOWL(source, isAzure);
				t = readOWL(target, isAzure);
			} catch (Exception e1) {
				// Ontology load failure leaves s/t null; each pairwise comparison
				// below will then fail and be reported individually in the loop.
				e1.printStackTrace();
			}

			// Split incoming values: dataset id 1 is the source concept,
			// everything else belongs to the target set. Values are copied
			// because the objects handed out by the iterable may be reused.
			FileSD filesDataSource = null;
			List<FileSD> filesDataSetTarget = new ArrayList<FileSD>();
			for (FileSD val : values) {
				if (val.getDataset().get() == 1) {
					filesDataSource = generateFileSD(val);
				} else {
					filesDataSetTarget.add(generateFileSD(val));
				}
			}

			String out = "";
			Double similarity;
			Double maxSimilarity = 0.0;
			if (filesDataSource != null) {
				for (FileSD fileTarget : filesDataSetTarget) {
					try {
						similarity = ComputeMatchers(filesDataSource, fileTarget, threshold, isAzure, s, t);
						// ComputeMatchers returns null when below threshold.
						if (similarity != null && similarity > maxSimilarity) {
							maxSimilarity = similarity;
							out = filesDataSource.getName() + " - " + fileTarget.getName() + " = " + formatDecimal(similarity) + "\n";
						}
						cont++;
					} catch (Exception e) {
						System.err.println("Erro ao computar as similaridades.");
						e.printStackTrace();
					}
				}
			} else if (filesDataSetTarget.isEmpty()) {
				System.err.println("Alguem nao foi replicado.");
			} else {
				System.err.println("Alguem nao foi computado.");
			}
			if (!out.isEmpty()) {
				result.set(out);
				k.set(log++ + "");
				context.write(k, result);
			}
			System.out.println("COMP: " + cont);
		}

		/** Formats a similarity score with at most two decimal places; "" for null. */
		private String formatDecimal(Double similarity) {
			return (similarity == null ? "" : new DecimalFormat("#.##").format(similarity));
		}

		/**
		 * Copies a FileSD's name and dataset tag into fresh Writables. Note the
		 * OWL concept reference itself is shared, not copied.
		 */
		private FileSD generateFileSD(FileSD file) {
			Text name = new Text(file.getName());
			IntWritable dataset = new IntWritable(file.getDataset().get());
			FileSD fileSd = new FileSD();
			fileSd.setName(name);
			fileSd.setDataset(dataset);
			fileSd.setConcept(file.getConcept());
			return fileSd;
		}

		/**
		 * Combines seven linguistic string metrics (equally weighted average)
		 * with the structural neighborhood similarity at a 70/30 split.
		 *
		 * @return the combined score when it is >= threshold, otherwise null
		 */
		private Double ComputeMatchers(FileSD conceptSource, FileSD conceptTarget, Double threshold, int isAzure, OWLOntology source, OWLOntology target) {
			Double structureNeighborhood = new StructureNeighborhood(conceptSource.getConcept(), conceptTarget.getConcept(), source, target).getSimilarity();

			String sourceName = conceptSource.getNameToString();
			String targetName = conceptTarget.getNameToString();

			double gramSimilarity = new QGramsDistance().getSimilarity(sourceName, targetName);
			double cosineSimilarity = new CosineSimilarity().getSimilarity(sourceName, targetName);
			double jaccardSimilarity = new JaccardSimilarity().getSimilarity(sourceName, targetName);
			double jaroWinklerSimilarity = new JaroWinkler().getSimilarity(sourceName, targetName);
			double levenshteinSimilarity = new Levenshtein().getSimilarity(sourceName, targetName);
			double soundexSimilarity = new Soundex().getSimilarity(sourceName, targetName);
			double diceSimilarity = new DiceSimilarity().getSimilarity(sourceName, targetName);

			double linguisticSimilarity = (gramSimilarity + cosineSimilarity + jaccardSimilarity + jaroWinklerSimilarity + levenshteinSimilarity + soundexSimilarity + diceSimilarity) / 7;

			Double result = (0.7 * linguisticSimilarity) + (0.3 * structureNeighborhood);

			if (result >= threshold) {
				return result;
			}
			return null;
		}

		/**
		 * Loads an ontology stored as a blob under SDProject/Input in the
		 * "matchercluster" Azure container: downloads it to a unique temporary
		 * local file, parses it with the OWL API, then deletes the file.
		 *
		 * NOTE(security): the storage connection string below embeds the account
		 * key in source code. It should be moved to job configuration or a
		 * credential store, and the exposed key rotated.
		 *
		 * @param path    HDFS-style path whose file name identifies the blob
		 * @param isAzure currently unused; retained for interface compatibility
		 */
		private static OWLOntology readOWL(Path path, int isAzure) throws Exception {
			final String storageConnectionString = "DefaultEndpointsProtocol=http;" + "AccountName=testematcher;"
					+ "AccountKey=TqIn51pzE5r8CyAv0QeTTBqtd7LbS7bksSv/C8CMXAzvabqjtobID4qk5rw9yiY55p4mx5pwUC0/I1l7b19Zqw==";

			CloudStorageAccount account = CloudStorageAccount.parse(storageConnectionString);
			CloudBlobClient serviceClient = account.createCloudBlobClient();

			// Container name must be lower case.
			CloudBlobContainer container = serviceClient.getContainerReference("matchercluster");
			CloudBlobDirectory sdproj = container.getDirectoryReference("SDProject").getSubDirectoryReference("Input");
			CloudBlockBlob arq = sdproj.getBlockBlobReference(path.getName());

			// Unique temp file: the original fixed name "TEMPONTO.owl" raced when
			// two tasks on the same node downloaded concurrently.
			File destinationFile = File.createTempFile("TEMPONTO", ".owl");
			try {
				arq.downloadToFile(destinationFile.getAbsolutePath());

				// Parse the downloaded document directly from its file IRI.
				OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
				IRI docIRI = IRI.create(destinationFile);
				return manager.loadOntologyFromOntologyDocument(docIRI);
			} finally {
				// Always remove the temp copy, even when download/parsing fails.
				destinationFile.delete();
			}
		}

	}

}