package com.fluidops.datacatalog.reltables;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.openrdf.model.BNode;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.ntriples.NTriplesWriter;
import org.openrdf.rio.turtle.TurtleWriter;
import org.openrdf.sail.Sail;
import org.openrdf.sail.nativerdf.NativeStore;

import com.fluidops.datacatalog.reltables.acsDB.ACSDB;
import com.fluidops.datacatalog.reltables.adapter.EurostatExtractor;
import com.fluidops.datacatalog.reltables.adapter.ExtractionAdapter;
import com.fluidops.datacatalog.reltables.adapter.WorldbankAdapter;
import com.fluidops.datacatalog.reltables.data.EurostatLabelProvider;
import com.fluidops.datacatalog.reltables.data.WorldbankLabelProvider;
import com.fluidops.datacatalog.reltables.labelbase.Freebase;
import com.fluidops.datacatalog.reltables.labelbase.ILabelSource;
import com.fluidops.datacatalog.reltables.model.Dataset;
import com.fluidops.datacatalog.reltables.model.Dataset.RelatednessEntry;
import com.fluidops.datacatalog.reltables.schemacompare.SchemaMatcher;
import com.fluidops.datacatalog.reltables.schemascore.SchemaSumScore;
import com.fluidops.datacatalog.reltables.sim.AveragePair;
import com.fluidops.datacatalog.reltables.sim.entity.UniformWeight;

public class Starter {

	public static final String catalogSubject = "http://keywordsearch.index/";
	public static final String catalogProperty = "http://keywordsearch.has.catalog/";

	public static final String BNODE = "relTablesBNode";
	
	public static final String SCHEMA_COMPLEMENT_META = "http://relatedTables/schema_complement/meta";

	public static final String ACS_DB_SINGLE_QUERY_TRIES = "http://relatedTables/singleFrequency/tries";
	public static final String ACS_DB_SINGLE_QUERY_HITS = "http://relatedTables/singleFrequency/hits";
	public static final String ACS_DB_COMBO_QUERY_HITS = "http://relatedTables/comboFrequency/hits";

	public static final String ENTITY_COMPLEMENT_SCORE = "http://relatedTables/entity_complement/score/";
	public static final String ENTITY_COMPLEMENT_DATASET = "http://relatedTables/entity_complement/dataset/";
	public static final String ENTITY_COMPLEMENT_RELATION = "http://relatedTables/entity_complement/ec_relation/";

	public static final String SCHEMA_COMPLEMENT_SCORE = "http://relatedTables/schema_complement/score/";
	public static final String SCHEMA_COMPLEMENT_DATASET = "http://relatedTables/schema_complement/dataset/";
	public static final String SCHEMA_COMPLEMENT_RELATION = "http://relatedTables/schema_complement/ec_relation/";

	public static final String ACS_DB_PATH_PROPERTY = "acsDB";
	public static final String SCHEMA_REPO_PROPERTY = "schemaRepo";
	public static final String DATA_REPO_PROPERTY = "dataRepo";

	public static final String ES_INPUT_PROPERTY = "esInput";
	public static final String WB_INPUT_PROPERTY = "wbInput";
	public static final String WB_LABELS_PROPERTY = "wbLabels";

	public static final String FREEBASE_CACHE_PROPERTY = "freebaseCache";
	
	public static final String RELATEDNESS_THRESHOLD_PROPERTY = "relatednessThreshold";
	public static Double RELATEDNESS_THRESHOLD = 0.1d;

	public static final String ES_OUTPUT_PROPERTY = "esOutput";
	public static final String WB_OUTPUT_PROPERTY = "wbOutput";

	public static final String ENTITY_SCORE_OUTPUT_PROPERTY = "entityScoreOutput";
	public static final String SCHEMA_SCORE_OUTPUT_PROPERTY = "schemaScoreOutput";

	public static final String ES_DATASET_IDENTIFIER = "eurostat";
	public static final String WB_DATASET_IDENTIFIER = "worldbank";

	public static final String LOGGER_CONFIG_PATH = "resources/log4j.properties";
	public static final String DEFAULT_CONFIG_PATH = "resources/config.prop";
	public static final String DEFAULT_OWLIM_LICENSE_PATH = "resources/owlim-se-5.1.license";

	private static Logger logger = Logger.getLogger(Starter.class.getName());


	/**
	 * Entry point. Loads the configuration, initializes the ACS-DB and the
	 * schema/data repositories, computes entity-complement and
	 * schema-complement relatedness scores between all datasets, writes
	 * both results to Turtle files, and shuts everything down.
	 *
	 * @param args optional; args[0] may name a properties file that
	 *             overrides the bundled default configuration
	 * @throws Exception on repository, scoring, or serialization failures
	 */
	public static void main(String[] args) throws Exception{
		initLogger();
		
		// Configuration: an explicit file given as args[0] wins; otherwise
		// the classpath default is used. loadedByDefault also controls
		// whether the bundled OWLIM license is installed (see initOwlim).
		boolean loadedByDefault = true;
		Properties prop = null;
		if(args.length > 0 && new File(args[0]).exists()){
			prop = loadProperties(new File(args[0]));
			loadedByDefault = false;
			logger.info("Properties loaded from file: " + args[0]);
		}
		if(prop == null){
			prop = loadProperties(DEFAULT_CONFIG_PATH);
			loadedByDefault = true;
			logger.info("Loaded default configuration settings. ");
		}

		if(prop.containsKey(ACS_DB_PATH_PROPERTY) && prop.containsKey(SCHEMA_REPO_PROPERTY)){

			// Three OWLIM-backed stores: the ACS-DB, the label/schema
			// statements, and the dataset contents.
			ACSDB acsDB = new ACSDB(initOwlim(prop.getProperty(ACS_DB_PATH_PROPERTY), loadedByDefault));
			Repository schemaRepo = initOwlim(prop.getProperty(SCHEMA_REPO_PROPERTY), loadedByDefault);
			Repository dataRepo = initOwlim(prop.getProperty(DATA_REPO_PROPERTY), loadedByDefault);

			RELATEDNESS_THRESHOLD = Double.parseDouble(prop.getProperty(RELATEDNESS_THRESHOLD_PROPERTY, RELATEDNESS_THRESHOLD + ""));
			logger.info("Set relatedness threshhold to " + RELATEDNESS_THRESHOLD);

			// Ensures the Eurostat/WorldBank label indexes exist in the
			// schema repository, loading them from file or web if missing.
			checkSchemaRepo(schemaRepo, prop);

			List<URI> datasetsURIs = retrieveDatasetsURIs(dataRepo);
			
			// Optional on-disk cache to avoid repeated Freebase lookups.
			Freebase freebase = new Freebase();
			if(prop.containsKey(FREEBASE_CACHE_PROPERTY) && (new File(prop.getProperty(FREEBASE_CACHE_PROPERTY))).exists()){
				freebase.loadFromFile(new File(prop.getProperty(FREEBASE_CACHE_PROPERTY)));
				logger.info("Loaded freebase label cache from " + prop.getProperty(FREEBASE_CACHE_PROPERTY));
			}
			
			List<Dataset> datasets = createDatasets(schemaRepo, dataRepo, datasetsURIs, freebase);
			
			
			logger.info("Started to compute entity complement scores. ");
			calculateEntityComplementScore(datasets);
			logger.info("Entity complement scores computed. ");

			handleEntityComplementResult(datasets, prop.getProperty(ENTITY_SCORE_OUTPUT_PROPERTY));
			// reset related datasets for schema complement
			resetRelatedDatasets(datasets);
			
			 
			logger.info("Started to compute schema complement scores. ");
			SchemaComplementScorer scs = calculateSchemaComplementScore(datasets, acsDB);
			logger.info("Schema complement scores computed. ");
			
			handleSchemaComplementResult(datasets, scs, prop.getProperty(SCHEMA_SCORE_OUTPUT_PROPERTY));
			 
			// Persist the (possibly grown) label cache for the next run.
			if(prop.containsKey(FREEBASE_CACHE_PROPERTY)){
				freebase.saveToFile(new File(prop.getProperty(FREEBASE_CACHE_PROPERTY)));
				logger.info("Store freebase label cache to " + prop.getProperty(FREEBASE_CACHE_PROPERTY));
			}
			
			// Tear-down: dataset connections first, then the stores.
			closeConnections(datasets);
			acsDB.shutdown();
			dataRepo.shutDown();
			schemaRepo.shutDown();
		}else{
			logger.warn("acsDB path and schema repo path must be set!");
		}


	}

	/**
	 * Serializes the entity-complement results to a Turtle file at
	 * {@code path}. Each relatedness entry becomes a blank node linking
	 * the source dataset to the related dataset and its score.
	 *
	 * @param datasets datasets whose related-dataset entries are written
	 * @param path     output file path for the Turtle serialization
	 * @throws Exception if the file cannot be written or RDF handling fails
	 */
	private static void handleEntityComplementResult(List<Dataset> datasets, String path) throws Exception{
		int count = 0;
		ValueFactory vf = ValueFactoryImpl.getInstance();

		// Close the underlying writer even if serialization fails, so the
		// file handle is not leaked and buffered output is flushed.
		BufferedWriter out = new BufferedWriter(new FileWriter(new File(path)));
		try{
			TurtleWriter tw = new TurtleWriter(out);
			tw.startRDF();

			URI relPred = vf.createURI(ENTITY_COMPLEMENT_RELATION);
			URI scorePred = vf.createURI(ENTITY_COMPLEMENT_SCORE);
			URI dsPred = vf.createURI(ENTITY_COMPLEMENT_DATASET);

			BNode bnode;
			for(Dataset ds : datasets){
				for(RelatednessEntry re : ds.getRelatedDatasets()){
					// One blank node per (dataset, related dataset) pair.
					bnode = vf.createBNode(BNODE + count++);
					tw.handleStatement(vf.createStatement(ds.getIdentifier(), relPred, bnode));
					tw.handleStatement(vf.createStatement(bnode, dsPred, re.getIdentifier()));
					tw.handleStatement(vf.createStatement(bnode, scorePred, vf.createLiteral(re.getSimilarity())));
				}
			}

			tw.endRDF();
		}finally{
			out.close();
		}
	}

	/**
	 * Serializes the schema-complement results to a Turtle file at
	 * {@code path}, including the ACS-DB query statistics as metadata
	 * statements. Each relatedness entry becomes a blank node linking
	 * the source dataset to the related dataset and its score.
	 *
	 * @param datasets datasets whose related-dataset entries are written
	 * @param scs      scorer carrying the ACS-DB consistency statistics
	 * @param path     output file path for the Turtle serialization
	 * @throws Exception if the file cannot be written or RDF handling fails
	 */
	private static void handleSchemaComplementResult(List<Dataset> datasets, SchemaComplementScorer scs, String path) throws Exception{
		int count = 0;
		ValueFactory vf = ValueFactoryImpl.getInstance();

		// Close the underlying writer even if serialization fails, so the
		// file handle is not leaked and buffered output is flushed.
		BufferedWriter out = new BufferedWriter(new FileWriter(new File(path)));
		try{
			TurtleWriter tw = new TurtleWriter(out);
			tw.startRDF();

			// Bug fix: these meta statements were previously created but
			// never passed to the writer, so they were silently dropped
			// from the output file.
			tw.handleStatement(vf.createStatement(vf.createURI(SCHEMA_COMPLEMENT_META), vf.createURI(ACS_DB_SINGLE_QUERY_TRIES), vf.createLiteral(scs.getConsistencyScore().getSingleQueryTries())));
			tw.handleStatement(vf.createStatement(vf.createURI(SCHEMA_COMPLEMENT_META), vf.createURI(ACS_DB_SINGLE_QUERY_HITS), vf.createLiteral(scs.getConsistencyScore().getSingleQueryHits())));
			tw.handleStatement(vf.createStatement(vf.createURI(SCHEMA_COMPLEMENT_META), vf.createURI(ACS_DB_COMBO_QUERY_HITS), vf.createLiteral(scs.getConsistencyScore().getComboQueryHits())));

			URI relPred = vf.createURI(SCHEMA_COMPLEMENT_RELATION);
			URI scorePred = vf.createURI(SCHEMA_COMPLEMENT_SCORE);
			URI dsPred = vf.createURI(SCHEMA_COMPLEMENT_DATASET);

			BNode bnode;
			for(Dataset ds : datasets){
				for(RelatednessEntry re : ds.getRelatedDatasets()){
					// One blank node per (dataset, related dataset) pair.
					bnode = vf.createBNode(BNODE + count++);
					tw.handleStatement(vf.createStatement(ds.getIdentifier(), relPred, bnode));
					tw.handleStatement(vf.createStatement(bnode, dsPred, re.getIdentifier()));
					tw.handleStatement(vf.createStatement(bnode, scorePred, vf.createLiteral(re.getSimilarity())));
				}
			}

			tw.endRDF();
		}finally{
			out.close();
		}
	}

	/**
	 * Runs the pairwise entity-complement comparison over every ordered
	 * pair of distinct datasets; results are recorded on the datasets by
	 * the scorer itself.
	 *
	 * @param datasets the datasets to compare with one another
	 */
	private static void calculateEntityComplementScore(List<Dataset> datasets){
		EntityComplementScorer scorer = new EntityComplementScorer(new AveragePair(new UniformWeight()), new SchemaMatcher());
		for(Dataset left : datasets){
			for(Dataset right : datasets){
				// Skip self-comparison.
				if(left.getIdentifier().equals(right.getIdentifier())){
					continue;
				}
				scorer.compare(left, right);
			}
		}
	}

	/**
	 * Runs the pairwise schema-complement comparison over every ordered
	 * pair of distinct datasets and returns the scorer, which also
	 * carries the aggregated ACS-DB query statistics.
	 *
	 * @param datasets the datasets to compare with one another
	 * @param acsDB    attribute statistics database backing the score
	 * @return the scorer after all comparisons have run
	 */
	private static SchemaComplementScorer calculateSchemaComplementScore(List<Dataset> datasets, ACSDB acsDB){
		SchemaComplementScorer scorer = new SchemaComplementScorer(new SchemaSumScore(acsDB));
		for(Dataset left : datasets){
			for(Dataset right : datasets){
				// Skip self-comparison.
				if(left.getIdentifier().equals(right.getIdentifier())){
					continue;
				}
				scorer.compareDatasets(left, right);
			}
		}
		return scorer;
	}

	/**
	 * Releases the repository connections held by each dataset.
	 *
	 * @param datasets datasets whose connections are closed
	 */
	private static void closeConnections(List<Dataset> datasets){
		for(Dataset dataset : datasets){
			dataset.closeConnections();
		}
	}

	/**
	 * Clears the recorded relatedness entries and cached data on each
	 * dataset, so the schema-complement pass starts from a clean slate.
	 *
	 * @param datasets datasets to reset
	 */
	private static void resetRelatedDatasets(List<Dataset> datasets){
		for(Dataset dataset : datasets){
			dataset.resetRelatedDatasets();
			dataset.clearCaches();
		}
	}


	/**
	 * Builds {@link Dataset} instances for the given dataset URIs,
	 * choosing the extraction adapter by the origin encoded in the URI
	 * (Eurostat or WorldBank). URIs of unknown origin are skipped with a
	 * warning.
	 *
	 * <p>NOTE(review): only the first {@code MAX_EXAMINED_URIS} URIs are
	 * examined, mirroring the original hard-coded {@code i == 50} limit
	 * — confirm whether this cap is still intended.
	 *
	 * @param schemaRepo  repository holding the label/schema statements
	 * @param dataRepo    repository holding the dataset contents
	 * @param datasetURIs identifiers of the datasets to wrap
	 * @param labelSource source used to resolve entity labels
	 * @return the created datasets
	 * @throws Exception if a repository connection cannot be obtained
	 */
	private static List<Dataset> createDatasets(Repository schemaRepo, Repository dataRepo, List<URI> datasetURIs, ILabelSource labelSource) throws Exception{
		// Named constant replacing the magic number inside the loop.
		final int MAX_EXAMINED_URIS = 50;

		List<Dataset> datasets = new LinkedList<Dataset>();

		int examined = 0;
		for(URI dsURI : datasetURIs){
			if(examined == MAX_EXAMINED_URIS){
				break;
			}
			examined++;
			ExtractionAdapter exAdapter;
			if(dsURI.stringValue().contains(ES_DATASET_IDENTIFIER)){
				exAdapter = new EurostatExtractor(schemaRepo.getConnection(), null);
			}else if(dsURI.stringValue().contains(WB_DATASET_IDENTIFIER)){
				exAdapter = new WorldbankAdapter(schemaRepo.getConnection(), dataRepo.getConnection(), null, dsURI);
			}else{
				// Unknown origin: skip (replaces the old 'found' flag).
				logger.warn("Could not identify origin of " + dsURI.stringValue());
				continue;
			}
			datasets.add(new Dataset(dsURI, labelSource, exAdapter, RELATEDNESS_THRESHOLD));
		}

		return datasets;
	}

	/**
	 * Retrieves the identifiers of all datasets by listing the contexts
	 * of the data repository (the code assumes one context per dataset).
	 *
	 * @param dataRepo repository whose context IDs identify the datasets
	 * @return the context URIs; non-URI contexts (e.g. blank nodes) are
	 *         skipped with a warning instead of failing with a
	 *         ClassCastException
	 * @throws Exception if the repository cannot be queried
	 */
	private static List<URI> retrieveDatasetsURIs(Repository dataRepo) throws Exception{
		List<URI> datasets = new LinkedList<URI>();

		RepositoryConnection con = dataRepo.getConnection();
		logger.info("Started to retrieve the dataset contexts.");
		try{
			RepositoryResult<Resource> context = con.getContextIDs();
			try{
				while(context.hasNext()){
					Resource r = context.next();
					if(r instanceof URI){
						datasets.add((URI) r);
					}else{
						// Context IDs may also be blank nodes; those cannot
						// serve as dataset identifiers here.
						logger.warn("Skipping non-URI context: " + r);
					}
				}
			}finally{
				context.close();
			}
		}finally{
			// Release the connection even if iteration fails (the original
			// leaked it on exception).
			con.close();
		}
		logger.info("Dataset contexts retrieved.");
		return datasets;
	}

	/**
	 * Ensures the schema repository contains the label indexes for both
	 * the Eurostat and the WorldBank catalogs. For each catalog: if no
	 * index marker is present, labels are either bulk-loaded from a
	 * configured N-Triples input file or fetched from the web (in which
	 * case they are also written to an output file and the repository).
	 *
	 * @param schemaRepo repository that stores the label statements
	 * @param prop       configuration with the input/output file locations
	 * @throws Exception if fetching, file I/O, or repository access fails
	 */
	private static void checkSchemaRepo(Repository schemaRepo, Properties prop) throws Exception{
		if(!hasIndexOfCatalog(schemaRepo, EurostatLabelProvider.eurostatRDFNamespace)){
			if(!prop.containsKey(ES_INPUT_PROPERTY) || !(new File(prop.getProperty(ES_INPUT_PROPERTY))).exists()){
				// No input file configured/present: fetch from the web.
				logger.info("Loading the Eurostat labels from the web.");
				EurostatLabelProvider esLabelProvider = new EurostatLabelProvider();
				List<Statement> esLabels = esLabelProvider.getLabelStatements();
				String esOut = prop.getProperty(ES_OUTPUT_PROPERTY);
				logger.info("Write es_labels to file " + esOut);
				storeToFile(esOut, esLabels);
				logger.info("Finished loading es_labels to file.");
				logger.info("Write es_labels to repository " + schemaRepo.getDataDir().getAbsolutePath());
				storeToRepo(schemaRepo, esLabels);
				logger.info("Finished loading es_labels to repository.");
			}else{
				// Input file exists: bulk-load it in a single transaction.
				File esInput = new File(prop.getProperty(ES_INPUT_PROPERTY));
				logger.info("Starting to load input file of eurostat labels " + esInput.getAbsolutePath());
				RepositoryConnection con = schemaRepo.getConnection();
				con.setAutoCommit(false);
				con.add(esInput, "", RDFFormat.NTRIPLES);
				con.commit();
				con.close();
				logger.info("Eurostat labels loaded. ");
			}
		}else{
			logger.info("Labels of Eurostat already stored.");
		}

		if(!hasIndexOfCatalog(schemaRepo, WorldbankLabelProvider.CATALOG_NAMESPACE)){
			if(!prop.containsKey(WB_INPUT_PROPERTY) || !(new File(prop.getProperty(WB_INPUT_PROPERTY))).exists()){
				// No input file configured/present: fetch from the web.
				logger.info("Loading the WorldBank labels from the web.");
				WorldbankLabelProvider wbLabelProvider = new WorldbankLabelProvider();
				List<Statement> wbLabels = wbLabelProvider.getLabelStatements();
				String wbOut = prop.getProperty(WB_OUTPUT_PROPERTY);
				// Fixed copy-paste in the log message (said "es_labels").
				logger.info("Write wb_labels to file " + wbOut);
				storeToFile(wbOut, wbLabels);
				logger.info("Finished loading worldbank labels to file.");
				logger.info("Write wb_labels to repository " + schemaRepo.getDataDir().getAbsolutePath());
				storeToRepo(schemaRepo, wbLabels);
				logger.info("Finished loading wb_labels to repository.");
			}else{
				// Input file exists: bulk-load it in a single transaction
				// (setAutoCommit(false) added for consistency with the
				// Eurostat branch above).
				File wbInput = new File(prop.getProperty(WB_INPUT_PROPERTY));
				logger.info("Starting to load input file of worldbank labels " + wbInput.getAbsolutePath());
				RepositoryConnection con = schemaRepo.getConnection();
				con.setAutoCommit(false);
				con.add(wbInput, "", RDFFormat.NTRIPLES);
				con.commit();
				con.close();
				logger.info("Worldbank labels loaded.");
			}
			if(prop.containsKey(WB_LABELS_PROPERTY)){
				// Optional extra file with country labels.
				logger.info("Write wb_country_labels to repository " + schemaRepo.getDataDir().getAbsolutePath());
				File wbCountryLabels = new File(prop.getProperty(WB_LABELS_PROPERTY));
				logger.info("Starting to load input file of worldbank country labels " + wbCountryLabels.getAbsolutePath());
				RepositoryConnection con = schemaRepo.getConnection();
				con.setAutoCommit(false);
				con.add(wbCountryLabels, "", RDFFormat.NTRIPLES);
				con.commit();
				con.close();
				logger.info("Finished loading wb_country_labels to repository.");
			}
		}else{
			logger.info("Labels of WorldBank already stored.");
		}
	}

	/**
	 * Checks whether the schema repository already contains the marker
	 * statement indicating that the labels of the given catalog provider
	 * have been indexed.
	 *
	 * @param schemaRepo   repository to probe
	 * @param providerName namespace URI identifying the catalog provider
	 * @return true if the marker statement exists
	 * @throws Exception if the repository cannot be queried
	 */
	private static boolean hasIndexOfCatalog(Repository schemaRepo, String providerName) throws Exception{
		RepositoryConnection con = schemaRepo.getConnection();
		try{
			ValueFactory vf = con.getValueFactory();
			return con.hasStatement(vf.createURI(catalogSubject), vf.createURI(catalogProperty), vf.createURI(providerName), false);
		}finally{
			// Always release the connection, even if the lookup fails
			// (the original leaked it on exception).
			con.close();
		}
	}

	/**
	 * Creates and initializes an OWLIM-backed repository at the given
	 * location. Configuration is passed via system properties, and the
	 * OWLIM sail class is loaded reflectively so there is no compile-time
	 * dependency on it.
	 *
	 * @param location    storage folder for the repository
	 * @param loadLicense whether to point OWLIM at the bundled license file
	 * @return the initialized repository
	 * @throws Exception if the sail cannot be instantiated or initialized
	 */
	private static Repository initOwlim(String location, boolean loadLicense) throws Exception{
		if(loadLicense){
			System.setProperty("owlim-license", loadResource(DEFAULT_OWLIM_LICENSE_PATH).getPath());
		}
		// OWLIM reads its configuration from system properties.
		System.setProperty("ruleset", "empty"); // for performance reasons
		System.setProperty("repository-type", "weighted-file-repository");
		System.setProperty("console-thread", "false");
		System.setProperty("storage-folder", location);
		System.setProperty("build-pcsot", "true");

		Sail owlimSail = (Sail) Class.forName("com.ontotext.trree.OwlimSchemaRepository")
				.getConstructor().newInstance();
		Repository repo = new SailRepository(owlimSail);
		repo.initialize();
		logger.info("OWLIM Repository initialized at " + location);
		return repo;
	}

	/**
	 * Creates and initializes a Sesame NativeStore-backed repository at
	 * the given location (alternative backend to {@link #initOwlim}).
	 *
	 * @param location storage folder for the native store
	 * @return the initialized repository
	 * @throws Exception if initialization fails
	 */
	private static Repository initNativeStore(String location) throws Exception{
		Repository repo = new SailRepository(new NativeStore(new File(location)));
		repo.initialize();
		return repo;
	}
	
	/**
	 * Loads the given properties file.
	 *
	 * @param file the configuration file to read
	 * @return the loaded properties, or {@code null} if the file could not
	 *         be read (callers use null to fall back to the defaults)
	 */
	private static Properties loadProperties(File file){
		Properties prop = new Properties();

		InputStream is = null;
		try{
			is = new FileInputStream(file);
			prop.load(is);
		}catch(IOException e){
			// Signal failure via null so the caller falls back to defaults.
			// The old message claimed "not found" for every IOException;
			// log the actual cause instead.
			prop = null;
			logger.warn("Specified configuration file could not be read: " + file.getAbsolutePath(), e);
		} finally{
			try {
				if(is != null){
					is.close();
				}
			} catch (IOException e1) {
				logger.warn("Could not close input stream of config file.");
			}
		}
		return prop;
	}
	
	
	/**
	 * Loads a properties file from the classpath.
	 *
	 * @param path classpath-relative location of the properties resource
	 * @return the loaded properties (empty if the resource is missing, as
	 *         in the original contract), or {@code null} if the resource
	 *         exists but cannot be parsed
	 */
	private static Properties loadProperties(String path){
		Properties prop = new Properties();
		InputStream is = loadResourceAsStream(path);
		if(is == null){
			// Keep the original contract (empty Properties, not null) but
			// no longer fail silently.
			logger.warn("Configuration resource not found on classpath: " + path);
			return prop;
		}
		try{
			prop.load(is);
		}catch(IOException e){
			prop = null;
			logger.warn("Specified configuration file could not be read. " + path);
		}finally{
			// The original leaked the stream on the success path.
			try {
				is.close();
			} catch (IOException e1) {
				logger.warn("Could not close input stream of config file.");
			}
		}
		return prop;
	}

	/** Configures log4j from the bundled properties file on the classpath. */
	private static void initLogger(){
		PropertyConfigurator.configure(loadResource(LOGGER_CONFIG_PATH));
	}


	/**
	 * Resolves a classpath resource to a URL.
	 *
	 * @param path classpath-relative resource location
	 * @return the resource URL, or null if the resource is absent
	 */
	private static URL loadResource(String path){
		ClassLoader loader = Starter.class.getClassLoader();
		return loader.getResource(path);
	}

	/**
	 * Opens a classpath resource as a stream.
	 *
	 * @param path classpath-relative resource location
	 * @return the opened stream, or null if the resource is absent
	 */
	private static InputStream loadResourceAsStream(String path){
		ClassLoader loader = Starter.class.getClassLoader();
		return loader.getResourceAsStream(path);
	}

	/**
	 * Stores the given statements in the repository inside a single
	 * transaction.
	 *
	 * @param repo       target repository
	 * @param statements statements to add
	 * @throws Exception if the connection, an add, or the commit fails
	 */
	public static void storeToRepo(Repository repo, List<Statement> statements) throws Exception{
		RepositoryConnection con = repo.getConnection();
		try{
			// Batch all additions into one explicit commit.
			con.setAutoCommit(false);
			for(Statement s : statements){
				con.add(s);
			}
			con.commit();
		}finally{
			// Release the connection even if adding/committing fails
			// (the original leaked it on exception).
			con.close();
		}
	}

	/**
	 * Writes the given statements to a file in N-Triples format.
	 *
	 * @param location   output file path (created or overwritten)
	 * @param statements statements to serialize
	 * @throws Exception if the file cannot be written or serialization fails
	 */
	public static void storeToFile(String location, List<Statement> statements) throws Exception{
		// FileWriter creates (or truncates) the file itself, so the old
		// createNewFile() call was redundant. Close the writer in finally
		// so buffered output is flushed and the handle is not leaked.
		BufferedWriter out = new BufferedWriter(new FileWriter(new File(location)));
		try{
			NTriplesWriter ntw = new NTriplesWriter(out);
			ntw.startRDF();
			for(Statement s : statements){
				ntw.handleStatement(s);
			}

			ntw.endRDF();
		}finally{
			out.close();
		}
	}
}
