package org.OpenGeoPortal.LayerData;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Hashtable;
import java.util.Vector;

import javax.xml.parsers.*;

import org.apache.commons.lang.StringEscapeUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;



/**
 * code to convert FGDC data to Solr ingestable xml files
 * this code uses a DOM based API to parse the FGDC input data
 * 
 * currently, you have to change some static variables and recompile the code before running it
 * it is typically run from an IDE and converts a directory of FGDC files
 *   to a directory of ingestable Solr files
 * 
 * for reference, equivalent php code to derive the geometry type from harvard fgdc xml docs:
 * <pre>
 * //assign geometry type
 * if ($xml_file->spdoinfo->direct == 'Raster'){
 *       $geometry_type = 'raster';
 * }
 * else {
 *       $geotype = (string) $xml_file->spdoinfo->ptvctinf->sdtsterm->sdtstype;
 *       switch($geotype){
 *               case "G-polygon":
 *                       $geometry_type = 'polygon';
 *               break;
 *               case "Composite object":
 *               case "Entity point":
 *                       $geometry_type = 'point';
 *               break;
 *               case "String":
 *                       $geometry_type = 'line';
 *               break;
 *               default:
 *                       $geometry_type = 'undefined';
 *       }
 * }
 * </pre>
 * @author smcdon08
 */
public class FgdcToSolr 
{
	

	static String fgdcDirectoryName; 
	static String solrDirectoryName; 
	static int baseLayerId; 
	static String institution; 
	static String wmsUrl; 
	
	
	public static enum Key{LayerId, Title, EsriName, Abstract, WestBc, EastBc, NorthBc, SouthBc, DataType, Publisher, 
							Originator, ThemeKeywords, PlaceKeywords, ContentDate, FgdcText};

							
	/**
	 * convert the passed Fgdc file to a Solr file at the passed destination
	 * The passed layerId is a Solr key
	 * @param inputFilename
	 * @param outputFilename
	 * @param layerId
	 */
	public static void convert(String inputFilename, String outputFilename, int layerId)
	{
		Hashtable<Key, Object> layerValues = readFile(inputFilename);
		layerValues.put(Key.LayerId, layerId);
		// add full text of document to hashtable
		File inputFile = new File(inputFilename);	
		layerValues.put(Key.FgdcText, getFgdcText(inputFile));
		saveSolrDocument(outputFilename, layerValues);
	}
	
	public static Hashtable<Key, Object> readFile(String inputFilename)
	{
		try
		{
			DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
			documentBuilderFactory.setValidating(false);  // dtd isn't available
			documentBuilderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
			DocumentBuilder documentBuilder;
			documentBuilder = documentBuilderFactory.newDocumentBuilder();
			
			File inputFile = new File(inputFilename);	
			Document document = documentBuilder.parse(inputFile);
			Hashtable<Key, Object> layerValues = processDocument(document);
			return layerValues;
		}
		catch (ParserConfigurationException e) 	
	    {
	    	System.out.println("error with file: " + inputFilename);
	    	e.printStackTrace();
	    }
	    catch (IOException e)
	    {
	    	System.out.println("error with file: " + inputFilename);
	    	e.printStackTrace();
	    } 
	    catch (SAXException e) 
	    {
	    	System.out.println("error with file: " + inputFilename);
			e.printStackTrace();
		}
	    return null;
	}
	
	/**
	 * read the file into a string, escape it, and return it
	 * @param inputFile
	 * @param layerValues
	 */
	private static String getFgdcText(File inputFile)
	{
		try 
		{
			BufferedReader reader = new BufferedReader(new FileReader(inputFile));
			String fileContents = "";
			String currentLine = reader.readLine();
			while (currentLine != null)
			{
				fileContents += currentLine;
				currentLine = reader.readLine();
			}
			fileContents = fileContents.replaceAll("[^\\p{ASCII}]", "");

			String escapedFileContents = StringEscapeUtils.escapeHtml(fileContents);
			return escapedFileContents;
		} 
		catch (FileNotFoundException e) 
		{
			System.out.println("error in FgedToSolr.getFgdcText");
			e.printStackTrace();
		} 
		catch (IOException e) 
		{
			System.out.println("error in FgedToSolr.getFgdcText");
			e.printStackTrace();
		}
		return "";
	}
	
	private static String keyToFgdcTag(Key key)
	{
		if (key == Key.Title)
			return "title";
		else if (key == Key.Abstract)
			return "abstract";
		else if (key == Key.WestBc)
			return "westbc";
		else if (key == Key.EastBc)
			return "eastbc";
		else if (key == Key.NorthBc)
			return "northbc";
		else if (key == Key.SouthBc)
			return "southbc";
		else if (key == Key.EsriName)
			return "ftname";
		else if (key == Key.Publisher)
			return "publish";
		else if (key == Key.Originator)
			return "origin";
		else if (key == Key.ThemeKeywords)
			return "themekey";
		else if (key == Key.PlaceKeywords)
			return "placekey";
		else 
		{
			System.out.println("error in FgdcToSolr.keyToFgdcTag, unexpected key = " + key);
			return "error";
		}
		
	}
	
	/**
	 * create a data structure to hold information about this layer
	 * walk the DOM, pulling out the needed information
	 *   and saving it in a hashtable
	 * @param document
	 * @return
	 */
	private static Hashtable<Key, Object> processDocument(Document document)
	{
		Hashtable<Key, Object> returnValue = new Hashtable<Key, Object>();
		addToHashtable(document, returnValue, keyToFgdcTag(Key.Title), Key.Title);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.Abstract), Key.Abstract);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.WestBc), Key.WestBc);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.EastBc), Key.EastBc);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.NorthBc), Key.NorthBc);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.SouthBc), Key.SouthBc);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.EastBc), Key.EsriName);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.Publisher), Key.Publisher);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.Originator), Key.Originator);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.ThemeKeywords), Key.ThemeKeywords);
		addToHashtable(document, returnValue, keyToFgdcTag(Key.PlaceKeywords), Key.PlaceKeywords);
		
		//String publisherValue = getPublisherValue(document);
		//returnValue.put(Key.Publisher, publisherValue);
		String dateValue = getDateValue(document);
		returnValue.put(Key.ContentDate, dateValue);
		
		handleDataType(document, returnValue);
		
		return returnValue;
	}
	
	/**
	 * get the values associated with xml tag tagName and store them in the hash with the key key
	 * 
	 * @param document
	 * @param layerValues
	 * @param tagName
	 * @param key
	 */
	private static void addToHashtable(Document document, Hashtable<Key, Object> layerValues, String tagName, Key key)
	{
		NodeList nodes = document.getElementsByTagName(tagName);
		if (nodes.getLength() == 0)
		{
			//System.out.println("  did not find tag " + tagName);
			numberOfWarnings++;
			missingTags.add(tagName);
			return;
		}
		if ((key == Key.PlaceKeywords) || (key == Key.ThemeKeywords))
		{
			// keys with multiple values must be handled differently
			// should we use a multiple value field in the schema or simply concatenate together a string
			//  for simplicity, we concatenate strings
			String keyValue = getDocumentValues(document, tagName); 
			layerValues.put(key, keyValue.trim());
			if (keyValue.length() == 0)
			{
				//System.out.println("  did not find values for tag " + tagName);
				numberOfWarnings++;
				missingTags.add(tagName);
			}
		}
		else
		{
			// for most tags, we get the value from the first tag
			Node node = nodes.item(0);
			String value = getDocumentValue(document, tagName); 
			if (value != null)
				layerValues.put(key,value);
			else
			{
				//System.out.println("  did not find values for tag " + tagName);
				numberOfWarnings++;
				missingTags.add(tagName);
			}
		}
	}
	
	/**
	 * computing the data type for the layer involves looking at multiple fields in the document
	 * @param document
	 * @param layerValues
	 */
	private static void handleDataType(Document document, Hashtable<Key, Object> layerValues)
	{
		String direct = getDocumentValue(document, "direct");
		String sdtsType = getDocumentValue(document, "sdtstype");
		String srcCiteA = getDocumentValue(document, "srccitea");

		String solrType = "Raster";
		
		if (direct.equalsIgnoreCase("raster") == false)
		{
			// here if we don't have a raster, must check another tag
			if (sdtsType != null)
			{
				if (sdtsType.equals("G-polygon") || sdtsType.contains("olygon") || sdtsType.contains("chain"))
					solrType = "Polygon";
				else if (sdtsType.equals("Composite") || sdtsType.contains("omposite") || sdtsType.equals("Entity point"))
					solrType = "Point";
				else if (sdtsType.equals("String"))
					solrType = "Line";
				else
					solrType = "Undefined";
			}
		}
		if (srcCiteA != null)
		{
			if (srcCiteA.equalsIgnoreCase("Paper Map"))
				solrType = "Paper Map";
		}
		
		layerValues.put(Key.DataType, solrType);
	}
	
	/**
	 * concatenate the values for all occurrences for the passed tag
	 * @param document
	 * @param tagName
	 * @return
	 */
	private static String getDocumentValues(Document document, String tagName)
	{
		NodeList nodes = document.getElementsByTagName(tagName);
		if (nodes.getLength() == 0)
		{
			return null;
		}
		String tagValues = "";
		for (int i = 0 ; i < nodes.getLength() ; i++)
		{
			Node currentNode = nodes.item(i);
			NodeList tempNodes = currentNode.getChildNodes();
			if (tempNodes != null)
			{
				Node tempNode = tempNodes.item(0);
				if (tempNode != null)
				{
					String currentValue = tempNodes.item(0).getNodeValue();
					tagValues = tagValues + " " + currentValue;
				}
			}
		}
		return tagValues;

	}
	
	/**
	 * return the first value for the passed tag
	 * @param document
	 * @param tagName
	 * @return
	 */
	private static String getDocumentValue(Document document, String tagName)
	{
		NodeList nodes = document.getElementsByTagName(tagName);
		if (nodes.getLength() == 0)
		{
			return null;
		}
		Node node = nodes.item(0);
		String tagValue = node.getChildNodes().item(0).getNodeValue();
		if (tagValue != null)
			tagValue = tagValue.trim();
		return tagValue;
	}
	
	
	/**
	 * check two different tags for the publisher
	 * @param document
	 * @return
	 */
	/*
	private static String getPublisherValue(Document document)
	{
		String value = getDocumentValue(document, "origin");
		if (value == null)
			value = getDocumentValue(document, "publish");
		if (value == null)
		{
			System.out.println("  warning: did not find tags for origin or publish");
			value = "";
		}
		return value;
	}
	*/
	
	
	
	/**
	 * get the content date which could be in one of two different tags
	 * @param document
	 * @return
	 */
	private static String getDateValue(Document document)
	{
		NodeList nodes = document.getElementsByTagName("caldate");
		if (nodes.getLength() == 0)
			nodes = document.getElementsByTagName("begdate");
		if (nodes.getLength() == 0)
			return "";
		Node node = nodes.item(0);
		String tagValue = node.getChildNodes().item(0).getNodeValue();
		if (tagValue != null)
			tagValue = tagValue.trim();
		return tagValue;
	}
	
	/**
	 * generate a file based on the data in hashtable that can be ingested by Solr
	 * this function simply pounds together data from the hashtable with some literal tags
	 * @param solrFile
	 * @param layerInfo
	 */
	private static void saveSolrDocument(String outputFilename, Hashtable<Key, Object> layerInfo)
	{
		PrintWriter solrFile = null;
		try 
		{
			solrFile = new PrintWriter(new FileWriter(outputFilename));
		} catch (IOException e) 
		{
			System.out.println("error in FgdcToSolr.saveSolrDocument");
			e.printStackTrace();
			return;
		}

		solrFile.println("<add allowDups=\"false\">");
		solrFile.println();
		solrFile.println("<doc>");
		solrFile.println("  <field name=\"LayerId\">" + layerInfo.get(Key.LayerId) + "</field>");
		solrFile.println("  <field name=\"Name\">" + getValue(layerInfo, Key.EsriName) + "</field>");
		solrFile.println("  <field name=\"CollectionId\">initial collection</field>");
		solrFile.println("  <field name=\"Institution\">" + institution + "</field>");
		solrFile.println("  <field name=\"InstitutionSort\">" + institution + "</field>");
		solrFile.println("  <field name=\"Access\">Public</field>");
		String solrType = getValue(layerInfo, Key.DataType); 
		
	
		solrFile.println("  <field name=\"DataType\">" + solrType + "</field>");
		solrFile.println("  <field name=\"DataTypeSort\">" + solrType + "</field>");
		solrFile.println("  <field name=\"Availability\">Online</field>");
		solrFile.println("  <field name=\"LayerDisplayName\">" + getValue(layerInfo, Key.Title) + "</field>");
		solrFile.println("  <field name=\"LayerDisplayNameSort\">" + getValue(layerInfo, Key.Title) + "</field>");
		solrFile.println("  <field name=\"Publisher\">" + getValue(layerInfo, Key.Publisher) + "</field>");
		solrFile.println("  <field name=\"PublisherSort\">" + getValue(layerInfo, Key.Publisher) + "</field>");
		solrFile.println("  <field name=\"Originator\">" + getValue(layerInfo, Key.Originator) + "</field>");
		solrFile.println("  <field name=\"OriginatorSort\">" + getValue(layerInfo, Key.Originator) + "</field>");
		
		solrFile.println("  <field name=\"ThemeKeywords\">" + getValue(layerInfo, Key.ThemeKeywords) + "</field>");
		solrFile.println("  <field name=\"PlaceKeywords\">" + getValue(layerInfo, Key.PlaceKeywords) + "</field>");		
		
		solrFile.println("  <field name=\"Abstract\">" + getValue(layerInfo, Key.Abstract) + "</field>");
		
		double northBc = getDoubleValue(layerInfo, Key.NorthBc);
		double southBc = getDoubleValue(layerInfo, Key.SouthBc);
		double eastBc = getDoubleValue(layerInfo, Key.EastBc);
		double westBc = getDoubleValue(layerInfo, Key.WestBc);
		
		double centerX = (eastBc + westBc) / 2.; 
		double centerY = (northBc + southBc) / 2.;
		double halfWidth = Math.abs(eastBc - westBc) / 2.;
		double halfHeight = Math.abs(northBc - southBc) / 2.;
		double area = (halfHeight * 2.) * (halfWidth * 2.);
		
		solrFile.println("  <field name=\"Location\">" + wmsUrl + "</field>");
		solrFile.println("  <field name=\"MaxY\">" + northBc + "</field>");
		solrFile.println("  <field name=\"MinY\">" + southBc + "</field>");
		solrFile.println("  <field name=\"MinX\">" + westBc + "</field>");
		solrFile.println("  <field name=\"MaxX\">" + eastBc + "</field>");
		solrFile.println("  <field name=\"CenterX\">" + centerX + "</field>");
		solrFile.println("  <field name=\"CenterY\">" + centerY + "</field>");
		solrFile.println("  <field name=\"HalfWidth\">" + halfWidth + "</field>");
		solrFile.println("  <field name=\"HalfHeight\">" + halfHeight + "</field>");
		solrFile.println("  <field name=\"Area\">" + area + "</field>");
		solrFile.println("  <field name=\"ContentDate\">" + processDateString(getValue(layerInfo, Key.ContentDate)) + "</field>");
		solrFile.println("  <field name=\"FgdcText\">" + layerInfo.get(Key.FgdcText) + "</field>");
		
		solrFile.println("</doc>");
		solrFile.println();
		solrFile.println("</add>");
		solrFile.flush();
		solrFile.close();
	}
	
	
	/**
	 * I've see lat/lon values of REQUIRED: Eastern-most coordinate of the limit of coverage expressed in longitude.
	 *   they are not yet properly handled
	 * @param layerInfo
	 * @param key
	 * @return
	 */
	private static double getDoubleValue(Hashtable layerInfo, Key key)
	{
		String temp = getValue(layerInfo, key);
		try
		{
			return Double.parseDouble(temp);
		}
		catch (NumberFormatException e)
		{
			return 0;
		}
	}
	
	/**
	 * take care of special characters which would otherwise cause a problem in the Solr formatted file
	 * they are simply eliminated, should they instead by quoted?
	 * @param layerInfo
	 * @param key
	 * @return
	 */
	private static String getValue(Hashtable layerInfo, Key key)
	{
		String temp = (String)layerInfo.get(key);
		if (temp == null) return "";
		temp = temp.replace('&', ' ');
		return temp;
	}
	
	
	
	/**
	 * return the year portion of the date
	 * the FGDC date field is a free format string
	 * this function does a little to clean it up and creates a UTC date
	 *   solr requires the date to be something like 1995-12-31T23:59:59Z
	 * as more data becomes available, it will have to do more error checking
	 * @param date
	 * @return
	 */
	private static String processDateString(String passedDate)
	{
		String returnYear = "0001";
		if (passedDate == null)
			return "";
		if (passedDate.length() >= 6)
		{
			String temp = passedDate.substring(2,6);
			if (isYear(temp))
				returnYear = temp;
		}
		if (passedDate.length() >= 5)
		{
			String temp = passedDate.substring(1, 5);
			if (isYear(temp))
				returnYear = temp;
		}
		if (passedDate.length() >= 4)
		{
			String temp = passedDate.substring(0, 4);
			if (isYear(passedDate))
				returnYear = temp;
		}
		
		String returnValue = returnYear + "-01-01T01:01:01Z";
		return returnValue;
	}
	
	/**
	 * can the passed string be parsed as an int
	 * @param year$
	 * @return
	 */
	private static boolean isYear(String year$)
	{
		try
		{
			int year = Integer.parseInt(year$);
			return true;
		}
		catch (NumberFormatException e)
		{
			return false;
		}
	}
	
	/**
	 * set pathname global variables
	 * they make it easier to 
	 * @param passedInstitution
	 */
	private static void setGlobals(String passedInstitution)
	{
		institution = passedInstitution;
		if (institution == "Harvard")
		{
			solrDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/HarvardSolrData/";
			fgdcDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/HarvardMetadata/";
			wmsUrl = "http://arrowsmith.mit.edu:8080/geoserver/gwc/service/wms";
			baseLayerId = 2000;
		}
		else if (institution == "MIT")
		{
			fgdcDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/MitMetadata";
			solrDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/MitSolrData/";
			wmsUrl = "http://arrowsmith.mit.edu:8080/geoserver/gwc/service/wms";
			baseLayerId = 10000;
		}
		else if (institution == "Princeton")
		{
			fgdcDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/PrincetonMetadata";
			solrDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/PrincetonSolrData/";
			wmsUrl = "http://arrowsmith.mit.edu:8080/geoserver/gwc/service/wms";
			baseLayerId = 15000;
		}
		else if (institution == "Tufts")
		{
			fgdcDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/TuftsMetadata";
			solrDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/TuftsSolrData/";
			wmsUrl = "http://arrowsmith.mit.edu:8080/geoserver/gwc/service/wms";
			baseLayerId = 25000;
		}
		else if (institution == "Test")
		{
			fgdcDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/TestMetadata";
			solrDirectoryName = "/Users/smcdon08/tmp/OpenGeoPortal/TestSolrData/";
			wmsUrl = "http://arrowsmith.mit.edu:8080/geoserver/gwc/service/wms";
			baseLayerId = 100000;
		} 
	}
	
	private static int numberOfWarnings = 0;
	private static Vector<String> missingTags = new Vector<String>();
	
	/**
	 * convert all the FGDC files in the input directory to Solr ingestable files in the Solr directory
	 * @param args
	 */
	public static void main(String[] args)
	{
		
		//institution = "Harvard";
		//institution = "MIT";
		//institution = "Princeton";
		//institution = "Tufts";
		//institution = "Test";
		
		String passedInstitution = "Tufts";
		if (args.length > 0)
			passedInstitution = args[0];
		setGlobals(passedInstitution);
		
		File fgdcDirectory = new File(fgdcDirectoryName);
		File[] fgdcFiles = fgdcDirectory.listFiles();
		if (fgdcFiles.length == 0)
		{
			System.out.println("error: invalid fgdc directory " + fgdcDirectoryName);
			return;
		}
		
		// loop over all the fgdc files, converting each to our Solr schema
		System.out.println("number files to process =  " + fgdcFiles.length);
		for (int i = 0 ; i < fgdcFiles.length ; i++)
		{
			File currentFgdcFile = fgdcFiles[i];
			String filename = currentFgdcFile.getName();
			if (filename.endsWith(".xml"))
			{
				// here with a real fgdc file
				String solrFilename = solrDirectoryName + filename;
				missingTags = new Vector<String>();
				System.out.println("converting " + currentFgdcFile.toString() + " to " + solrFilename);
				convert(currentFgdcFile.toString(), solrFilename, baseLayerId + i);
				if (missingTags.size() > 0)
				{
					System.out.print("    missing:");
					for (int j = 0 ; j < missingTags.size() ; j++)
						System.out.print(" " + missingTags.get(j));
					System.out.println();
				}
			}
		}
		System.out.println("files processed =  " + fgdcFiles.length + ", number of warnings = " + numberOfWarnings);
		
		//convert("/Users/smcdon08/tmp/OpenGeoPortal/HarvardMetadata/AFRICOVER_KE_WOODY_AGG.xml",
		//		"/Users/smcdon08/tmp/OpenGeoPortal/tmp/solrTest.xml", 1001);

	}
	
}
