/*
 * Copyright 2011 Subhabrata Ghosh <subho.ghosh at langene.net>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */



package net.langene.nlp.corpus.anc;

//~--- non-JDK imports --------------------------------------------------------

import edu.smu.tspell.wordnet.WordNetDatabase;

import net.langene.nlp.Context;
import net.langene.nlp.model.AbstractPOS;
import net.langene.nlp.model.Sentence;
import net.langene.nlp.model.Token;

import org.apache.commons.configuration.ConfigurationException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

//~--- JDK imports ------------------------------------------------------------

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.RandomAccessFile;

import java.nio.charset.StandardCharsets;

import java.util.LinkedList;
import java.util.List;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;

//~--- classes ----------------------------------------------------------------

/**
 * This class handles the loading and parsing of OpenANC
 * corpus files, including the annotation XMLs.
 *
 * @author Subhabrata Ghosh <subho.ghosh at langene.net>
 */
public class OpenANCHandler {
    private static Logger _logger = LoggerFactory.getLogger(OpenANCHandler.class);

    /** Process-wide WordNet handle; created lazily, at most once per JVM. */
    private static WordNetDatabase _wndb = null;

    /** Guards the lazy initialization of {@link #_wndb}. */
    private static final Object _lock = new Object();

    //~--- fields -------------------------------------------------------------

    /** Root directory containing the corpus content and annotation files. */
    private String datadir = null;

    /** Sentences extracted from the currently loaded content file. */
    private List<Sentence> sentences = null;

    /** Base name (without extension) of the currently loaded content file. */
    private String filename;

    //~--- methods ------------------------------------------------------------

    /**
     * Set up the handler.
     * Resolves the corpus directory and initializes the shared WordNet
     * database handle; the WordNetDatabase handle is created only once.
     *
     * @throws ConfigurationException if the WordNet directory parameter is missing.
     * @throws Exception on any other initialization failure.
     */
    public void setup() throws Exception {
        datadir = Context.get().getConfig().getString("net.langene.training.corpus.directory");

        synchronized (_lock) {
            if (_wndb == null) {
                String prop =
                    Context.get().getConfig().getString("net.langene.core.wordnet.directory");

                if ((prop == null) || prop.isEmpty()) {
                    throw new ConfigurationException(
                        "Invalid Configuration : Missing parameter [net.langene.core.wordnet.directory]");
                }

                // The WordNet library locates its data via this system property.
                System.setProperty("wordnet.database.dir", prop);
                _wndb = WordNetDatabase.getFileInstance();
            }
        }
    }

    /**
     * Load the Sentences and Tokens for the specified file.
     *
     * @param filename Content filename (without extension), relative to the
     *                 configured corpus directory.
     * @param reload   Reload file, if true will reload even if the file has
     *                 been already processed.
     *
     * @return true, loaded or false, load not required.
     * @throws Exception if the content file does not exist or parsing fails.
     */
    public boolean load(String filename, boolean reload) throws Exception {
        if (!reload && (this.filename != null) && this.filename.equals(filename)) {
            return false;    // Same file already loaded; nothing to do.
        }

        this.filename = filename;

        if (sentences != null) {
            sentences.clear();
        } else {
            sentences = new LinkedList<Sentence>();
        }

        File fi = new File(datadir + "/" + filename + ".txt");

        if (!fi.exists()) {
            throw new Exception("Load Error : File [" + fi.getAbsolutePath() + "] not found.");
        }

        _logger.debug("Loading content from file [{}]...", fi.getAbsolutePath());
        parseSentences(fi.getAbsolutePath());

        return true;
    }

    /**
     * Parse sentence boundaries from the "&lt;name&gt;-s.xml" stand-off
     * annotation file and slice the corresponding text out of the raw
     * content file, then chain into token parsing.
     *
     * NOTE(review): the "from"/"to" attributes are used as byte offsets when
     * reading the content file but compared against character offsets during
     * token alignment; for multi-byte UTF-8 content the two can diverge —
     * confirm the corpus is effectively one byte per character here.
     *
     * @param contentfi absolute path of the raw content (.txt) file.
     * @throws Exception on I/O or XML parsing failure.
     */
    private void parseSentences(String contentfi) throws Exception {
        String sentfile = datadir + "/" + filename + "-s.xml";

        // try-with-resources: the original leaked both streams on exceptions.
        try (FileInputStream fis = new FileInputStream(sentfile);
             RandomAccessFile cofis = new RandomAccessFile(contentfi, "r")) {
            Document doc = newSecureBuilder().parse(fis);
            NodeList nl  = selectNodes("./struct", doc.getDocumentElement());

            for (int ii = 0; (nl != null) && (ii < nl.getLength()); ii++) {
                Element ne   = (Element) nl.item(ii);
                int     spos = Integer.parseInt(ne.getAttribute("from"));
                int     epos = Integer.parseInt(ne.getAttribute("to"));

                // Newlines inside a sentence are layout artifacts; flatten them.
                String sentstr = readRegion(cofis, spos, epos).replaceAll("\n", " ");

                Sentence sentence = new Sentence(sentstr);

                sentence.setStartOffset(spos);
                sentence.setEndOffset(epos);
                sentences.add(sentence);
            }
        }

        parseTokens();
    }

    /**
     * Read the inclusive byte range [from, to] from the content file and
     * decode it as UTF-8. Only the bytes actually read are decoded, so a
     * short read or a range running past end-of-file can no longer leave
     * NUL bytes in the sentence text (the original ignored the return value
     * of {@code read}).
     */
    private static String readRegion(RandomAccessFile raf, int from, int to) throws IOException {
        byte[] data = new byte[to - from + 1];

        raf.seek(from);

        int off = 0;

        while (off < data.length) {
            int n = raf.read(data, off, data.length - off);

            if (n < 0) {
                break;    // EOF: keep whatever was read instead of failing.
            }

            off += n;
        }

        return new String(data, 0, off, StandardCharsets.UTF_8);
    }

    /**
     * Build a DocumentBuilder hardened against XXE; the annotation XMLs
     * never need external entities. Hardening is best-effort so that a JAXP
     * implementation lacking a feature still works.
     */
    private static DocumentBuilder newSecureBuilder() throws Exception {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();

        try {
            dbf.setFeature("http://xml.org/sax/features/external-general-entities", false);
            dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        } catch (Exception ignored) {
            _logger.warn("XML parser does not support XXE-hardening features");
        }

        dbf.setExpandEntityReferences(false);
        dbf.setXIncludeAware(false);

        return dbf.newDocumentBuilder();
    }

    /** Evaluate an XPath expression to a NodeList, relative to {@code context}. */
    private static NodeList selectNodes(String expression, Object context) throws Exception {
        XPath xpath = XPathFactory.newInstance().newXPath();

        return (NodeList) xpath.compile(expression).evaluate(context, XPathConstants.NODESET);
    }

    /**
     * Parse token annotations from the "&lt;name&gt;-hepple.xml" file and
     * attach POS-tagged tokens to the sentences loaded by
     * {@link #parseSentences}. The file is optional; if missing, sentences
     * are left token-less.
     *
     * @throws Exception on I/O or XML parsing failure.
     */
    private void parseTokens() throws Exception {
        String hepplefile = datadir + "/" + filename + "-hepple.xml";

        _logger.debug("Loading tokens from [{}]", hepplefile);

        if (!new File(hepplefile).exists()) {
            return;    // POS annotations are optional.
        }

        try (FileInputStream fis = new FileInputStream(hepplefile)) {
            Document doc = newSecureBuilder().parse(fis);
            NodeList nl  = selectNodes("./struct", doc.getDocumentElement());

            if ((nl == null) || (nl.getLength() == 0) || sentences.isEmpty()) {
                return;
            }

            int      index   = 0;
            Sentence current = sentences.get(index);

            _logger.debug("SENTENCE [{}]", current.getSentence());

            for (int ii = 0; ii < nl.getLength(); ii++) {
                Element ne = (Element) nl.item(ii);

                if (!ne.getAttribute("type").equalsIgnoreCase("tok")) {
                    continue;
                }

                int spos = Integer.parseInt(ne.getAttribute("from"));
                int epos = Integer.parseInt(ne.getAttribute("to"));

                // Advance to the sentence this token falls inside. The bounds
                // check prevents the IndexOutOfBoundsException the original
                // threw for a token beyond the last sentence.
                while ((spos > current.getEndOffset()) && (index < sentences.size() - 1)) {
                    index++;
                    current = sentences.get(index);
                    _logger.debug("SENTENCE [{}]", current.getSentence());
                }

                addToken(current, ne, spos, epos);
            }
        }
    }

    /**
     * Extract the token text for [spos, epos) from the sentence, look up its
     * "msd" (POS) feature, and append a {@link Token} to the sentence.
     * Tokens whose start offset falls outside the sentence are logged and
     * skipped; an overrunning end offset is clamped to the sentence length.
     */
    private void addToken(Sentence current, Element ne, int spos, int epos) throws Exception {
        int sspos = spos - current.getStartOffset();
        int sepos = epos - current.getStartOffset();
        int len   = current.getSentence().length();

        if ((sspos < 0) || (sspos >= len)) {
            _logger.warn("Invalid Token Markers : [{}][{}], Word Start [{}]",
                         current.getSentence(), len, sspos);

            return;
        }

        if (sepos >= len) {
            _logger.warn("Invalid Token Markers : [{}][{}], Word End [{}]",
                         current.getSentence(), len, sepos);
            sepos = len - 1;
        }

        String   word = current.getSentence().substring(sspos, sepos);
        NodeList nn   = selectNodes("./feat", ne);

        if ((nn == null) || (nn.getLength() == 0)) {
            return;
        }

        String pos = null;

        for (int jj = 0; jj < nn.getLength(); jj++) {
            Element nne = (Element) nn.item(jj);

            if (nne.getAttribute("name").equalsIgnoreCase("msd")) {
                pos = nne.getAttribute("value");
            }
        }

        // NOTE(review): the original also read a "base" (lemma) feature but
        // never used it; restore that lookup if lemma support is added.
        if ((pos != null) && !pos.isEmpty()) {
            Token tk = new Token(word);

            tk.setPos(AbstractPOS.parse(word, pos));
            current.addToken(tk);

            /*
             * Synset lookup via _wndb was stubbed out in the original:
             * tk.setSynsets(_wndb.getSynsets(word));
             */
        }
    }

    //~--- get methods --------------------------------------------------------

    /**
     * Get the sentences loaded by the last successful {@link #load} call.
     *
     * @return the live internal list (callers must not mutate it), or null
     *         if nothing has been loaded yet.
     */
    public List<Sentence> getSentences() {
        return sentences;
    }
}


//~ Formatted by Jindent --- http://www.jindent.com
