/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package cz.mzk.oai.metadataRepository.harvest;

import cz.mzk.oai.metadataRepository.tools.CannotReadDirectoryException;
import cz.mzk.oai.metadataRepository.metadata.DateStamp;
import cz.mzk.oai.metadataRepository.metadata.MetadataFormat;
import cz.mzk.oai.metadataRepository.repository.Identifier;
import cz.mzk.oai.metadataRepository.repository.MetadataBase;
import cz.mzk.oai.metadataRepository.repository.Repository;
import cz.mzk.oai.metadataRepository.tools.FileTools;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.dom4j.Attribute;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.dom4j.Namespace;
import org.dom4j.QName;
import org.dom4j.io.SAXReader;
import org.marc4j.MarcException;
import org.marc4j.MarcReader;
import org.marc4j.MarcStreamReader;
import org.marc4j.MarcWriter;
import org.marc4j.MarcXmlWriter;
import org.marc4j.marc.ControlField;
import org.marc4j.marc.DataField;
import org.marc4j.marc.Record;

/**
 *
 * @author Martin Rehanek, Martin.Rehanek@gmail.com
 */
/**
 * Harvester that reads Aleph export files (MARC21 records in ISO 2709),
 * converts every record to MARCXML (dom4j {@link Document}) and stores it
 * into the {@link Repository} under an {@link Identifier} derived from the
 * record's 001 control field.
 *
 * @author Martin Rehanek, Martin.Rehanek@gmail.com
 */
public class AlephExportHarvester implements MarcHarvester {

    private static final Logger LOGGER = Logger.getLogger(AlephExportHarvester.class.getName());

    private final Repository repository;
    private final File dirWithExports;
    private final MetadataBase base;

    /**
     * @param base metadata base the harvested records belong to
     * @param dirWithExports directory containing the Aleph export files
     * @param repository target repository for converted records
     * @param logFileName currently unused; kept for constructor compatibility with callers
     */
    public AlephExportHarvester(MetadataBase base, File dirWithExports, Repository repository, String logFileName) {
        this.base = base;
        this.repository = repository;
        this.dirWithExports = dirWithExports;
    }

    // Design notes on the difference from harvestAndUpdate (translated from the original Czech):
    // Suppose the repository holds record X last modified on Jan 10 (previously on Jan 3) and
    // today is Jan 15.
    // harvestAndUpdate("Jan 1","Jan 5"): record X modified on Jan 3 is found, but the stored copy
    // is newer, so nothing changes.
    // harvestAndReplace("Jan 1","Jan 5"): record X modified on Jan 3 replaces the stored copy even
    // though a newer one exists. Which datestamp should the replacement carry?
    //   1. keep Jan 3 - but then the change is invisible through the OAI provider to a client
    //      harvesting only changes since Jan 14
    //   2. set Jan 15 - but then a later harvestAndUpdate(Jan 9 - Jan 11) would fail to apply the
    //      genuinely newer change from Jan 10
    // Decision: use the first variant. Externally visible changes should stay consistent with the
    // export dates; harvestAndReplace serves to restore the state within a from-until interval.
    // The interface may still change: DirectoryHarvester will probably not be used, but
    // OAIHarvester should remain functional.
    public void harvestAndReplace(DateStamp from, DateStamp until, boolean thorough) {
        harvest(from, until, true, thorough);
    }

    public void harvestAndUpdate(DateStamp from, DateStamp until, boolean thorough) {
        harvest(from, until, false, thorough);
    }

    /**
     * Processes every export whose datestamp falls into the given interval.
     *
     * @param from lower bound of the interval; null means no lower bound ({@code DateStamp.MIN})
     * @param until upper bound of the interval; null means no upper bound ({@code DateStamp.MAX})
     * @param forceReplace if a harvested record has an older datestamp than the record already
     * present in the repository, the present record is replaced anyway
     * @param thorough passed through to the repository when adding records
     */
    private void harvest(DateStamp from, DateStamp until, boolean forceReplace, boolean thorough) {
        if (from == null) {
            from = DateStamp.MIN;
        }
        if (until == null) {
            until = DateStamp.MAX;
        }
        try {
            AlephExports exports = new AlephExports(dirWithExports, base);
            List<AlephExport> relevantExports = exports.getSortedExports(from, until);
            for (AlephExport export : relevantExports) {
                System.out.println("processing export " + export.getFile().getAbsolutePath());
                processExport(export, forceReplace, thorough);
            }
        } catch (CannotReadDirectoryException ex) {
            LOGGER.log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Reads all MARC records from a single export file and adds each of them to the repository.
     * The input stream and the single-thread executor (used by {@link #recordToDocument}) are
     * released in a finally block so that a failure in the middle of the stream cannot leak them.
     */
    private void processExport(AlephExport export, boolean forceReplace, boolean thorough) {
        InputStream in = null;
        ExecutorService exec = null;
        try {
            exec = Executors.newSingleThreadExecutor();
            in = new FileInputStream(export.getFile());
            MarcReader reader = new MarcStreamReader(in);
            int counter = 0;
            int addedCounter = 0;
            while (reader.hasNext()) {
                if (++counter % 1000 == 0) {
                    // progress report for long-running exports
                    System.out.println("counter: " + counter);
                    System.out.println("added: " + addedCounter);
                }
                try {
                    Record record = reader.next();
                    removeEmptyDataFields(record);
                    Document document = recordToDocument(record, exec);
                    if (document != null) {
                        // TODO(review): reconsider - with forceReplace the replacement gets the
                        // current datestamp instead of the export's one (see class design notes)
                        DateStamp datestamp = forceReplace ? new DateStamp() : export.getDateStamp();
                        Identifier id = controlfieldsToId(record.getControlFields(), base);
                        if (id == null) {
                            // record without a 001 control field cannot be identified; skip it
                            // instead of failing the whole export with an NPE
                            LOGGER.log(Level.WARNING, "record {0} has no 001 control field, skipping", counter);
                            continue;
                        }
                        Document transformedDoc = transformDocument(document);
                        boolean added = repository.addToCollection(base, transformedDoc, id.toString(), datestamp, thorough, true);
                        if (added) {
                            addedCounter++;
                        }
                    }
                } catch (MarcException ex) {
                    // malformed record: log (with cause) and continue with the next one
                    LOGGER.log(Level.WARNING, "invalid MARC record near position " + counter, ex);
                }
            }
            System.out.println("counter: " + counter);
            System.out.println("added: " + addedCounter);
        } catch (FileNotFoundException ex) {
            LOGGER.log(Level.SEVERE, null, ex);
        } finally {
            if (exec != null) {
                exec.shutdown();
            }
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ex) {
                    LOGGER.log(Level.SEVERE, null, ex);
                }
            }
        }
    }

    /**
     * Builds the record identifier from the 001 (system number) control field.
     *
     * @return the identifier, or null if the record has no 001 control field
     */
    private Identifier controlfieldsToId(List<ControlField> controlFields, MetadataBase base) {
        for (ControlField field : controlFields) {
            if ("001".equals(field.getTag())) {
                return new Identifier(base, field.getData());
            }
        }
        return null;
    }

    public void close() {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /**
     * Reads records from a MARC21 (ISO 2709) stream into memory.
     * NOTE(review): the loop contains a deliberate TMP {@code break}, so only the FIRST record is
     * actually read; this is a test helper only.
     *
     * @testMethod
     */
    private static List<Record> readRecords(InputStream in) {
        List<Record> records = new ArrayList<Record>();
        MarcReader reader = new MarcStreamReader(in);
        int counter = 0;
        while (reader.hasNext()) {
            counter++;
            if (counter % 100 == 0) {
                System.out.println("readRecords.counter: " + counter);
            }
            try {
                Record record = reader.next();
                records.add(record);
                //TMP
                removeEmptyDataFields(record);
                System.out.println(record);
                break;
                //TMP/
            } catch (MarcException e) {
                // return what was read so far; the remainder of the stream is unusable
                System.out.println("COUNTER: " + counter);
                e.printStackTrace();
                return records;
            }
        }
        return records;
    }

    /**
     * Transforms org.marc4j.marc.Record to org.dom4j.Document.
     * The marc4j writer can only serialize to a stream, so the record is pushed through a pipe:
     * a background task writes MARCXML into the pipe while this thread parses it with SAXReader.
     *
     * @param record record to convert
     * @param exec executor backed by a single-thread pool that runs the writing side of the pipe.
     * The executor is supplied by the caller so that a new thread is not created for every
     * invocation - this method is expected to be called in a loop.
     * @return new Document, or null if the record could not be processed
     */
    private static Document recordToDocument(final Record record, Executor exec) {
        PipedInputStream pipeIn = null;
        try {
            pipeIn = new PipedInputStream();
            final PipedOutputStream pipeOut = new PipedOutputStream(pipeIn);
            exec.execute(
                    new Runnable() {

                        public void run() {
                            MarcWriter writer = new MarcXmlWriter(pipeOut, true);
                            try {
                                writer.write(record);
                            } finally {
                                // always close the writer (and thus pipeOut); otherwise the
                                // SAXReader on the other end of the pipe would block forever
                                writer.close();
                            }
                        }
                    });
            SAXReader reader = new SAXReader();
            return reader.read(pipeIn);
        } catch (DocumentException ex) {
            LOGGER.log(Level.WARNING, "cannot parse MARCXML produced for record", ex);
        } catch (IOException ex) {
            LOGGER.log(Level.SEVERE, null, ex);
        } finally {
            if (pipeIn != null) {
                try {
                    pipeIn.close();
                } catch (IOException ex) {
                    LOGGER.log(Level.SEVERE, null, ex);
                }
            }
        }
        return null;
    }

    /**
     * @testMethod
     * @param inFile file in MARC21 in ISO 2709
     * @param outFilePrefix if not null, every found MARC record will be saved in MARCXML into the
     * file named outFilePrefix concatenated with a sequence number (starting from one)
     * @param printXMLs if true, all the records in MARCXML will be printed to the standard output
     * @throws DocumentException
     */
    public static void readMarcExample(String inFile, String outFilePrefix, boolean printXMLs) throws DocumentException {
        InputStream in = null;
        try {
            in = new FileInputStream(inFile);
            // NOTE(review): loads everything into memory; will not scale to whole exports
            List<Record> records = readRecords(in);
            System.out.println("number of records: " + records.size());
            ExecutorService exec = Executors.newSingleThreadExecutor();
            int counter = 0;
            for (Record record : records) {
                counter++;
                Document document = transformDocument(recordToDocument(record, exec));
                if (outFilePrefix != null) {
                    FileTools.writeToFile(document, outFilePrefix.concat(Integer.toString(counter)).concat(".xml"));
                }
                if (printXMLs) {
                    System.out.println("document: " + document.asXML());
                }
            }
            exec.shutdown();
        } catch (FileNotFoundException ex) {
            LOGGER.log(Level.SEVERE, null, ex);
        } finally {
            // in stays null when the FileInputStream constructor threw, so guard the close
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ex) {
                    LOGGER.log(Level.SEVERE, null, ex);
                }
            }
        }
    }

    /**
     * According to http://www.loc.gov/MARC21/slim http://www.loc.gov/standards/marcxml/schema/MARC21slim.xsd
     * every data field must have at least one subfield.
     * This method removes from the record all data fields with no subfields so that a valid
     * MARCXML document can be created. Removal happens in a second pass to avoid mutating the
     * field list while iterating over it.
     */
    private static void removeEmptyDataFields(Record record) {
        List resultList = record.getDataFields(); // marc4j returns a raw List
        List<DataField> emptyDataFields = new ArrayList<DataField>();
        for (Object obj : resultList) {
            DataField dataField = (DataField) obj;
            if (dataField.getSubfields().isEmpty()) {
                emptyDataFields.add(dataField);
            }
        }
        for (DataField dataField : emptyDataFields) {
            record.removeVariableField(dataField);
        }
    }

    /**
     * Transforms document from &lt;collection&gt;&lt;record&gt; ... &lt;/record&gt;&lt;/collection&gt; to
     * &lt;marc21:record xmlns:marc21="http://www.loc.gov/MARC21/slim" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.loc.gov/MARC21/slim http://www.loc.gov/standards/marcxml/schema/MARC21slim.xsd"&gt; ... &lt;/marc21:record&gt;
     *
     * @param document document whose root's first child is the record element
     * @return new document with a namespaced record as root
     */
    private static Document transformDocument(Document document) {
        Namespace marc = new Namespace(MetadataFormat.marc21.name(), MetadataFormat.marc21.getNamespace());
        Namespace xsi = new Namespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");
        DocumentFactory factory = DocumentFactory.getInstance();
        Element rootElement = factory.createElement(new QName("record", marc));
        rootElement.addNamespace(xsi.getPrefix(), xsi.getText());
        rootElement.addNamespace(marc.getPrefix(), marc.getText());
        Attribute schemaLocation = factory.createAttribute(rootElement, new QName("schemaLocation", xsi), "http://www.loc.gov/MARC21/slim http://www.loc.gov/standards/marcxml/schema/MARC21slim.xsd");
        rootElement.add(schemaLocation);
        Document doc = factory.createDocument(rootElement);
        Element recordEl = (Element) document.getRootElement().elements().get(0);
        copyElement(rootElement, recordEl, marc, factory);
        return doc;
    }

    /**
     * Copies all attributes, child elements and text nodes from elementFrom to elementTo
     * (both are in different documents). The method also adds the namespace prefix to every
     * copied element.
     *
     * @param elementTo target element (in the new document)
     * @param elementFrom source element (in the original document)
     * @param namespace namespace applied to every copied element
     * @param factory factory used to create the copied elements
     */
    private static void copyElement(Element elementTo, Element elementFrom, Namespace namespace, DocumentFactory factory) {
        // attributes
        Iterator<Attribute> attributes = elementFrom.attributeIterator();
        while (attributes.hasNext()) {
            elementTo.add((Attribute) attributes.next().clone());
        }

        // child elements, copied recursively
        Iterator<Element> elements = elementFrom.elementIterator();
        while (elements.hasNext()) {
            Element el = elements.next();
            QName qualifiedName = new QName(el.getName(), namespace);
            Element newEl = factory.createElement(qualifiedName);
            elementTo.add(newEl);
            copyElement(newEl, el, namespace, factory);
        }

        // text nodes
        String text = elementFrom.getText();
        if (text != null && text.length() != 0) {
            elementTo.addText(text);
        }
    }
}
