
package org.apache.solr.update;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.schema.IndexSchema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An IndexWriter that is configured via Solr config mechanisms.
 * @since solr 0.9
 */

public class SolrIndexWriter extends IndexWriter {

    private static final Logger log = LoggerFactory.getLogger(SolrIndexWriter.class);

    // These should *only* be used for debugging or monitoring purposes
    public static final AtomicLong numOpens = new AtomicLong();
    public static final AtomicLong numCloses = new AtomicLong();

    /**
     * Stored into each Lucene commit to record the System.currentTimeMillis()
     * when commit was called.
     */
    public static final String COMMIT_TIME_MSEC_KEY = "commitTimeMSec";

    String name;
    private DirectoryFactory directoryFactory;

    /**
     * Set once {@link #close()} or {@link #rollback()} has run; guards against
     * releasing the underlying InfoStream/Directory twice and lets
     * {@link #finalize()} detect writers that were never closed.
     */
    private volatile boolean isClosed = false;

    /**
     * Opens a {@link SolrIndexWriter} over a {@link Directory} obtained from the
     * given {@link DirectoryFactory}.
     *
     * <p>The directory reference acquired here is owned by the returned writer and
     * is released when the writer is closed (or rolled back). If construction of
     * the writer fails, the directory is handed back to the factory here so the
     * reference is not leaked.
     *
     * @param name              symbolic writer name, used for logging only
     * @param path              index directory path passed to the factory
     * @param directoryFactory  factory that supplies (and ref-counts) the Directory
     * @param create            true to create a new index, false to append
     * @param schema            schema used to build the IndexWriterConfig
     * @param config            Solr-level index configuration
     * @param delPolicy         deletion policy for commits
     * @param codec             codec used for newly written segments
     * @param forceNewDirectory if true, the factory must return a fresh Directory
     * @throws IOException if the directory or writer cannot be opened
     */
    public static SolrIndexWriter create(String name, String path, DirectoryFactory directoryFactory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec, boolean forceNewDirectory) throws IOException {

        SolrIndexWriter w = null;
        final Directory d = directoryFactory.get(path, config.lockType, forceNewDirectory);
        try {
            w = new SolrIndexWriter(name, path, d, create, schema, config, delPolicy, codec, forceNewDirectory);
            w.setDirectoryFactory(directoryFactory);

            return w;
        }
        finally {
            // Writer construction failed: return the directory reference to the
            // factory so it is not leaked. (w != null means ownership transferred.)
            if (null == w && null != d) {
                directoryFactory.doneWithDirectory(d);
                directoryFactory.release(d);
            }
        }
    }

    private SolrIndexWriter(String name, String path, Directory directory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec, boolean forceNewDirectory) throws IOException {
        super(directory, config.toIndexWriterConfig(schema).setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND).
                setIndexDeletionPolicy(delPolicy).setCodec(codec).setInfoStream(toInfoStream(config)));

        // Parameterized logging: no string concatenation unless debug is enabled.
        log.debug("Opened Writer {}", name);
        this.name = name;

        numOpens.incrementAndGet();
    }

    private void setDirectoryFactory(DirectoryFactory factory) {
        this.directoryFactory = factory;
    }

    /**
     * Builds the Lucene {@link InfoStream} for this writer: an appending
     * UTF-8 file stream when {@code config.infoStreamFile} is set, otherwise
     * {@link InfoStream#NO_OUTPUT}.
     *
     * @throws IOException if the file (or its parent directory) cannot be opened
     */
    private static InfoStream toInfoStream(SolrIndexConfig config) throws IOException {

        String infoStreamFile = config.infoStreamFile;
        if (infoStreamFile == null) {
            return InfoStream.NO_OUTPUT;
        }

        File f = new File(infoStreamFile);
        File parent = f.getParentFile();
        // Fail fast with a clear message if the parent directory can be
        // neither created nor found, instead of a later FileNotFoundException.
        if (parent != null && !parent.mkdirs() && !parent.isDirectory()) {
            throw new IOException("Could not create directory for infoStream file: " + parent);
        }

        FileOutputStream fos = new FileOutputStream(f, true);
        try {
            return new PrintStreamInfoStream(new PrintStream(fos, true, "UTF-8"));
        }
        catch (IOException e) {
            // PrintStream construction failed (e.g. UnsupportedEncodingException):
            // close the underlying stream so the file handle is not leaked.
            fos.close();
            throw e;
        }
    }

    /**
     * Closes the writer, then — even if the close fails — closes the configured
     * InfoStream, releases the Directory reference back to the factory, and
     * records the close for monitoring.
     */
    @Override
    public void close() throws IOException {

        log.debug("Closing Writer {}", name);
        Directory directory = getDirectory();
        // If already closed, the InfoStream has been released; don't close it twice.
        final InfoStream infoStream = isClosed ? null : getConfig().getInfoStream();
        try {
            super.close();
        }
        finally {
            if (infoStream != null) {
                infoStream.close();
            }

            isClosed = true;

            directoryFactory.release(directory);

            numCloses.incrementAndGet();
        }
    }

    /**
     * Rolls back all uncommitted changes and closes the writer, releasing the
     * Directory reference even if the rollback itself fails.
     */
    @Override
    public void rollback() throws IOException {

        try {
            super.rollback();
        }
        finally {
            isClosed = true;
            directoryFactory.release(getDirectory());
            numCloses.incrementAndGet();
        }
    }

    /**
     * Safety net only: a correctly used writer is always closed explicitly.
     * Reaching finalize() with {@code isClosed == false} indicates a resource
     * leak, so it is loudly logged (and asserts in test runs) before a
     * best-effort close.
     */
    @Override
    protected void finalize() throws Throwable {

        try {
            if (!isClosed) {
                assert false : "SolrIndexWriter was not closed prior to finalize()";
                log.error("SolrIndexWriter was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!!");
                close();
            }
        }
        finally {
            super.finalize();
        }

    }
}
