/*
 * Created on Feb 17, 2004
 *
 * Author: Sandeep Prakash
 * Modified by Boon Siew, Tzyy Ming, Klarinda
 * Project: Sucxentv2 (using leaf nodes only)
 */
package dbsucxentW.storageW.parserW;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.xml.sax.SAXException;

import dbsucxentW.storageW.entitiesW.PathCollection;
import dbsucxentW.storageW.entitiesW.LeafNode;
import dbsucxentW.storageW.loadersW.Store;
import dbsucxentW.constantW.Constant;

/**
 * Main purpose of this class is to find the DeweyOrderSum and SiblingSum for each node (2nd round of parsing).
 * The algorithm used is similar to that described in the technical report
 *		'Efficient Support of Ordered XPath Processing in Relational Databases' with minor changes.
 */
public class BranchDataSummarizer extends NodeExtractor {

    /** Table ids for which bulk-load files are created and later closed. */
    private static final int[] BULK_TABLES = {
        Constant.PATHVALUE_TABLE, Constant.ATTRIBUTE_TABLE,
        Constant.TEXTCONTENT_TABLE, Constant.ATTRCONTENT_TABLE
    };

    private PathCollection _paths;
    private Store _store;
    private DocumentSummary summary;	//This class stores the result of the first pass.
    private AncestorDeweyGroupCalculator ancestorDeweyGroupCalculator;
    private SiblingSumCalculator siblingSumCalculator;
    private String dataSourceName;	// optional prefix for bulk file / table names
    protected BufferedWriter[] _writers; // bulk writers, indexed by table id

    /**
     * Creates the summarizer and opens the bulk-load output files.
     *
     * @param paths          path dictionary used to resolve path ids
     * @param summary        result of the first parsing pass
     * @param store          provides the document id being stored
     * @param dataSourceName optional name prefixed to table file names; may be null
     */
    public BranchDataSummarizer(PathCollection paths, DocumentSummary summary, Store store, String dataSourceName) {
        this.dataSourceName = dataSourceName;
        _paths = paths;
        this.summary = summary;
        _store = store;
        this.ancestorDeweyGroupCalculator = new AncestorDeweyGroupCalculator(this.summary);
        this.siblingSumCalculator = new SiblingSumCalculator(this.summary, this.getElementNames());
        createBulkWriters();		//bulk loader
    }

    /**
     * Handles a leaf (text) node: computes its SiblingSum and
     * AncestorDeweyGroup and writes it as one bulk-load row.
     * Only leaf() needs to be overridden; summarization code is overridden.
     */
    @Override
    protected void leaf() {
        _order++; // Not done in the super class, so the document order must be advanced here.

        LeafNode node = new LeafNode();

        long _siblingSum = this.siblingSumCalculator.calculate(_branchOrder, _depth);
        long ancestorDeweyGroup = this.ancestorDeweyGroupCalculator.calculate(_branchOrder, true);

        //Set leaf node properties.
        node.setDocId(_store.getDocId());
        node.setOrder(_order);
        node.setBranchOrder(_branchOrder);
        node.setPathId(_paths.getPathId(_path.toString()));
        node.setValue(_leafValue.toString().trim());
        node.setAncestorDeweyGroup(ancestorDeweyGroup);
        node.setSiblingSum(_siblingSum);

        /* -- Uncomment this if you want to separate large text values
        if(_leafValue.toString().trim().length() >= Constant.LARGETEXTDATA) //k20070525 add textcontent & attrcontent
        node.setIsLargeTextData(true);
         */

        //bulk loader - write each node as a row into the file
        try {
            writeRow(Constant.PATHVALUE_TABLE, node.bulkLoad());

            // NOTE(review): with the block above commented out, getIsLargeTextData()
            // appears to stay false for leaf nodes, so this branch should be dead
            // unless LeafNode sets the flag itself — confirm before relying on it.
            if (node.getIsLargeTextData()) { //k20070525 add textcontent & attrcontent
                writeRow(Constant.TEXTCONTENT_TABLE, node.bulkLoadTextContent());
            }
        } catch (IOException e) {
            System.err.println("ERR at bulk load leaf node: " + e.getMessage());
        }
    }

    /**
     * Handles an attribute node: computes its AncestorDeweyGroup and writes it
     * to the attribute bulk file (klarinda 20060831 - attributes go to a
     * separate table).
     */
    @Override
    protected void leafAttribute() {
        long ancestorDeweyGroup = this.ancestorDeweyGroupCalculator.calculate(_branchOrder, false);

        LeafNode node = new LeafNode();

        //Set Attribute node properties.
        node.setDocId(_store.getDocId());
        node.setOrder(_order + 1);	//add + 1 because attribute's leaf order == the parent's or the 1st child
        node.setAncestorDeweyGroup(ancestorDeweyGroup);
        node.setPathId(_paths.getPathId(_path.toString()));
        node.setValue(_leafValue.toString().trim());
        node.setIsAttribute(true);

        // Large attribute values are diverted to a separate content table.
        if (_leafValue.toString().trim().length() >= Constant.LARGETEXTDATA) //k20070525 add textcontent & attrcontent
        {
            node.setIsLargeTextData(true);
        }

        //bulk loader - write each node as a row into the file
        try {
            writeRow(Constant.ATTRIBUTE_TABLE, node.bulkLoadAttribute());
            if (node.getIsLargeTextData()) { //k20070525 add textcontent & attrcontent
                writeRow(Constant.ATTRCONTENT_TABLE, node.bulkLoadAttrContent());
            }
        } catch (IOException e) {
            System.err.println("ERR at bulk load attribute node: " + e.getMessage());
        }
    }

    /**
     * Appends one row to the bulk file of the given table, followed by a
     * platform line separator.
     *
     * @param tableId index into {@code _writers}
     * @param row     the pre-formatted bulk-load row
     * @throws IOException if the underlying writer fails
     */
    private void writeRow(int tableId, String row) throws IOException {
        _writers[tableId].append(row);
        _writers[tableId].newLine();
    }

    /**
     * Receive notification of the end of the document.
     * Used to flush and close bulk writers.
     *
     * Each writer is closed independently so that a failure on one file does
     * not leak the remaining file handles (the original single try/catch
     * skipped every close after the first IOException). close() already
     * flushes, so no explicit flush() is needed.
     *
     * @see org.xml.sax.ContentHandler#endDocument()
     */
    @Override
    public void endDocument() throws SAXException {
        //bulk loader
        for (int tableId : BULK_TABLES) {
            BufferedWriter writer = _writers[tableId];
            if (writer == null) {
                continue; // createBulkWriters() failed for this table
            }
            try {
                writer.close();
            } catch (IOException e) {
                System.err.println("ERR closing bulk file for table " + tableId + ": " + e.getMessage());
            }
        }
        super.endDocument();
    }

    /**
     * klarinda 20060904 bulk loading - create one file per Sucxent table for
     * bulk load. The bulk load path is taken from the controller because SQL
     * needs the exact directory.
     *
     * Each file is opened in its own try/catch so that one failure does not
     * prevent the remaining files from being created.
     */
    private void createBulkWriters() {
        _writers = new BufferedWriter[Constant.NO_OF_TABLES];

        Constant.BULKFILEPATH = controller.Controller.BULK_PATH;

        for (int tableId : BULK_TABLES) {
            // NOTE(review): FileWriter uses the platform default charset here —
            // confirm the bulk loader on the database side expects the same encoding.
            String fname = Constant.BULKFILEPATH + getTableName(tableId) + ".tbl";
            try {
                _writers[tableId] = new BufferedWriter(new FileWriter(fname));
            } catch (IOException e) {
                System.err.println("ERR creating bulk file " + fname + ": " + e.getMessage());
            }
        }
    }

    /**
     * Returns the table name for the given table id, prefixed with the data
     * source name (if any), e.g. "mysource_PATHVALUE".
     *
     * @param tableId index into {@code Constant.TABLE_NAMES}
     * @return the (possibly prefixed) table name
     */
    private String getTableName(int tableId) {
        String name = Constant.TABLE_NAMES[tableId];
        return ((dataSourceName == null) ? "" : dataSourceName + "_") + name;
    }
}
