package libnetdb.dm.assoc.dhp;
import java.util.*;

import libnetdb.dm.assoc.base.*;
import libnetdb.dm.assoc.util.*;



/**
 * <p>This miner is used to find the so-called large itemsets in the
 * process of association rule mining. Before running this miner,
 * one should prepare the input of transactions and the output of
 * those found large itemsets, i.e., one should create a class which
 * implements the <code>ARInput</code> interface and another class
 * which implements the <code>AROutput</code> interface, then use
 * the instance of the two classes as the parameter of the
 * constructors. In addition, please note that this miner itself
 * is a <code>ControllableTask</code> which means one can add a
 * <code>TaskListener</code> or a <code>ProgressListener</code> to
 * an instance of the miner to monitor the status or progress of the
 * task.</p>
 *
 * <p>For example, assume one has implemented a new class named
 * <code>CSVARInput</code> and another class <code>MemAROutput</code>,
 * where an instance of <code>CSVARInput</code> reads transactions
 * from a csv file and an instance of <code>MemAROutput</code> keeps
 * those found large itemsets in memory. Then one should run this miner
 * in the following way: <br><pre>
 *     ARInput input = new CSVARInput("c:\\transaction.csv");
 *     AROutput output = new MemAROutput();
 *     DHPMiner miner = new DHPMiner(input, output, 0.05);
 *     miner.start(); // run in another thread
 *     // or miner.run(); // run in the same thread
 * </pre>When the miner stops, the output of those large itemsets should
 * be kept in the instance of <code>MemAROutput</code>.
 *
 * @author Cheng-Ru Lin
 * @version 1.0
 *
 * @see lib.mining.ar.ARInput
 * @see lib.mining.ar.AROutput
 */
public class DHPMiner extends ControllableTask{


    /** Localized message templates used for status/progress reporting. */
    static private ResourceBundle bundle =
            ResourceBundle.getBundle(DHPMiner.class.getName());

    // support_lmt: minimum absolute count a candidate needs to be "large"
    //   (ceil(tranx_num * support), computed during the first scan);
    // tranx_num:  number of transactions read from in_db in the first scan;
    // hash_size:  sizes of the hash tables used by the hash pruner;
    // hash_level: up to which itemset size hash pruning is applied.
    private int support_lmt, tranx_num, hash_size[], hash_level;
    private double support;            // relative minimum support; must lie in [0, 1]
    private ARInput in_db;             // source of transactions
    private AROutput out_db;           // sink for discovered large itemsets
    private ScannerBuilder builder;    // builds scanners that count candidate itemsets
    private LinkedList scanned_list = new LinkedList(); // Litemset[] batches awaiting pruning/output
    private boolean selective_scan;    // true enables the selective-scan enhancement
    private HashPruner pruner;         // hash-based pruning of candidate itemsets

    /**
     * <p>Constructs a newly allocated DHPMiner. The created miner will
     * read its input from in_db and write its output, i.e., the large
     * itemsets, to out_db. The support is set to the parameter of the
     * same name.</p>
     *
     * @param in_db where this miner will read the transaction data
     * @param out_db where this miner will output the large itemsets
     * @param support the minimum support of large itemsets, in [0, 1]
     * @throws IllegalArgumentException if support is outside [0, 1]
     */
    public DHPMiner(ARInput in_db, AROutput out_db, double support){
        this.in_db = in_db;
        this.out_db  = out_db;
        this.support = support;

        // Validate the support before doing any further setup. The previous
        // version printed a message and called System.exit(1); a library
        // constructor must not terminate the whole JVM, so an unchecked
        // exception is thrown instead.
        if (!checkSupport())
            throw new IllegalArgumentException(
                    "Support = " + support + " out of range [0,1]");

        this.setScannerBuilder(new HashScannerBuilder(4, 643));
        this.setHashSize(new int[]{512249, 523261});
        this.setHashLevel(2);
        this.setSelectiveScan(true);
    }


	/**
	 * Checks whether the configured support is a valid fraction,
	 * i.e., whether 0 &lt;= support &lt;= 1.
	 *
	 * @return true if the support lies in [0, 1]; false otherwise
	 */
	public boolean checkSupport() {
		return 0 <= support && support <= 1;
	}

    /**
     * Sets the hash sizes of the used hash tables. This miner uses one
     * or more hash tables to speed up the process of mining large
     * itemsets. The given array is copied defensively.
     *
     * @param hash_size the sizes of those hash tables
     */
    public void setHashSize(int hash_size[]){
        this.hash_size = new int[hash_size.length];
        System.arraycopy(
                hash_size, 0, this.hash_size, 0, hash_size.length);
    }

    /**
     * Indicates if this miner will use the selective scan
     * enhancement.
     *
     * @param enabled true to enable the selective scan; or false
     * to disable this feature
     */
    public void setSelectiveScan(boolean enabled){
        this.selective_scan = enabled;
    }

    /**
     * Sets the level up to which the hash-pruning technique is applied.
     * One can use this parameter to decide for how many levels the
     * hash-pruning technique should be applied. This parameter is
     * suggested to be set to two or three.
     *
     * @param hash_level to which k-itemset level the hash-pruning
     *   technique should be applied
     */
    public void setHashLevel(int hash_level){
        this.hash_level = hash_level;
    }

    /**
     * Sets the scanner builder for this miner. The scanner builder
     * will be used to build a scanner that updates the counts of
     * candidate large itemsets.
     *
     * @param builder the scanner builder used to generate scanners
     */
    public void setScannerBuilder(ScannerBuilder builder){
        this.builder = builder;
    }

    /**
     * Keeps only the itemsets whose count reaches the absolute support
     * limit. Returns null when the input array is null.
     */
    private Litemset[] pruneLitemset(Litemset[] litemset){
        if(litemset==null) return null;
        ArrayList list = new ArrayList();
        for(int i=0;i<litemset.length;i++)
            if(litemset[i].count >= support_lmt) list.add(litemset[i]);
        return (Litemset[]) list.toArray(new Litemset[0]);
    }

    /**
     * Performs the first pass over the database: counts each single item,
     * feeds every transaction to the hash pruner, determines the number
     * of transactions and the absolute support limit, and returns the
     * 1-itemsets sorted.
     */
    private Litemset[] firstScan() throws Exception{
        controllee.setStage(bundle.getString("FirstScan"));
        // A plain HashMap suffices here: the map is confined to this
        // method, so the synchronized legacy Hashtable is unnecessary.
        // (The result is sorted below, so iteration order is irrelevant.)
        Map map = new HashMap();
        pruner.reset(2);
        in_db.beforeFirst();
        for(tranx_num = 0; in_db.hasNext(); tranx_num++){
            processCommands();
            Itemset tranx = in_db.next();
            pruner.add(tranx.ids);
            for(int j=0;j<tranx.ids.length;j++){
                Integer id = new Integer(tranx.ids[j]);
                Litemset litemset = (Litemset)map.get(id);
                if(litemset == null){
                    map.put(id, litemset = new Litemset(
                            new int[]{id.intValue()}, 1));
                }else litemset.count++;
            }
        }

        postMessage(bundle.getString("TransactionNumber").
                replaceFirst("<tranx_numb>", String.valueOf(tranx_num)));
        out_db.setTransactionNumber(tranx_num);

        // The absolute support limit is the smallest integer count that
        // still satisfies the relative support.
        support_lmt = (int)Math.ceil(tranx_num * support);
        postMessage(bundle.getString("SupportLimit").replaceFirst(
                "<support_limit>", String.valueOf(support_lmt)));

        pruner.setSupportLimit(support_lmt);
        if(out_db!=null)
            out_db.writeLitemset(new Litemset(new int[0], tranx_num));
        Litemset litemset[] = (Litemset[]) map.values().toArray(new Litemset[0]);
        Arrays.sort(litemset);
        return litemset;
    }

    /**
     * Prunes and outputs the itemset batches collected so far, then
     * generates the next round(s) of candidates and registers them with
     * the scanner builder. Returns the scanner for the next database
     * pass, or null when no large itemsets survive pruning, i.e., the
     * mining is finished.
     */
    private Scanner buildScanner() throws Exception{
        CandidateGenerator cangen = new CandidateGenerator();
        Litemset data[] = null;
        // Prune every pending batch and write the survivors to out_db.
        for(Iterator it = scanned_list.iterator(); it.hasNext();){
            data = pruneLitemset((Litemset[]) it.next());
            if(data.length==0) return null;
            if(out_db!=null){
                setStage(bundle.getString("OutputLitemsets").
                         replaceFirst("<k>", String.valueOf(data[0].ids.length)).
                         replaceFirst("<size>", String.valueOf(data.length)));
                for(int i=0;i<data.length;++i)
                    out_db.writeLitemset(data[i]);
            }
        }
        scanned_list.clear();
        int last_num;
        // Generate candidates level by level. With selective scan enabled,
        // keep generating while the candidate set keeps shrinking, so that
        // several levels may be counted within a single database pass.
        do{
            last_num = data.length;
            data = cangen.generate(data, pruner);
            if(data.length > 0){
                setStage(bundle.getString("BuildCandidate").
                         replaceFirst("<k>", String.valueOf(data[0].ids.length)).
                         replaceFirst("<size>", String.valueOf(data.length)));
                for(int i=0;i<data.length;++i)
                    builder.addCandidate(data[i]);
            }
            scanned_list.add(data);
        }while(data.length < last_num && data.length > 0 && selective_scan);
        if(data.length > 0)
            pruner.reset(data[data.length-1].ids.length + 1);
        return builder.getScanner();
    }

    /**
     * Runs the DHP mining loop: opens input and output, performs the
     * first scan, then repeatedly builds a scanner for the current
     * candidates and passes over the database until no candidates
     * remain. Both input and output are always closed, even on failure.
     */
    protected void runImpl() throws Exception{
        try{
            in_db.open();
            out_db.open();
            out_db.setSupport(this.support);
            postMessage(bundle.getString("SetSupport").
                    replaceFirst("<support>", 
                    String.valueOf(support)));
            this.pruner = new HashPruner(hash_size, hash_level);
            Litemset litemset[] = firstScan();
            if (litemset == null) return;
            scanned_list.add(litemset);
            Scanner scanner = buildScanner();
            for (int s = 2; scanner != null; s++) {
                setStage(bundle.getString("ScanDatabase").
                        replaceFirst("<s>", String.valueOf(s)));
                in_db.beforeFirst();
                for (int i = 0; in_db.hasNext(); i++) {
                    controllee.processCommands();
                    controllee.setProgress((float) i / tranx_num);
                    Itemset itemset = (Itemset) in_db.next();
                    pruner.add(itemset.ids);
                    scanner.scan(itemset);
                }
                scanner = buildScanner();
            }
        }finally{
            // Close both ends even if the first close() throws; the
            // original version leaked out_db when in_db.close() failed.
            try{
                in_db.close();
            }finally{
                out_db.close();
            }
        }
        setStage(bundle.getString("MiningComplete"));
    }



}