package paper.pdf;

import paper.pdf.TextExtractor;

import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.io.File;
import java.io.PrintWriter;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.regex.*;
import java.util.Iterator;

import paper.token.TT;
import paper.token.Token;
import paper.token.DocumentHead;
import paper.token.BasicToken;

import paper.parse.*;

import paper.bayes.*;

/**
 * One ground-truth annotation read from a .gt file: a span of text, the
 * token type it was labelled with, and a flag recording how the type
 * relates to the previous span's type.
 */
class Ground {
    public boolean newTT = false;
    public TT t;
    public String text;

    /**
     * @param text  the annotated text span
     * @param t     the token type assigned to the span
     * @param newTT flag supplied by the caller (see GenerateGrammars.startFile)
     */
    public Ground(String text, TT t, boolean newTT) {
        this.newTT = newTT;
        this.t = t;
        this.text = text;
    }
}

/**
 * Builds HMM token-matching grammars for PDF title blocks.
 *
 * For each PDF processed by the TextExtractor driver this class loads the
 * matching ground-truth file ("groundTruths/&lt;name&gt;.gt"), collects the
 * text before the abstract/introduction heading, tokenises it, trains an
 * {@code HMMatcher} against the ground truth, and writes an HTML report to
 * "html/grammars/&lt;name&gt;.html".
 */
public class GenerateGrammars extends TextExtractor {
    /** Per-PDF HTML report writer; opened in extract(). */
    static private PrintWriter pw = null;
    static private PrintWriter index = null;   // currently unused; kept for compatibility
    static String htmlName = null, gtName = null;
    /** True while we are still inside the paper's title/header region. */
    private boolean inTitle = true;
    /** Accumulated header text, capped at ~1600 chars (see process()). */
    private String titleBlock = "";
    /** Ground-truth labels for the current file, parsed in startFile(). */
    Ground[] truth;

    /**
     * Matches the "abstract"/"introduction" heading that terminates a
     * paper's title block. Compiled once rather than on every process() call.
     */
    private static final Pattern END_OF_TITLE = Pattern.compile(
        "(^ *abstract(\\.)? *$)|(^ *abstract[^:] .*)|(^(1.? +)? *introduction(\\.)? *$)");

    void listNone() throws FileNotFoundException {
    }

    void listFirst() throws FileNotFoundException {
    }

    void listLast() {
    }

    /**
     * Reset per-file state and load the ground-truth annotations for this PDF.
     *
     * The .gt file is a '|'-separated list of spans; a span may start with a
     * marker character that sets the type for it and following spans:
     * '#' = TITLE, '@' = AUTHOR, '%' = RUBBISH (markers are stripped).
     *
     * @param pdfName file name of the PDF, e.g. "paper.pdf"
     * @param pdfAbs  absolute path of the PDF (unused here)
     * @throws FileNotFoundException if the ground-truth file does not exist
     */
    void startFile(String pdfName, String pdfAbs) throws FileNotFoundException {
        inTitle = true;
        titleBlock = "";
        gtName = "groundTruths/" + pdfName.replace(".pdf",".gt");
        htmlName = "html/grammars/" + pdfName.replace(".pdf",".html");
        StringBuilder all = new StringBuilder();
        // try-with-resources: the original never closed the reader (leak).
        try (BufferedReader fr = new BufferedReader(new FileReader(gtName))) {
            String line;
            while ((line = fr.readLine()) != null) {
                all.append(line).append('\n');
            }
        } catch (FileNotFoundException e) {
            // Preserve the original contract: a missing .gt file propagates
            // to the caller (it was thrown before the try block previously).
            throw e;
        } catch (IOException e) {
            System.err.println("Error reading ground truth " + gtName + ": " + e);
        }
        String[] gt = all.toString().split("\\|");
        truth = new Ground[gt.length - 1];
        TT type = TT.RUBBISH;
        TT otype = type;
        for (int i = 1; i < gt.length; i++) {
            // Guard against empty segments ("||" in the input), which would
            // previously throw StringIndexOutOfBoundsException on charAt(0).
            if (!gt[i].isEmpty()) {
                switch (gt[i].charAt(0)) {
                    case '#':
                        type = TT.TITLE;
                        gt[i] = gt[i].substring(1);
                        break;
                    case '@':
                        type = TT.AUTHOR;
                        gt[i] = gt[i].substring(1);
                        break;
                    case '%':
                        type = TT.RUBBISH;
                        gt[i] = gt[i].substring(1);
                        break;
                    default:
                        // No marker: the span inherits the previous type.
                        break;
                }
            }
            // NOTE(review): newTT is set when the type did NOT change
            // (otype == type), which is the opposite of what the name
            // suggests — confirm the intended semantics before relying on it.
            truth[i - 1] = new Ground(gt[i], type, otype == type);
            otype = type;
        }
    }

    void doneFile() {
    }

    void closeFile() {
    }

    /** Report an error message to stderr. */
    void errorOut(String s) {
        System.err.println(s);
    }

    /**
     * Format an int into a 3-character right-aligned field (for values 0..99).
     *
     * @param x the value to format
     * @return the value padded with leading spaces to width 3
     */
    public String dec3(int x) {
        String s = "" + x;
        if (s.length() < 2) {
            return "  " + s;
        }
        return " " + s;
    }

    /**
     * Consume the buffered page text (TextExtractor's {@code lines})
     * line-by-line, appending everything that precedes the
     * abstract/introduction heading to {@code titleBlock}, capped at
     * 1600 characters.
     */
    public void process() {
        int i;
        while ((i = lines.indexOf("\n")) != -1) {
            String first = lines.substring(0, i);
            if (END_OF_TITLE.matcher(first.toLowerCase()).matches()) {
                inTitle = false;   // heading reached: stop collecting
            }
            if (titleBlock.length() < 1600 && inTitle) {
                titleBlock += " \n" + first;
            }
            lines = lines.substring(i + 1);
        }
    }

    /**
     * Tokenise the collected title block, train the HMM matcher against the
     * ground truth, dump per-type token probabilities to stdout, and write
     * the parse result to the per-file HTML report.
     */
    public void extract() {
        try {
            pw = new PrintWriter(htmlName);
        } catch (FileNotFoundException e) {
            // Bail out instead of falling through to a NullPointerException
            // on the first pw.println, as the original code did.
            System.err.println("Cannot open HTML output " + htmlName + ": " + e);
            return;
        }
        DocumentHead dh = new DocumentHead(titleBlock);
        BasicToken.distill(dh);
        BasicToken.tokenise(dh);
        pw.println(dh.toString().replaceAll("\n","<br>") + "<hr>");

        // Train: pair each recognised token with its ground-truth type.
        HMMatcher m = new HMMatcher();
        MatchBuilder mb = new MatchBuilder();
        Iterator<Token> it = dh.iterator();
        for (int j = 0; j < truth.length; j++) {
            Token x = it.next();
            if (x.t != TT.UNKNOWN) {
                mb.add(x.t, truth[j].t);
            }
        }
        m.compile(mb);

        // Debug dump: per token-type probabilities scaled to 0..98.
        for (int j = 0; j < TT.nTT; j++) {
            it = dh.iterator();
            while (it.hasNext()) {
                Token x = it.next();
                System.out.print(dec3((int)(x.probs[j] * 99)));
            }
            System.out.println("");
        }

        // Parse once and emit the report. (An earlier iterative
        // state-growing experiment around this code was dead/commented out
        // and has been removed.)
        m.reset();
        Parse pt = m.parse(new TokenStream(dh));
        pw.println("" + mb + "<hr>" + pt.toString().replaceAll("\n","<br>") + "<hr>");
        pw.flush();
        pw.close();   // the original leaked the writer
    }

    public static void main( String[] args ) throws Exception {
        new GenerateGrammars().runMain(args);
    }

}


