/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.bio.tests;

import com.bio.jpa.entities.Fasta;
import com.bio.jpa.entities.FastaSequence;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Serializable;
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;

/**
 *
 * @author Diego
 */
@SessionScoped
@ManagedBean(name = "fastaParser")
public class FastaParser implements Serializable {

    private static final Logger LOG = Logger.getLogger(FastaParser.class.getName());

    /**
     * Chunk size, in characters, used to split a record's sequence into
     * {@link FastaSequence} rows (200 lines of 60 residues — same value as the
     * original 60 * 200 read buffer).
     */
    private static final int CHUNK_SIZE = 60 * 200;

    public static void main(String[] args) {
        FastaParser fp = new FastaParser();
        fp.parse("C:\\P0CH25.fasta");
    }

    /**
     * Parses a FASTA file into {@link Fasta} entities, one per {@code >} record.
     * Sequence text has whitespace and digits stripped and is stored in
     * {@link FastaSequence} chunks of {@link #CHUNK_SIZE} characters, each
     * carrying its 1-based start position within the record. Records whose
     * sequence contains a stray {@code >} are rejected with a console warning.
     * Lines starting with {@code ;} (FASTA comments) are ignored.
     *
     * @param file the input, either a {@link String} path or a {@link java.io.File}
     * @return the parsed records; empty (never {@code null}) if the file cannot
     *         be opened or the argument type is unsupported
     */
    public List<Fasta> parse(Object file) {
        List<Fasta> fastas = new ArrayList<Fasta>();

        Reader reader;
        try {
            if (file instanceof String) {
                reader = new FileReader(file.toString());
            } else if (file instanceof java.io.File) {
                reader = new FileReader((java.io.File) file);
            } else {
                // Previously an unsupported argument fell through and hit a
                // null (or stale, statically shared) tokenizer; fail fast instead.
                LOG.log(Level.SEVERE, "Unsupported input type: {0}",
                        file == null ? "null" : file.getClass().getName());
                return fastas;
            }
        } catch (FileNotFoundException ex) {
            LOG.log(Level.SEVERE, null, ex);
            return fastas;
        }

        try {
            // The tokenizer is now a local: the old static field made this
            // session-scoped bean non-reentrant and unsafe across sessions.
            StreamTokenizer tok = new StreamTokenizer(reader);
            setGrama(tok);

            Fasta fasta = null;
            StringBuilder sb = new StringBuilder();
            int type;
            while ((type = tok.nextToken()) != StreamTokenizer.TT_EOF) {
                if (type != StreamTokenizer.TT_WORD) {
                    continue;
                }
                String current = tok.sval;
                if (current.startsWith(">")) {
                    if (current.contains("ACCESSION")) {
                        current = current.replace("ACCESSION", "");
                    }
                    // Close out the previous record before starting a new one.
                    flush(fasta, sb, fastas);
                    // BUGFIX: always reset the buffer. The original cleared it
                    // only when the record was accepted, so a rejected
                    // (contaminated) sequence leaked into the next record.
                    sb.setLength(0);
                    fasta = new Fasta();
                    if (current.trim().length() == 1) {
                        // Bare ">" header: synthesize a defline.
                        fasta.setDefline(">SEQUENCE_" + (fastas.size() + 1));
                    } else {
                        fasta.setDefline(current);
                    }
                } else if (current.startsWith(";")) {
                    System.out.println("Comments will be ignored: " + current);
                } else {
                    // Strip whitespace and digits (line numbering) from sequence data.
                    sb.append(current.replaceAll("[\\s\\d]", ""));
                }
            }
            // BUGFIX: the final record now goes through the same validation path
            // as all the others (the original appended it unconditionally and
            // threw NPE on an input with no header at all).
            flush(fasta, sb, fastas);
        } catch (IOException ex) {
            LOG.log(Level.SEVERE, null, ex);
        } finally {
            // BUGFIX: the FileReader was never closed (resource leak).
            try {
                reader.close();
            } catch (IOException ex) {
                LOG.log(Level.WARNING, "Failed to close input", ex);
            }
        }
        return fastas;
    }

    /**
     * Validates the buffered sequence of {@code fasta}, splits it into
     * {@link #CHUNK_SIZE} pieces with 1-based positions, and appends the record
     * to {@code fastas}. A record whose sequence contains {@code >} is rejected
     * with a console warning and NOT added. No-op when {@code fasta} is
     * {@code null} (no header seen yet).
     */
    private void flush(Fasta fasta, StringBuilder sb, List<Fasta> fastas) throws IOException {
        if (fasta == null) {
            return;
        }
        String content = sb.toString();
        if (content.contains(">")) {
            System.out.println(fasta.getDefline() + " Will not be processed, inconsistent characters found in sequence");
            return;
        }
        // BUGFIX: the chunk position restarts at 1 for every record; the
        // original kept accumulating it across the whole file.
        int pos = 1;
        char[] buf = new char[CHUNK_SIZE];
        StringReader sr = new StringReader(content);
        int reads;
        while ((reads = sr.read(buf)) != -1) {
            fasta.getFastaSequenceList().add(new FastaSequence(String.valueOf(buf, 0, reads), pos, fasta));
            pos += CHUNK_SIZE;
        }
        fastas.add(fasta);
    }

    /**
     * Configures the tokenizer grammar for FASTA input: letters, digits and the
     * punctuation commonly found in deflines are all word characters, so each
     * header or sequence run comes back as a single {@code TT_WORD} token.
     * Control characters act as separators; {@code '} and {@code "} are quote
     * characters; end-of-line is reported as a token.
     *
     * @param tok the tokenizer to configure; its syntax table is reset first
     */
    public void setGrama(StreamTokenizer tok) {
        // Recreate a standard syntax table from scratch.
        tok.resetSyntax();
        tok.whitespaceChars('\u0000', '\u0020');

        tok.wordChars('a', 'z');
        tok.wordChars('A', 'Z');
        tok.wordChars('\u00A0', '\u00FF');
        tok.quoteChar('\'');
        tok.quoteChar('"');

        tok.eolIsSignificant(true);
        tok.slashSlashComments(false);
        tok.slashStarComments(false);

        // Keep digits and defline punctuation inside words so headers such as
        // ">sp|P0CH25|..." are returned as one token rather than split up.
        tok.wordChars('0', '9');
        tok.wordChars('.', '.');
        tok.wordChars('|', '|');
        tok.wordChars('>', '>');
        tok.wordChars('_', '_');
        tok.wordChars(' ', ' ');
        tok.wordChars('-', '-');
        tok.wordChars('=', '=');
        tok.wordChars('\'', '\'');
        tok.wordChars('@', '@');
        tok.wordChars(',', ',');
        tok.wordChars('"', '"');
        tok.wordChars(':', ':');
        tok.wordChars('/', '/');
        tok.wordChars('(', ')');
        tok.wordChars(';', ';');

        tok.wordChars('+', '+');
        tok.wordChars('[', ']');
    }
}
