package predictionEngine;


import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import predictionEngine.errorMangement.ErrorManager;

/**
 * Loads a dataset from an ARFF (Attribute-Relation File Format) file.
 * <p>
 * Usage: construct with a file name, call {@link #loadData()}, then read the
 * parsed content via {@link #getInstances()} and {@link #getAttributes()}.
 *
 * @author RIK
 */
public class ArffReader {
    // Parsed dataset; populated by loadData(), null before the first load.
    Dataset dataset = null;
    // Either a path (contains a separator) or a bare file name that is
    // resolved against the local "datasets" directory.
    String fileName;

    /**
     * @param fileName path to the ARFF file, or a bare name looked up under
     *                 {@code ./datasets/}
     */
    public ArffReader(String fileName) {
        this.fileName = fileName;
    }

    /** Reads and parses the ARFF file, replacing any previously loaded data. */
    public void loadData() {
        dataset = new Dataset(100);
        readArffFile(fileName);
    }

    /** @return the parsed data rows; each row holds one value per attribute */
    public List<double[]> getInstances() {
        return dataset.getInstances();
    }

    /** @return the attribute definitions declared in the ARFF header */
    public List<Attribute> getAttributes() {
        return dataset.getAttributes();
    }

    /**
     * Reads the file line by line, dropping blank lines and '%' comments,
     * then hands the remaining lines to {@link #parseArffFile(ArrayList)}.
     * I/O errors are reported through the ErrorManager rather than thrown.
     *
     * @return the cleaned lines (the parsed result lands in {@link #dataset}
     *         as a side effect); empty if the file could not be read
     */
    private ArrayList<String> readArffFile(String fileName) {
        ArrayList<String> tmpData = new ArrayList<String>();
        // Bare names (no path separator) are resolved against ./datasets/.
        String path = fileName.contains(File.separator)
                ? fileName
                : "." + File.separatorChar + "datasets" + File.separator + fileName;
        // try-with-resources: the original leaked the reader on every path.
        // NOTE(review): FileReader uses the platform default charset — confirm
        // the dataset files match it, or switch to an explicit charset.
        try (BufferedReader b = new BufferedReader(new FileReader(path))) {
            String line;
            while ((line = b.readLine()) != null) {
                line = line.trim();
                // Skip blank lines and full-line comments; strip inline '%'
                // comments from the rest.
                if (!line.isEmpty() && line.charAt(0) != '%') {
                    tmpData.add(line.split("%")[0].trim());
                }
            }
            parseArffFile(tmpData);
        } catch (IOException ex) {
            // FileNotFoundException is an IOException; one handler covers both.
            ErrorManager.getInstance().reportError(ex);
        }
        return tmpData;
    }

    /**
     * Parses cleaned ARFF lines: lines starting with '@' define the relation
     * name and attributes, all other lines are data rows.
     */
    private void parseArffFile(ArrayList<String> arffData) {
        for (String line : arffData) {
            if (line.charAt(0) == '@') {
                parseHeaderLine(line);
            } else {
                parseDataLine(line);
            }
        }
    }

    /** Handles one header line: '@relation &lt;name&gt;' or '@attribute &lt;name&gt; &lt;type&gt;'. */
    private void parseHeaderLine(String line) {
        StringTokenizer stk = new StringTokenizer(line, " \t");
        String token = stk.nextToken();
        if (token.equalsIgnoreCase("@attribute")) {
            token = stk.nextToken();
            // Quoted names may contain spaces and therefore span several tokens.
            String columnName = (token.charAt(0) == '\'')
                    ? parseQuoteToken(token, stk)
                    : token;
            StringBuilder type = new StringBuilder();
            while (stk.hasMoreTokens()) {
                type.append(' ').append(stk.nextToken());
            }
            // BUG FIX: the original only trimmed the type for unquoted names.
            dataset.addAttribute(new Attribute(columnName,
                    parseArffColumnType(type.toString().trim())));
        } else if (token.equalsIgnoreCase("@relation")) {
            StringBuilder name = new StringBuilder();
            while (stk.hasMoreTokens()) {
                name.append(' ').append(stk.nextToken());
            }
            dataset.setName(name.toString().trim().replace("'", ""));
        }
        // Other directives (e.g. '@data') carry no information here and are skipped.
    }

    /** Parses one data row into a numeric instance via each attribute's value mapping. */
    private void parseDataLine(String line) {
        double[] instance = new double[dataset.getNumberOfAttributes()];
        StringTokenizer stk = new StringTokenizer(line, ",;\t");
        // Bound by instance.length so a malformed row with extra fields no
        // longer throws ArrayIndexOutOfBoundsException.
        for (int c = 0; stk.hasMoreTokens() && c < instance.length; c++) {
            // Space is not a delimiter, so tokens after ", " carry a leading
            // space; trim so values match the trimmed attribute definitions.
            String token = stk.nextToken().trim();
            if (!token.isEmpty() && token.charAt(0) == '\'') {
                token = parseQuoteToken(token, stk);
            }
            instance[c] = dataset.getAttribute(c).getValue(token);
        }
        dataset.addInstance(instance);
    }

    /**
     * Joins tokens until one ends with a closing quote, then strips all quote
     * characters. Stops at end of input even if the quote is never closed
     * (the original threw NoSuchElementException in that case).
     */
    private String parseQuoteToken(String startToken, StringTokenizer stk) {
        StringBuilder token = new StringBuilder(startToken);
        while (!token.toString().endsWith("'") && stk.hasMoreTokens()) {
            token.append(' ').append(stk.nextToken());
        }
        return token.toString().replace("'", "");
    }

    /**
     * Parses an attribute type declaration. Nominal types such as
     * "{a, b, c}" yield the list of allowed values; a type string with no
     * commas (e.g. "real") yields a single-element list containing it.
     */
    private ArrayList<String> parseArffColumnType(String type) {
        ArrayList<String> allowedValues = new ArrayList<String>();
        StringTokenizer stk = new StringTokenizer(
                type.replace("{", "").replace("}", ""), ",");
        while (stk.hasMoreTokens()) {
            // Values may be quoted and padded with whitespace.
            allowedValues.add(stk.nextToken().trim().replace("'", ""));
        }
        return allowedValues;
    }
}
