package extractors;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import parser.ArticleParser;
import primitive.Article;
import primitive.Pair;

public class BagOfWords implements Extractor {
	// Training articles the feature vectors were extracted from.
	private List<Article> articles_list = new ArrayList<>();
	// Label definitions; the FIRST pair's `first` field names the article
	// attribute whose value is used as the class label.
	private List<Pair> labels_list = new ArrayList<>();
	// Training set: one map per article, class label -> term-count vector
	// (vector positions are aligned with list_words).
	private List<HashMap<String, List<Double>>> features_vectors = new ArrayList<>();
	// Vocabulary; the index of each term defines the feature-vector dimension it fills.
	List<String> list_words = new ArrayList<>();
	
	public void set_labels(List<Pair> labels){
		labels_list = labels;
	}
	
	/**
	 * Loads vocabulary terms from a file (space-separated terms, any number of
	 * lines) and appends them to the current vocabulary.
	 *
	 * @param filename path of the keyword file (read as UTF-8)
	 * @return the vocabulary list (possibly partially filled if reading failed)
	 */
	public List<String> setKeyWords(String filename){
		// try-with-resources guarantees the reader is closed even on failure
		// (the original leaked the stream when an exception was thrown).
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8))) {
			String line;
			while ((line = br.readLine()) != null) {
				for (String term : line.split(" ")) {
					list_words.add(term);
				}
			}
		} catch (Exception e) {
			System.out.println("Error: " + e.getMessage());
		}
		
		return list_words;
	}
	
	/**
	 * Extracts a bag-of-words feature vector for every training article and
	 * stores it in the internal training set, keyed by the article's class
	 * label. Requires {@link #set_labels(List)} and a non-empty vocabulary
	 * to have been set up beforehand.
	 *
	 * @param articles training articles
	 */
	public void featuresExtraction(List<Article> articles){
		articles_list = articles;
		
		ArticleParser articleParser = new ArticleParser();
		
		for (Article a : articles_list) {
			Article clearedArticle = articleParser.remove_all(a);
			
			// Pre-size: one dimension per vocabulary term.
			List<Double> vecOfFeatures = new ArrayList<>(list_words.size());
			for (String term : list_words) {
				vecOfFeatures.add(countTerm(term, clearedArticle.getBody()));
			}
			
			HashMap<String, List<Double>> group = new HashMap<>();
			// labels_list.get(0).first names the attribute whose value is the class label.
			group.put(clearedArticle.getValue(labels_list.get(0).first), vecOfFeatures);
			
			features_vectors.add(group);
		}
	}
	
	/**
	 * Saves the training set to a text file: first a header line of vocabulary
	 * terms joined by ';', then one "label|v1;v2;..." line per feature vector.
	 *
	 * @param filename destination path (written as UTF-8)
	 */
	public void featuresSave(String filename){
		// try-with-resources closes (and flushes) the writer on every path.
		try (PrintWriter save = new PrintWriter(filename, "UTF-8")) {
			for (String s : list_words) {
				save.print(s + ";");
			}
			save.println();
			
			for (HashMap<String, List<Double>> l : features_vectors) {
				for (Map.Entry<String, List<Double>> entry : l.entrySet()) {
					// StringBuilder instead of String += in a loop (O(n) vs O(n^2)).
					StringBuilder result = new StringBuilder(entry.getKey()).append('|');
					for (Double val : entry.getValue()) {
						result.append(val).append(';');
					}
					// Drop the trailing ';' (same as the original substring trim).
					result.setLength(result.length() - 1);
					
					save.println(result);
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
	
	/**
	 * Extracts the bag-of-words vector for a single article; called by the
	 * classifier (knn) at prediction time.
	 *
	 * @param article article to vectorize
	 * @return term counts aligned with the vocabulary order
	 */
	public List<Double> extraction(Article article){
		ArticleParser articleParser = new ArticleParser();
		
		Article clearedArticle = articleParser.remove_all(article);
		
		List<Double> output = new ArrayList<>(list_words.size());
		for (String term : list_words) {
			output.add(countTerm(term, clearedArticle.getBody()));
		}
		
		return output;
	}
	
	/**
	 * Loads a training set previously written by {@link #featuresSave(String)}:
	 * the first line is the ';'-separated vocabulary, every following line is
	 * "label|v1;v2;...". Loaded data is appended to the current state.
	 *
	 * @param filename source path (read as UTF-8)
	 */
	public void loadFeatures(String filename){
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8))) {
			String line;
			boolean headerRead = false;
			
			while ((line = br.readLine()) != null) {
				// The first line of the file is the vocabulary header.
				if (!headerRead) {
					for (String term : line.split(";")) {
						list_words.add(term);
					}
					headerRead = true;
				} else {
					int sep = line.indexOf('|');
					String key = line.substring(0, sep);
					// BUGFIX: the original used substring(sep + 1, line.length() - 1),
					// which cut off the last character of the line and corrupted the
					// final value (featuresSave already strips the trailing ';').
					String values = line.substring(sep + 1);
					
					List<Double> val = new ArrayList<>();
					for (String v : values.split(";")) {
						val.add(Double.parseDouble(v));
					}
					HashMap<String, List<Double>> p = new HashMap<>();
					p.put(key, val);
					features_vectors.add(p);
				}
			}
		} catch (Exception e) {
			System.out.println("Error: " + e.getMessage());
		}
	}
	
	/** @return the extracted/loaded training set (label -> feature vector, one map per article). */
	public List<HashMap<String, List<Double>>> getTrainingSet(){
		return features_vectors;
	}
	
	// ------------------------
	
	/**
	 * Counts how many space-separated tokens of {@code article} equal {@code word}.
	 *
	 * @param word    vocabulary term to count
	 * @param article article body text
	 * @return occurrence count as a Double (feature-vector element type)
	 */
	private Double countTerm(String word, String article){
		double count = 0.0;
		
		for (String token : article.split(" ")) {
			if (word.equals(token)) {
				count += 1.0;
			}
		}
		
		return count;
	}
}