package at.ac.tuwien.ir.controller;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import at.ac.tuwien.ir.model.Document;
import at.ac.tuwien.ir.model.DocumentWord;
import at.ac.tuwien.ir.model.FrequencyRange;
import at.ac.tuwien.ir.model.IndexSize;
import at.ac.tuwien.ir.model.TokenWord;

import cern.colt.matrix.impl.SparseDoubleMatrix2D;

/**
 * Builds a term-document index from a directory tree of text documents and
 * writes it out in (sparse) ARFF format, either document-per-row
 * ({@link #generateIndexFile}) or token-per-row
 * ({@link #generateReversedIndexFile}).
 *
 * Not thread-safe: all state is mutated by {@link #generateDocumentDescription}.
 */
public class DocumentController implements IDocumentController
{
	/** Reads folder listings and file contents from disk. */
	private final IFileController m_fileController;
	/** Splits raw document text into index terms. */
	private final ITokenizer m_tokenizer;
	
	/** Per-index-size term-frequency bounds; indexed by {@code m_indexSize.ordinal()}. */
	private final FrequencyRange[] m_frequencies;
	/** Master index: token -> postings information accumulated over all documents. */
	private final HashMap<String, TokenWord> m_tokenWords;
	/** All successfully read documents, in reading order. */
	private final List<Document> m_documents;
	/** Selects which entry of {@code m_frequencies} is the active filter. */
	private final IndexSize m_indexSize;
	/** Running mean of document lengths in characters; null until the first document is read. */
	private Double m_documentLenghtAverage;
	
	public DocumentController(IFileController _fileController, ITokenizer _tokenizer, IndexSize _indexSize)
	{
		m_fileController = _fileController;
		m_tokenizer = _tokenizer;
		
		m_tokenWords = new HashMap<String, TokenWord>();
		m_documents = new ArrayList<Document>();
		
		m_indexSize = _indexSize;
		
		// One (min, max) term-frequency range per IndexSize value, in ordinal order.
		m_frequencies = new FrequencyRange[] { new FrequencyRange(2, 30), new FrequencyRange(1, 50), new FrequencyRange(0, 500) };
	}
	
	/**
	 * Walks every sub-directory of {@code documentPath}, tokenizes each file it
	 * contains and merges the per-file term statistics into the master index.
	 *
	 * @param documentPath root directory whose sub-directories are document classes
	 */
	@Override
	public void generateDocumentDescription(String documentPath)
	{
		List<String> directories = m_fileController.getFolderNames(documentPath);
		int documentNumber = 0;
		
		for (String dir : directories) {
			List<String> documents = m_fileController.getDocumentNames(documentPath + File.separator + dir);
			
			for (String file : documents) {
				// 1-based running number, stored in the Document created for this file.
				// NOTE(review): it is incremented even when the file later turns out
				// to be unreadable, which leaves a gap in the numbering.
				documentNumber++;
				HashMap<String, DocumentWord> documentWords = GetDocumentWords(documentPath, dir, file, documentNumber);
				if (documentWords != null)
					this.mergeWithMasterList(documentWords);
			}
			System.out.println("Directory: " + dir);
			System.gc();
		}
		
		System.out.println("Reading done!");
	}
	
	/**
	 * Reads one file, registers it as a {@link Document} and counts the
	 * frequency of every token it contains.
	 *
	 * @param documentPath   root directory
	 * @param dir            class sub-directory of the file
	 * @param file           file name
	 * @param documentNumber 1-based running document number
	 * @return token -> per-document word statistics; empty (never null) when the
	 *         file could not be read
	 */
	@Override
	public HashMap<String, DocumentWord> GetDocumentWords(String documentPath, String dir, String file, int documentNumber)
	{
		HashMap<String, DocumentWord> documentWords = new HashMap<String, DocumentWord>();
		try {
			StringBuilder fileText = m_fileController.getFileContents(
					documentPath + File.separator + dir + File.separator + file);
			List<String> tokens = m_tokenizer.tokenizeText(fileText);
			// StringBuilder.length() avoids materializing the whole text as a String
			// (original used fileText.toString().length()).
			int documentLenght = fileText.length();
			Document document = new Document(file, dir, documentNumber, documentLenght);
			m_documents.add(document);
			CalculateDocumentLenghtAverage(documentLenght);
			for (String t : tokens) {
				DocumentWord existing = documentWords.get(t);
				if (existing != null) {
					existing.increaseTermFrequency();
				} else {
					documentWords.put(t, new DocumentWord(t, document));
				}
			}
		} catch (FileNotFoundException e) {
			// Best-effort: skip unreadable files; the document is not added to
			// m_documents and an empty map is returned.
			e.printStackTrace();
		}
		return documentWords;
	}
	
	/**
	 * Merges one document's per-term counts into the master token index,
	 * dropping terms whose in-document frequency lies outside the range
	 * configured for the current index size.
	 */
	private void mergeWithMasterList(HashMap<String, DocumentWord> documentWords)
	{
		FrequencyRange range = m_frequencies[m_indexSize.ordinal()];
		for (Map.Entry<String, DocumentWord> entry : documentWords.entrySet()) {
			String token = entry.getKey();
			DocumentWord word = entry.getValue();
			int termFrequency = word.get_termFrequency();
			// BUGFIX: the original compared against get_minFrequency() twice, joined
			// with && ("tf < min && tf > min"), which is always false — the frequency
			// filter never removed anything. Keep only terms inside [min, max].
			// (Assumes FrequencyRange(min, max) exposes get_maxFrequency() — confirm.)
			if (termFrequency < range.get_minFrequency() || termFrequency > range.get_maxFrequency())
				continue;
			TokenWord tokenWord = m_tokenWords.get(token);
			if (tokenWord != null) {
				tokenWord.addDocument(word.getDocument(), termFrequency);
			} else {
				m_tokenWords.put(token, new TokenWord(token, word.getDocument(), termFrequency));
			}
		}
	}
	
	/**
	 * Writes the index as a sparse ARFF file with one row per document.
	 * Attribute 0 is the document class, attribute 1 the document name, and
	 * attributes 2..N+1 are the sorted tokens with their term weights.
	 *
	 * @param filename destination ARFF file
	 */
	@Override
	public void generateIndexFile(String filename)
	{
		// try-with-resources guarantees the writer is closed even on failure
		// (the original leaked it when an exception was thrown mid-write).
		try (PrintWriter writer = new PrintWriter(new FileWriter(filename))) {
			writer.println("@relation newsgroups");
			writer.println();
			
			writer.println("@attribute DocumentClass string");
			writer.println("@attribute DocumentName string");
			
			List<String> tokens = new ArrayList<String>(m_tokenWords.keySet());
			Collections.sort(tokens);
			
			SparseDoubleMatrix2D matrix = new SparseDoubleMatrix2D(m_documents.size(), m_tokenWords.size());
			
			int column = 0;
			for (String token : tokens) {
				writer.println("@attribute '" + token + "' numeric");
				TokenWord tokenWord = m_tokenWords.get(token);
				int postingPosition = 0;
				for (Document doc : tokenWord.getPostingsList()) {
					// BUGFIX: documentNumber is 1-based (see generateDocumentDescription)
					// while matrix rows and the @data loop below are 0-based. The
					// original stored at row documentNumber, shifting every document's
					// weights onto the following row.
					matrix.setQuick(doc.get_documentNumber() - 1, column,
							tokenWord.calculateTermWeight(postingPosition++, m_documents.size()));
				}
				column++;
			}
			
			writer.println();
			writer.println("@data");
			
			for (int row = 0; row < m_documents.size(); row++) {
				writer.print("{0 " + m_documents.get(row).get_documentClass()
						+ ", 1 " + m_documents.get(row).get_documentName());
				for (int col = 0; col < matrix.columns(); col++) {
					double weight = matrix.getQuick(row, col);
					if (weight != 0.0d) {
						// Token attributes start at ARFF index 2 (after class and name).
						writer.print(", " + (col + 2) + " " + weight);
					}
				}
				writer.println("}");
			}
			
			System.out.println("Index file written");
		} catch (IOException ioe) {
			ioe.printStackTrace();
		}
	}
	
	/**
	 * Writes the transposed index as a sparse ARFF file with one row per token.
	 * Attribute 0 is the token itself; attributes 1..N are the documents in
	 * reading order.
	 *
	 * @param filename destination ARFF file
	 */
	public void generateReversedIndexFile(String filename)
	{
		try (PrintWriter writer = new PrintWriter(new FileWriter(filename))) {
			writer.println("@relation newsgroups");
			writer.println();
			
			writer.println("@attribute Token string");
			
			for (Document document : m_documents) {
				writer.println("@attribute " + document.get_documentClass() + ":" + document.get_documentName() + " numeric");
			}
			
			writer.println();
			writer.println("@data");
			
			System.out.println("Sorting Keys...");
			List<String> keys = new ArrayList<String>(m_tokenWords.keySet());
			Collections.sort(keys);
			for (String word : keys) {
				TokenWord tokenWord = m_tokenWords.get(word);
				writer.print("{0 " + word);
				int postingPosition = 0;
				for (Document doc : tokenWord.getPostingsList()) {
					// BUGFIX: attribute 0 is the token, so the document with 1-based
					// number k occupies attribute k. The original added 1 again,
					// shifting every weight one attribute to the right.
					// (Assumes documentNumber matches reading order with no gaps — confirm.)
					writer.print(", " + doc.get_documentNumber() + " "
							+ tokenWord.calculateTermWeight(postingPosition++, m_documents.size()));
				}
				writer.println("}");
			}
		} catch (IOException ioe) {
			ioe.printStackTrace();
		}
	}
	
	/** @return the master token index (live reference, not a copy). */
	@Override
	public HashMap<String, TokenWord> GetTokenWords()
	{
		return m_tokenWords;
	}
	
	/** @return all successfully read documents in reading order (live reference). */
	@Override
	public List<Document> GetDocuments()
	{
		return m_documents;
	}

	/**
	 * Updates the running mean of document lengths. Must be called after the new
	 * document has already been added to {@code m_documents}.
	 */
	private void CalculateDocumentLenghtAverage(int _newDocumentLenght)
	{
		int count = m_documents.size();
		if (count < 2) {
			m_documentLenghtAverage = (double) _newDocumentLenght;
		} else {
			// BUGFIX: the new document is already counted in m_documents, so the
			// previous average covers (count - 1) documents. The original multiplied
			// by count, overweighting the history on every update.
			m_documentLenghtAverage = (m_documentLenghtAverage * (count - 1) + _newDocumentLenght) / count;
		}
	}

	/** @return the mean document length in characters, or 0.0 before any document was read. */
	@Override
	public double GetDocumentLenghtAverage()
	{
		// Guard against unboxing null before the first document (original NPE'd).
		return m_documentLenghtAverage == null ? 0.0 : m_documentLenghtAverage;
	}
}
