/*
 * Duplicate Detector Copyright (C) 2010 Marco Biscaro <marcobiscaro2112@gmail.com>
 * 
 * This file is part of Duplicate Detector.
 *
 * Duplicate Detector is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Duplicate Detector is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Duplicate Detector.  If not, see <http://www.gnu.org/licenses/>.
 */
package com.googlecode.duplicatedetector.model;

import static com.googlecode.duplicatedetector.i18n.Messages._;
import static javax.swing.SwingWorker.StateValue.DONE;
import static javax.swing.SwingWorker.StateValue.PENDING;
import static javax.swing.SwingWorker.StateValue.STARTED;

import java.io.File;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;

import javax.swing.SwingWorker;

import com.googlecode.duplicatedetector.Constants;
import com.googlecode.duplicatedetector.i18n.Keys;
import com.googlecode.duplicatedetector.util.ByteWrapper;
import com.googlecode.duplicatedetector.util.FileUtils;
import com.googlecode.duplicatedetector.util.Logger;

/**
 * Core class that find and list all duplicated files.
 * 
 * @author Marco Biscaro
 */
/**
 * Core class that finds and lists all duplicated files.
 * <p>
 * The search runs in three phases on a background thread (this is a
 * {@link SwingWorker}): a recursive scan that groups files by size, a filter
 * that compares the first bytes of same-sized files, and a final pass that
 * hashes the remaining candidates. Files whose whole content fits in the
 * first-bytes buffer are registered as duplicates without hashing. Progress
 * is reported via {@code setProgress} (scan = 5%, filter = 50%, hash = 100%)
 * and phase transitions via {@code firePropertyChange} on the
 * {@code PROPERTY_*} keys from {@link Constants}.
 * <p>
 * Cancellation is signalled by throwing a {@code RuntimeException} wrapping
 * an {@code InterruptedException}, which surfaces to callers of
 * {@code get()} as an {@code ExecutionException}.
 * 
 * @author Marco Biscaro
 */
public class DuplicateFinder extends SwingWorker<Map<String, Set<File>>, Void>
		implements Constants {

	private static final Logger LOGGER = new Logger(DuplicateFinder.class);
	// Phase 1 result: files grouped by their length in bytes.
	private Map<Long, Set<File>> filesSize;
	// Phase 2 result: candidate groups whose members share size and first bytes.
	private Set<Set<File>> possibleDuplicates;
	// Final result: confirmed duplicate groups, keyed by content hash (or by
	// a synthetic "not calculated" key for files smaller than the buffer).
	private Map<String, Set<File>> duplicateFiles;
	private final Filter filter;
	private final File[] files;
	private int totalFiles;
	private int possibleDuplicateCount;
	private int remainingDuplicateCount;
	private int filesWithSameSizeCount;
	private int duplicatesCount;
	private long elapsedTime;
	private long wastedSpace;

	/**
	 * Creates a finder that searches the given roots.
	 * 
	 * @param filter decides which regular files are included in the search
	 * @param files the files and/or directories to scan recursively
	 */
	public DuplicateFinder(Filter filter, File... files) {
		this.filter = filter;
		this.files = files;
	}

	/** @return the number of files accepted by the filter during the scan */
	public int getTotalFiles() {
		return totalFiles;
	}

	/** @return the number of files that share their size with another file */
	public int getFilesWithSameSizeCount() {
		return filesWithSameSizeCount;
	}

	/** @return the number of files that also share their first bytes */
	public int getPossibleDuplicateFiles() {
		return possibleDuplicateCount;
	}

	/** @return the number of confirmed duplicated files */
	public int getDuplicates() {
		return duplicatesCount;
	}

	/** @return total search duration in milliseconds */
	public long getElapsedTime() {
		return elapsedTime;
	}

	/**
	 * @return bytes that could be reclaimed by keeping a single copy of each
	 *         duplicated file
	 */
	public long getWastedSpace() {
		return wastedSpace;
	}

	@Override
	protected Map<String, Set<File>> doInBackground() throws Exception {
		long start = System.currentTimeMillis();
		filesSize = new HashMap<Long, Set<File>>();
		duplicateFiles = new HashMap<String, Set<File>>();
		possibleDuplicates = new HashSet<Set<File>>();
		// Phase 1: recursive scan, grouping by file size.
		firePropertyChange(PROPERTY_SCAN, PENDING, STARTED);
		LOGGER.info("Starting scan");
		scan(files);
		removeTrash(filesSize);
		firePropertyChange(PROPERTY_SCAN, STARTED, DONE);
		setProgress(5);
		// Phase 2: discard same-sized files whose first bytes differ.
		firePropertyChange(PROPERTY_FILTER, PENDING, STARTED);
		countFilesWithSameSize();
		filterByContent();
		countPossibleDuplicates();
		firePropertyChange(PROPERTY_FILTER, STARTED, DONE);
		setProgress(50);
		// Phase 3: confirm the remaining candidates by hashing.
		firePropertyChange(PROPERTY_HASH, PENDING, STARTED);
		registerSmallFiles();
		findDuplicates();
		countDuplicates();
		countSpaceWasted();
		elapsedTime = System.currentTimeMillis() - start;
		setProgress(100);
		firePropertyChange(PROPERTY_HASH, STARTED, DONE);
		return duplicateFiles;
	}

	/**
	 * Aborts the search when it has been cancelled, by throwing a
	 * {@code RuntimeException} wrapping an {@code InterruptedException}.
	 */
	private void checkCancelled() {
		if (isCancelled()) {
			LOGGER.info("Aborting");
			throw new RuntimeException(new InterruptedException());
		}
	}

	/**
	 * Scans each entry of the given array, checking for cancellation between
	 * entries.
	 * 
	 * @param filesToScan the entries to scan; {@code null} is treated as empty
	 *        because {@link File#listFiles()} returns {@code null} for
	 *        unreadable directories
	 */
	private void scan(File[] filesToScan) {
		if (filesToScan == null) {
			return;
		}
		for (File f : filesToScan) {
			scan(f);
			checkCancelled();
		}
	}

	/**
	 * Scans a single entry: recurses into directories and registers regular
	 * files accepted by the filter into {@link #filesSize}. Symbolic links are
	 * skipped to avoid cycles and double counting.
	 */
	private void scan(File f) {
		if (FileUtils.isSymlink(f)) {
			LOGGER.debug("Ignoring symbolic link " + f);
			return;
		}
		if (f.isDirectory()) {
			scan(f.listFiles());
		} else {
			if (!filter.accept(f)) {
				LOGGER.debug("Not including file " + f);
				return;
			}
			put(filesSize, f.length(), f);
			totalFiles++;
			LOGGER.debug("Found file " + f);
		}
	}

	/** Counts how many files remained after the size-based grouping. */
	private void countFilesWithSameSize() {
		for (Set<File> fileSet : filesSize.values()) {
			filesWithSameSizeCount += fileSet.size();
		}
		LOGGER.debug("Files with same size: " + filesWithSameSizeCount);
	}

	/**
	 * Within each same-size group, regroups files by their first bytes and
	 * keeps only the subgroups that still have at least two members.
	 * Advances the progress bar from 5% to 50%.
	 */
	private void filterByContent() {
		int count = 0;
		for (Set<File> fileSet : filesSize.values()) {
			Map<ByteWrapper, Set<File>> tempMap = new HashMap<ByteWrapper, Set<File>>();
			for (File f : fileSet) {
				LOGGER.debug("Reading bytes of file " + f);
				put(tempMap, FileUtils.readFirstBytes(f), f);
				count++;
				setProgress((count * 45 / filesWithSameSizeCount + 5));
				checkCancelled();
			}
			removeTrash(tempMap);
			possibleDuplicates.addAll(tempMap.values());
		}
	}

	/**
	 * Moves groups of small files straight into the result: when a file is
	 * shorter than the first-bytes buffer, the phase-2 comparison already
	 * covered its entire content, so hashing is unnecessary. Such groups are
	 * stored under a synthetic "not calculated" key.
	 */
	private void registerSmallFiles() {
		int count = 0;
		Iterator<Set<File>> i = possibleDuplicates.iterator();
		while (i.hasNext()) {
			Set<File> fileSet = i.next();
			// All files in a group share the same size, so inspecting any
			// single member is enough. Groups are never empty: removeTrash
			// keeps only sets with at least two files.
			File sample = fileSet.iterator().next();
			if (sample.length() < FileUtils.BUFFER_SIZE) {
				remainingDuplicateCount -= fileSet.size();
				duplicateFiles.put(_(Keys.NOT_CALCULATED) + count++,
						fileSet);
				i.remove();
			}
		}
	}

	/** Counts the phase-2 candidates and initializes the hashing workload. */
	private void countPossibleDuplicates() {
		for (Set<File> fileSet : possibleDuplicates) {
			possibleDuplicateCount += fileSet.size();
		}
		remainingDuplicateCount = possibleDuplicateCount;
		LOGGER.debug("Files that start with same bytes: "
				+ possibleDuplicateCount);
	}

	/**
	 * Hashes every remaining candidate and keeps only hash groups with at
	 * least two members. Advances the progress bar from 50% to 99%.
	 */
	private void findDuplicates() {
		int count = 0;
		for (Set<File> fileSet : possibleDuplicates) {
			Map<String, Set<File>> tempMap = new HashMap<String, Set<File>>();
			for (File f : fileSet) {
				LOGGER.debug("Generating hash of file " + f);
				put(tempMap, FileUtils.generateHash(f), f);
				count++;
				setProgress((count * 49 / remainingDuplicateCount + 50));
				checkCancelled();
			}
			removeTrash(tempMap);
			duplicateFiles.putAll(tempMap);
		}
	}

	/** Counts all files that ended up in a confirmed duplicate group. */
	private void countDuplicates() {
		for (Set<File> fileSet : duplicateFiles.values()) {
			duplicatesCount += fileSet.size();
		}
		LOGGER.debug(duplicatesCount + " duplicated files");
	}

	/**
	 * Sums the sizes of all but one file per duplicate group: the space that
	 * would be freed by keeping a single copy of each.
	 */
	private void countSpaceWasted() {
		for (Set<File> fileSet : duplicateFiles.values()) {
			int i = 0;
			for (File f : fileSet) {
				// Skip the last member: one copy of each file is kept.
				if (++i == fileSet.size()) {
					break;
				}
				wastedSpace += f.length();
			}
		}
		LOGGER.debug("Wasted space: " + wastedSpace + " bytes");
	}

	/**
	 * Adds {@code value} to the set mapped at {@code key}, creating the set
	 * on first use (a multimap-style insert).
	 */
	private static <K, T> void put(Map<K, Set<T>> map, K key, T value) {
		Set<T> tempSet = map.get(key);
		if (tempSet == null) {
			tempSet = new HashSet<T>();
			map.put(key, tempSet);
		}
		tempSet.add(value);
	}

	/**
	 * Removes every entry whose set has fewer than two files — a file with no
	 * partner cannot be a duplicate.
	 */
	private static <K> void removeTrash(Map<K, Set<File>> map) {
		Iterator<Entry<K, Set<File>>> i = map.entrySet().iterator();
		while (i.hasNext()) {
			Entry<K, Set<File>> entry = i.next();
			if (entry.getValue().size() < 2) {
				i.remove();
			}
		}
	}

}
