package organizer.hb;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
import organizer.Duplicate;
import organizer.FileMetadata;
import organizer.IFileMetadata;
import organizer.IFileSource;
import organizer.IProgressMonitor;

/**
 * Looks for files to place into the metadata list, and then searches the list
 * for duplicates.
 * @author dawong
 */
public class Scanner {

    private static final Logger log = Logger.getLogger(Scanner.class);

    // Accumulates metadata for every scanned file; queried by length and hash group.
    private MetadataList metaList;
    // Backing store used to look up, hash, and persist per-file metadata.
    private FileRepository repository;

    /**
     * Creates a scanner with no metadata list or repository; both must be
     * supplied via the setters before {@link #loadFiles} or
     * {@link #getDuplicates} is called.
     */
    public Scanner() {
        this(null, null);
    }

    /**
     * Creates a scanner.
     * @param metaList list that accumulates file metadata during a scan
     * @param repository store used to read, hash, and update file metadata
     */
    public Scanner(MetadataList metaList, FileRepository repository) {
        this.metaList = metaList;
        this.repository = repository;
    }

    /**
     * Sets the metadata list to accumulate into.
     * @param metaList list that accumulates file metadata
     */
    public void setMetadataList(MetadataList metaList) {
        this.metaList = metaList;
    }

    /**
     * Sets the repository used for metadata lookups and updates.
     * @param repository store used to read, hash, and update file metadata
     */
    public void setRepository(FileRepository repository) {
        this.repository = repository;
    }

    /**
     * Drains the given source, fetching metadata for each file from the
     * repository and adding it to the metadata list.
     * @param fileSource source of files; iterated until it returns {@code null}
     */
    public void loadFiles(IFileSource fileSource) {
        File file = fileSource.nextFile();
        while (file != null) {
            IFileMetadata meta = repository.getMetadata(file);
            metaList.add(meta);
            file = fileSource.nextFile();
        }
    }

    /**
     * Finds duplicate files among the loaded metadata. Files are first grouped
     * by length (two files of different lengths cannot be duplicates), then
     * content hashes are computed lazily within each group; files sharing a
     * hash are reported as duplicates.
     * <p>
     * If the monitor is canceled mid-scan, the partial set of duplicates found
     * so far is returned.
     * @param progress optional progress monitor; may be {@code null}
     * @param listener optional per-length-group listener; may be {@code null}
     * @return the duplicates found, one {@link Duplicate} per shared hash
     */
    public List<Duplicate> getDuplicates(IProgressMonitor progress, IScannerListener listener) {
        Map<String, Duplicate> dupMap = new HashMap<String, Duplicate>();
        log.debug("searching for dups");
        if (progress != null) {
            progress.start(metaList.getNumberOfSameLengthFiles());
            progress.showMessage("Calculating differences");
        }
        Set<String> dupHashes = new HashSet<String>();
        // Loop through all the files of the same length
        List<Long> lengthList = metaList.getLengthGroups();
        for (Long length : lengthList) {
            // Honor cancellation across length groups, not just within one;
            // previously a cancel only broke the inner loop and the scan kept
            // visiting every remaining group (and firing listener events).
            if (progress != null && progress.isCanceled()) {
                break;
            }
            if (listener != null) {
                listener.dupGroupStarted();
            }
            // Go through all of the files in this length group.
            Set<String> groupHashes = new HashSet<String>();
            for (Iterator<IFileMetadata> imeta = metaList.getFilesOfLength(length); imeta.hasNext(); ) {
                IFileMetadata meta = imeta.next();
                if (progress != null && progress.isCanceled()) {
                    break;
                }
                String metaHash = meta.getContentHash();
                if (metaHash == null) {
                    // Hash not yet computed for this file; compute and persist
                    // it so subsequent scans can skip the expensive hashing.
                    metaHash = repository.getHash(new File(meta.getPath()));
                    FileMetadata newMeta = new FileMetadata(meta);
                    newMeta.setContentHash(metaHash);
                    repository.updateMetadata(newMeta);
                    meta = newMeta;
                }
                // Set.add returns false when the hash was already present in
                // this length group, i.e. we found a duplicate.
                if (!groupHashes.add(metaHash)) {
                    dupHashes.add(metaHash);
                }
                if (progress != null) {
                    progress.increment(1);
                }
            }
            if (listener != null) {
                listener.dupGroupFinished();
            }
        }
        if (progress != null) {
            progress.stop();
            progress.start(dupHashes.size());
            progress.showMessage("Retrieving duplicate file info");
        }
        // Collect all the dup files 
        for (String hash : dupHashes) {
            if (progress != null && progress.isCanceled()) {
                break;
            }
            Duplicate dup = new Duplicate(hash);
            dupMap.put(hash, dup);
            List<IFileMetadata> hashMetaList = metaList.getFilesOfHash(hash);
            for (IFileMetadata hashMeta : hashMetaList) {
                dup.add(hashMeta.getPath());
            }
            if (progress != null) {
                progress.increment(1);
            }
        }
        if (progress != null) {
            progress.stop();
        }
        return new ArrayList<Duplicate>(dupMap.values());
    }

}
