package com.nilcaream.dirstore.cli.command;

import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.nilcaream.dirstore.cli.Configuration;
import com.nilcaream.dirstore.core.common.ProgressMeter;
import com.nilcaream.dirstore.core.common.ZipTool;
import com.nilcaream.dirstore.core.model.ContentId;
import com.nilcaream.dirstore.core.model.ZipDir;
import com.nilcaream.dirstore.core.processor.DuplicateDirsFinder;
import org.slf4j.Logger;

import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import static com.google.common.base.Strings.isNullOrEmpty;
import static com.nilcaream.dirstore.core.common.Utils.toReadableFileSize;
import static java.lang.String.format;

/**
 * Command that finds directories duplicated across the input zip files
 * (given as space-separated operands) and logs a report of the duplicates,
 * including the total disk space that removing them would reclaim.
 * <p/>
 * Krzysztof Smigielski 7/12/13 8:54 PM
 */
public class FindDuplicates implements Command {

    @Inject
    protected Logger logger;

    @Inject
    protected Configuration configuration;

    @Inject
    private ProgressMeter progressMeter;

    @Inject
    private DuplicateDirsFinder finder;

    // NOTE(review): injected but never referenced in this class — confirm it is
    // actually needed (e.g. by a subclass or for container side effects) before removing.
    @Inject
    private ZipTool zipTool;

    /**
     * Reads the configured input zip files, detects duplicated directories,
     * filters them by the configured size threshold and by parent awareness,
     * then logs each duplicate group ordered by size (largest first) together
     * with the total space that deleting the duplicates would free.
     */
    @Override
    public void execute() {
        try {
            final Multimap<ContentId, ZipDir> map = filterByParent(filterBySize(readDuplicatesFromFiles()));

            long duplicatesSize = finder.calculateTotalSizeOfDuplicates(map);
            // parameterized logging defers message construction until the level is known to be enabled
            logger.info("removing all duplicated directories will free {} of space", toReadableFileSize(duplicatesSize));

            for (ContentId contentId : finder.orderBySizeDesc(map.keySet())) {
                logger.info("# {} ({})", contentId.getId(), toReadableFileSize(contentId.getSize()));
                for (ZipDir zipDir : map.get(contentId)) {
                    logger.info("> {} - /{}", zipDir.getZipFileName(), zipDir.getDirectoryName());
                }
            }
        } catch (IOException e) {
            // no meaningful recovery for a failed read; report the cause and stop
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Drops duplicate groups whose parent directory is itself duplicated,
     * keeping only the topmost duplicated roots.
     */
    private Multimap<ContentId, ZipDir> filterByParent(Multimap<ContentId, ZipDir> map) {
        Multimap<ContentId, ZipDir> result = finder.filterDuplicatedRoots(map);
        logger.info("found {} parent-aware duplicated directories", result.keySet().size());
        return result;
    }

    /**
     * Drops duplicate groups smaller than the configured duplicate-size threshold.
     */
    private Multimap<ContentId, ZipDir> filterBySize(Multimap<ContentId, ZipDir> map) {
        long threshold = configuration.getDuplicateThreshold();
        Multimap<ContentId, ZipDir> result = finder.filterBySize(map, threshold);
        logger.info("found {} duplicated directories with size above {} threshold",
                result.keySet().size(), toReadableFileSize(threshold));
        return result;
    }

    /**
     * Scans the input files, maps each zip directory to its content id,
     * inverts that mapping and keeps only content ids shared by more than one
     * directory, i.e. the duplicates.
     *
     * @return content id to the zip directories carrying that content
     * @throws IOException if reading the zip files fails
     */
    private Multimap<ContentId, ZipDir> readDuplicatesFromFiles() throws IOException {
        Map<ZipDir, ContentId> zipDirToContentId = finder.createZipDirToContentIdMap(prepareFiles());
        Multimap<ContentId, ZipDir> contentIdToZipDirs = finder.invert(zipDirToContentId);
        Multimap<ContentId, ZipDir> duplicates = finder.filterNonDuplicates(contentIdToZipDirs);
        logger.info("found {} duplicated directories in total", duplicates.keySet().size());
        return duplicates;
    }

    /**
     * Builds the list of input files from the space-separated operands string.
     * Splits on any run of whitespace and skips blank tokens, so repeated or
     * leading/trailing spaces no longer produce bogus empty-path File entries.
     */
    private List<File> prepareFiles() {
        List<File> files = Lists.newArrayList();
        for (String fileName : configuration.getOperands().split("\\s+")) {
            if (!fileName.isEmpty()) {
                files.add(new File(fileName));
            }
        }
        logger.info("analyzing {} files", files.size());
        logger.debug("{}", files);
        return files;
    }

    /**
     * The command applies only when find-duplicates mode is selected and at
     * least one operand (input file) was provided.
     */
    @Override
    public boolean isValid() {
        boolean isValid = false;
        if (configuration.isFindDuplicates()) {
            if (isNullOrEmpty(configuration.getOperands())) {
                logger.warn("no input files were provided.");
            } else {
                isValid = true;
            }
        }
        return isValid;
    }
}
