#!/usr/bin/env python
# -*- coding: utf-8 -*-
# 
# md5fdupl.py
#
# Copyright (c) 2008, Andrey Usov <uandrey@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the <organization> nor the
#       names of its contributors may be used to endorse or promote products
#       derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY <copyright holder> ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <copyright holder> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


# Module version string. NOTE(review): the conventional spelling is
# __version__ (double trailing underscore); kept as-is because the
# OptionParser(version=...) call in __main__ reads this exact name.
__version = "0.1"

import os, sys, md5flib

def md5data_file_search_duplicates(md5data):
    """
    Return the duplicate entries found in an md5data file.

    Loads the detailed md5 listing (a dict keyed by md5 digest) and keeps
    only the digests that map to more than one file record.
    """
    entries = md5flib.load_md5data_detail(md5data)
    # A digest shared by several records marks a set of duplicate files.
    return dict((digest, records)
                for digest, records in entries.items()
                if len(records) > 1)

def md5data_directory_search_duplicates(directory):
    """
    Search for duplicates across the md5data files in a directory.

    Compares every unordered pair of md5data files exactly once; for each
    pair that shares at least one md5 entry, prints one line in the form
    "file_a/file_b:count" where count is the number of common entries.
    """
    files = md5flib.scan_directory(directory)
    # itertools.combinations yields each unordered pair once, in the same
    # first-occurrence order as the original nested loops.  This replaces
    # the original scheme of testing `set((a, b)) in file_combs` against an
    # ever-growing list, which added a linear scan to every pair.
    for file_a, file_b in itertools.combinations(files, 2):
        file_a_data = set(md5flib.load_md5data(os.path.join(directory, file_a)))
        file_b_data = set(md5flib.load_md5data(os.path.join(directory, file_b)))
        common_files = file_a_data & file_b_data
        if common_files:
            # Parenthesized single-expression print: identical output on
            # Python 2, and valid Python 3 syntax as well.
            print("%s/%s:%d" % (file_a, file_b, len(common_files)))

def md5data_two_files_search_duplicates(file_a, file_b):
    """
    Print the entries that two md5data files have in common.

    For every md5 digest present in both files, each matching record from
    either file is printed as "<source file>: <three record fields joined
    by ';'>", and groups are separated by a blank line.
    """
    details_a = md5flib.load_md5data_detail(file_a)
    details_b = md5flib.load_md5data_detail(file_b)
    shared_digests = set(details_a.keys()) & set(details_b.keys())

    for digest in shared_digests:
        # One pass per source file keeps the two symmetric loops in sync.
        for source, details in ((file_a, details_a), (file_b, details_b)):
            for record in details[digest]:
                print("%s: %s;%s;%s" % (source, record[0], record[1], record[2]))
        print("")

def remove_duplicates(filename):
    """
    Remove the duplicate files marked by '#' in the source file.

    Each line of *filename* that starts with '#' names a file to delete:
    the path is the text between the '#' marker and the first ';'.
    Every deleted path is printed; a path that cannot be removed is
    reported as "* <path> - not found" instead of raising.
    """
    # 'with open(...)' closes the handle deterministically; the original
    # used the Python-2-only file() builtin and never closed it.
    with open(filename, 'r') as source:
        lines = source.read().split("\n")
    for line in lines:
        if not line:
            continue
        if line.startswith("#"):
            # Named 'target' so it no longer shadows the 'filename' parameter.
            target = line[1:].split(";")[0]
            try:
                os.remove(target)
                print(target)
            except OSError:
                print("* %s - not found" % target)
            
            
if __name__ == "__main__":
    from optparse import OptionParser

    parser = OptionParser(version="%prog, v." + __version)
    parser.add_option("-f", "--file", dest="md5data_file",
                      help="md5data file for searching of duplicates")
    parser.add_option("-d", "--directory", dest="md5data_directory",
                      help="md5data files directory for searching of duplicates")
    parser.add_option("-t", "--two_files", dest="md5data_two_files",
                      help="two files for searching of duplicates, separated by ':'.")
    parser.add_option("-r", "--remove", dest="remove_duplicates",
                      help="remove duplicate files marked by '#' in source file")
    (options, args) = parser.parse_args()

    if options.md5data_file is not None:
        # Search duplicates in a single md5data file and print each
        # duplicate group, blank-line separated.
        duplicates = md5data_file_search_duplicates(options.md5data_file)
        for md5 in duplicates.keys():
            for entry in duplicates[md5]:
                print("%s;%s;%s" % (entry[0], entry[1], entry[2]))
            print("")
        # Successful completion: exit 0 (the original exited 1, which
        # signals failure to shells and calling scripts).
        sys.exit(0)

    if options.md5data_directory is not None:
        # Search duplicates across the md5data files of a directory.
        md5data_directory_search_duplicates(options.md5data_directory)
        sys.exit(0)

    if options.md5data_two_files is not None:
        # Search duplicates in two md5data files given as "a:b".
        try:
            (file_a, file_b) = options.md5data_two_files.split(":")
            md5data_two_files_search_duplicates(file_a, file_b)
        except ValueError:
            print("Error! Please separate two files by ':'. Use -h or --help for detail.")
            sys.exit(1)
        sys.exit(0)

    if options.remove_duplicates is not None:
        # Remove duplicate files marked by '#' in the given source file.
        remove_duplicates(options.remove_duplicates)
        sys.exit(0)

    parser.error("Incorrect number of arguments, \n\nuse -h or --help for detail")
