# This file is part of Sheriff53.
#
# Sheriff53 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sheriff53 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sheriff53. If not, see <https://www.gnu.org/licenses/>.

import logging
from collections import Counter
from glob import glob
from os import rename
from os.path import basename, dirname, realpath

# Module-wide logging setup: timestamped INFO-level messages.
_LOG_FORMAT = "%(asctime)s - %(message)s"
_LOG_DATEFMT = "%Y-%b-%d %H:%M:%S"
logging.basicConfig(format=_LOG_FORMAT, datefmt=_LOG_DATEFMT, level=logging.INFO)
logger = logging.getLogger(__name__)


def remove_duplicates(input_file: str) -> None:
    """Rewrite *input_file* in place with duplicate lines removed.

    Comment lines (starting with ``#``) and blank lines are always passed
    through unchanged, mirroring how :func:`get_duplicates` ignores them
    when detecting duplicates.  The cleaned content is first written to a
    sibling ``_clean.tmp`` file, which then replaces the original.

    :param input_file: path to the ``.txt`` file to clean in place.
    """
    output_file = input_file.replace(".txt", "_clean.tmp")

    # Context managers guarantee both handles are closed even if an
    # exception occurs mid-copy (the original leaked them on error).
    with open(input_file) as fin, open(output_file, "w") as fout:
        lines_seen = set()
        for line in fin:
            # Comments and blank lines are never treated as duplicates;
            # write them through as-is.
            if line.startswith("#") or line == "\n":
                fout.write(line)
            elif line not in lines_seen:
                lines_seen.add(line)
                fout.write(line)

    rename(output_file, input_file)


def get_duplicates(input_file: str) -> None:
    """Detect duplicate entries in *input_file* and clean it if any exist.

    Comment lines (starting with ``#``) and blank lines are ignored when
    counting duplicates.  When at least one duplicated entry is found the
    file is rewritten in place via :func:`remove_duplicates`; otherwise the
    file is left untouched.

    :param input_file: path to the ``.txt`` file to inspect.
    """
    # Context manager ensures the handle is closed (the original leaked it).
    with open(input_file) as file_open:
        file_lines = file_open.readlines()

    without_comments = [
        line for line in file_lines if not (line.startswith("#") or line == "\n")
    ]

    duplicates = [
        item for item, count in Counter(without_comments).items() if count > 1
    ]
    if duplicates:
        # The original f-string had no placeholder and left `filename`
        # unused; report the actual file being cleaned.
        logger.info(f"Removing duplicates from {basename(input_file)} ...")
        remove_duplicates(input_file)


if __name__ == "__main__":
    DIR = dirname(realpath("__file__"))
    list_of_files = glob(f"{DIR}/lists.d/*.txt")
    for _file in list_of_files:
        get_duplicates(_file)
