import argparse

import ujson

from .core import Checker
from .utils import ids_count


def main():
    """CLI entry point: parse arguments, run the selected deviance checks on
    the input node-list JSON, and export the combined results to a JSON file.

    Reads the whole input file into memory, runs each check whose flag (or
    ``--full``) is set, then writes the aggregated results to ``--output``
    (falling back to ``result.json`` if that path is unwritable).
    """
    parser = argparse.ArgumentParser(description='Ethereum network deviant nodes finder')
    parser.add_argument("infile", help="input json file path", type=argparse.FileType('r', encoding='UTF-8'))
    parser.add_argument("-a", "--aliases", help="checks for nodes with too much different addresses",
                        action="store_true")
    parser.add_argument("-A", "--aliases-threshold", help="maximum number of addresses allowed for a single node",
                        type=int, default=1, metavar="COUNT")
    parser.add_argument("-x", "--ip-aliases", help="checks for nodes with too much different IP addresses",
                        action="store_true")
    parser.add_argument("-X", "--ip-aliases-threshold",
                        help="maximum number of IP addresses allowed for a single node",
                        type=int, default=1, metavar="COUNT")
    parser.add_argument("-r", "--roommates", help="checks for IP addresses with too much different nodes",
                        action="store_true")
    parser.add_argument("-R", "--roommates-threshold", help="maximum number of nodes allowed for a single IP address",
                        type=int, default=1, metavar="COUNT")
    parser.add_argument("-s", "--subnets", help="checks for IP addresses on the same /24 subnet", action="store_true")
    parser.add_argument("-S", "--subnets-threshold",
                        help="maximum number of IP addresses allowed for a single /24 subnet",
                        type=int, default=1, metavar="COUNT")
    parser.add_argument("-c", "--concentration", help="checks for grouped node addresses", action="store_true")
    parser.add_argument("-C", "--concentration-threshold",
                        help="maximum length (in bits) of the shared prefix between 2 successive addresses",
                        type=int, metavar="SIZE")
    parser.add_argument("-i", "--identical", help="checks for nodes sharing the same id", action="store_true")
    parser.add_argument("-I", "--identical-threshold",
                        help="maximum number of nodes sharing the same id allowed",
                        type=int, default=1, metavar="COUNT")
    parser.add_argument("-f", "--full", help="checks for all the above cases", action="store_true")
    parser.add_argument("-q", "--quiet", help="disable stdout messages", action="store_true")
    parser.add_argument("-v", "--verbose", help="displays some additional stats", action="store_true")
    parser.add_argument("-m", "--minified", help="outputs a more concise output suited for an API", action="store_true")
    parser.add_argument("-o", "--output", help="sets the export file path")
    args = parser.parse_args()

    # `infile` is a required positional, so argparse guarantees it is set
    # (it exits with an error otherwise) — no None check needed.
    # Use a context manager so the file is closed even if parsing fails.
    with args.infile as infile:
        data = ujson.load(infile)
    if args.verbose and not args.quiet:
        print(ids_count(data))

    out = {}
    c = Checker(is_quiet=args.quiet, is_minified=args.minified)
    # Each check runs when its own flag or --full is given.  Thresholds that
    # would make every node "deviant" are clamped to a sensible minimum
    # (at least 2 occurrences, or 1 for the identical-id check).
    if args.aliases or args.full:
        out["aliases"] = c.aliases_check(data, max(args.aliases_threshold, 2))
    if args.ip_aliases or args.full:
        out["ip_aliases"] = c.ip_aliases_check(data, max(args.ip_aliases_threshold, 2))
    if args.roommates or args.full:
        out["roommates"] = c.roommates_check(data, max(args.roommates_threshold, 2))
    if args.subnets or args.full:
        out["subnets"] = c.subnet_check(data, max(args.subnets_threshold, 2))
    if args.concentration or args.full:
        # 18 bits is the default shared-prefix length when none is supplied.
        out["concentration"] = c.concentration_check(data,
                                                     args.concentration_threshold
                                                     if args.concentration_threshold is not None
                                                     else 18)
    if args.identical or args.full:
        out["identical"] = c.identical_check(data, max(args.identical_threshold, 1))

    # Candidate export paths: the user-supplied path (with ".json" appended
    # if missing) first, then "result.json" as a fallback.  Previously the
    # fallback was "results.json", inconsistent with the no---output default.
    if args.output is None:
        candidates = ("result.json",)
    else:
        custom = args.output if args.output.lower().endswith(".json") else args.output + ".json"
        candidates = (custom, "result.json")
    for filename in candidates:
        try:
            with open(filename, 'w') as fp:
                ujson.dump(out, fp)
            if not args.quiet:
                print("results exported as", filename)
            break
        except IOError:
            if not args.quiet:
                print("failed to export results as", filename)
    else:
        # Every candidate path failed to open for writing.
        if not args.quiet:
            print("results couldn't be exported")
