import argparse
import logging
import os
import sys

from .config import LOG_LEVEL, EXPORT_DIRECTORY, TMP_EXPORT_DIRECTORY
from .crawl.churn import churn
from .crawl.crawl import run
from .crawl.feed_api import feed_api
from .crawl.utils import do_export
from .data_management.export import default_export, churn_export, tmp_export
from .parse_data import generate_graphs, plot_churn, plot_by_time_of_day

timers = []
stopped = False


def main():
    """Command-line entry point for the Ethereum network nodes crawler.

    Parses CLI arguments and dispatches, in order, to the requested
    actions: crawling, exporting collected data as JSON, generating
    graphs, measuring churn, and feeding the API.
    """
    # Custom log level below DEBUG (10) for very verbose output.
    TRACE = 9

    # Initialize arg parser
    parser = argparse.ArgumentParser(description='Ethereum network nodes crawler')
    parser.add_argument("-r", "--run", help="perform a crawl", action="store_true")
    parser.add_argument("-f", "--flush", help="reset the log file", action="store_true")
    parser.add_argument("-q", "--quiet", help="don't output anything to stdout", action="store_true")
    parser.add_argument("-e", "--export", help="export the data as json", nargs='?', choices=['default', 'date', 'tmp'],
                        const='default')
    parser.add_argument("-g", "--graphs", help="generate graphs", nargs='?', choices=['last', 'multiple', 'churn'],
                        const='last')
    # FIX: help text typo ("specifies" -> "specified").
    parser.add_argument("-c", "--churn", help="run multiple crawls for at least the specified number of hours",
                        type=float)
    parser.add_argument("-a", "--aggregate-churn", help="aggregates the data for '-g churn'", type=int)
    # FIX: replaced placeholder help text "xxx" with a description grounded
    # in what the flag does below (calls feed_api).
    parser.add_argument("-A", "--api", help="feed the collected data to the api", action="store_true")
    args = parser.parse_args()

    # A fresh crawl always starts with a fresh log file.
    args.flush = args.flush or args.run

    # Only set up logging for the actions that actually log.
    if args.run or args.flush or args.export is not None or args.churn is not None:
        logging.addLevelName(TRACE, "TRACE")

        def trace(self, message, *args, **kws):
            # Emit `message` at the custom TRACE level when enabled.
            if self.isEnabledFor(TRACE):
                self._log(TRACE, message, args, **kws)

        logging.Logger.trace = trace
        logformat = ("[%(process)d] %(asctime)s,%(msecs)05.1f %(levelname)s "
                     "(%(funcName)s) %(message)s")
        # FIX: previously `level=logging.getLogger().setLevel(LOG_LEVEL)` —
        # setLevel() returns None, so basicConfig received level=None and
        # only worked through the setLevel side effect. Pass LOG_LEVEL
        # directly instead.
        logging.basicConfig(level=LOG_LEVEL,
                            format=logformat,
                            filename="last.log",
                            filemode='w' if args.flush else 'a')
        if not args.quiet:
            print("Log: last.log")

    if args.run:
        # A crawl implies a default export unless one was requested explicitly.
        if args.export is None:
            args.export = 'default'
        if not args.quiet:
            print("Crawl started, press CTRL+C to abort...")
        run(args.quiet)

    # Graphs read from the directory the export wrote to.
    path = EXPORT_DIRECTORY
    if args.export is not None:
        if args.export == 'default':
            do_export(default_export.export, args.quiet)
        elif args.export == 'date':
            do_export(churn_export.export, args.quiet)
        elif args.export == 'tmp':
            path = TMP_EXPORT_DIRECTORY
            do_export(tmp_export.export, args.quiet)

    if args.graphs is not None:
        if not args.quiet:
            print("Generating graphs...")
        if args.graphs == 'last':
            generate_graphs(path)
        elif args.graphs == 'churn':
            # Aggregation window defaults to 1 when unset or non-positive.
            plot_churn(args.aggregate_churn if args.aggregate_churn is not None and args.aggregate_churn > 0 else 1)
        else:
            # 'multiple': requires at least one exported .json file.
            if any(fname.endswith('.json') for fname in os.listdir(EXPORT_DIRECTORY)):
                plot_by_time_of_day()
            else:
                print("No .json file detected in " + EXPORT_DIRECTORY)

    if args.churn is not None:
        if not args.quiet:
            print("Measuring churn...")
        churn(args.quiet, args.churn)
        # FIX: use sys.exit instead of the site-injected exit() builtin,
        # which is not guaranteed to exist in non-interactive contexts.
        sys.exit(0)

    if args.api:
        if not args.quiet:
            print("Feeding api...")
        feed_api(args.quiet)
