#!/usr/bin/env python3

import asyncio
import glob
import logging
import multiprocessing as mp
import os
import re
import stat
import sys

from argparse import ArgumentParser
from collections import OrderedDict
from graphlib import TopologicalSorter


logger = logging.getLogger(__name__)
MYDIR = os.path.dirname(os.path.abspath(__file__))
VOLATILE_DIRS = ('build', 'r00tfs', 'rootfs', 'xtools', 'stage')


def read_depends(recipe):
    """Parse a recipe and return its package name and dependencies.

    *recipe* is an iterable of text lines (typically an open file) that
    contains marker comments:

        # RECIPE START foo
        # depends: bar
        # depends: baz
        #
        #  make 42
        #
        # RECIPE END foo

    Returns a ``(pkg, deps, runtime)`` tuple where *deps* and *runtime*
    are sorted, de-duplicated tuples of package names — for the example
    above, ``('foo', ('bar', 'baz'), ())``.  Returns ``(None, None, None)``
    when the recipe carries a ``# RECIPE SKIP`` directive.

    Raises ValueError when the input ends without an end marker.
    """
    start_marker = '# RECIPE START'
    end_marker = '# RECIPE END'
    ignore_marker = '# RECIPE SKIP'
    depends_marker = '# depends:'
    runtime_marker = '# runtime:'
    pkg = None
    deps = []
    runtime = []

    for line in recipe:
        stripped_line = line.strip()
        if stripped_line.startswith(start_marker):
            pkg = stripped_line.split(start_marker)[1].strip()
            logger.debug('read_depends: processing recipe "%s"', pkg)
        elif stripped_line.startswith(ignore_marker):
            assert pkg is not None
            logger.info('read_depends: skipping recipe "%s" as requested', pkg)
            return None, None, None
        elif stripped_line.startswith(end_marker):
            assert pkg is not None
            # normalize: unique entries, deterministic order
            deps = tuple(sorted(set(deps)))
            runtime = tuple(sorted(set(runtime)))
            logger.debug('read_depends: "%s" depends on %s', pkg, str(deps))
            return (pkg, deps, runtime)
        elif stripped_line.startswith(depends_marker):
            assert pkg is not None
            req_pkg = stripped_line.split(depends_marker)[1].strip()
            # BUG FIX: log the dependency that was just found, not the
            # accumulated deps list
            logger.debug('read_depends: "%s" found dependency "%s"', pkg, req_pkg)
            deps.append(req_pkg)
        elif stripped_line.startswith(runtime_marker):
            assert pkg is not None
            runtime_pkg = stripped_line.split(runtime_marker)[1].strip()
            runtime.append(runtime_pkg)
            # BUG FIX: same as above -- log the single runtime dependency
            logger.debug('read_depends: "%s" found runtime dependency: "%s"',
                         pkg, runtime_pkg)
        else:
            continue

    assert pkg is not None
    # message fixed to match the actual marker spelling ('RECIPE END')
    raise ValueError('Broken recipe, no RECIPE END marker')


def find_recipes(recipe_dir, suffixes):
    """Return the set of recipe files in *recipe_dir* whose extension is
    one of *suffixes* (e.g. ``('sh',)``)."""
    found = set()
    for suffix in suffixes:
        logger.debug('find_recipes: looking for "*.%s" files in "%s"',
                     suffix, recipe_dir)
        found.update(glob.glob(recipe_dir + '/*.' + suffix))
    return found


def make_depgraph(filenames=None, recipe_dir=None, suffixes=None):
    """Build a dependency graph ``{package: (build-dep, ...)}`` from recipes.

    Either *filenames* (an iterable of recipe file paths) or both
    *recipe_dir* and *suffixes* must be given; in the latter case the
    recipe directory is scanned for matching files.  Dependencies that a
    recipe references but that were not given explicitly are
    auto-discovered as ``<dep>.sh`` files.

    Returns an OrderedDict sorted by package name.
    Raises ValueError when neither source of recipes is specified.
    """
    if filenames is None or len(filenames) == 0:
        if recipe_dir is None or suffixes is None:
            raise ValueError('make_depgraph: either filenames or '
                             'recipe_dir/suffixes must be specified')
        filenames = find_recipes(recipe_dir, suffixes)

    # BUG FIX: work on a private copy so the caller's collection is not
    # consumed by pop(); this also accepts lists/tuples, which have no
    # .update() method
    pending = set(filenames)
    graph = {}
    while pending:
        recipe_filename = pending.pop()
        logger.debug('make_depgraph: processing "%s"', recipe_filename)
        with open(recipe_filename, 'rt') as recipe:
            pkg, deps, runtime_deps = read_depends(recipe)
        if pkg is None:
            # skipped due to RECIPE SKIP directive
            continue
        if pkg in graph:
            logger.debug('make_depgraph: package "%s" is already in graph', pkg)
            continue
        graph[pkg] = deps
        # BUG FIX: when called with filenames only, recipe_dir is None and
        # os.path.join(None, ...) would crash -- fall back to the directory
        # of the recipe that declared the dependency
        dep_dir = recipe_dir if recipe_dir is not None \
            else os.path.dirname(recipe_filename)
        for dep in sorted(set(deps).union(runtime_deps)):
            if dep not in graph:
                fname = os.path.join(dep_dir, dep + '.sh')
                logger.debug('make_depgraph: auto-adding "%s" as dependency of "%s"',
                             fname, pkg)
                pending.add(fname)

    return OrderedDict((pkg, graph[pkg]) for pkg in sorted(graph))


class Bcontext:
    """Build context: knows the source-tree layout and per-package log paths."""

    def __init__(self, srcdir):
        self._srcdir = srcdir
        self._builddir = os.path.abspath(os.path.join(srcdir, 'build'))
        self._recipedir = os.path.abspath(os.path.join(srcdir, 'recipes'))
        self._environ = {}

    @property
    def srcdir(self):
        """Top of the source tree."""
        return self._srcdir

    @property
    def builddir(self):
        """Directory holding build artifacts and logs."""
        return self._builddir

    @property
    def recipedir(self):
        """Directory containing the recipe scripts."""
        return self._recipedir

    def get_logfile(self, pkgname):
        """Return the build-log path for *pkgname*."""
        return os.path.join(self._builddir, pkgname + '.log')

    def clean(self):
        """Delete every volatile (generated) directory under srcdir."""
        for volatile in VOLATILE_DIRS:
            rm_rf(os.path.join(self._srcdir, volatile))

    def prep(self):
        """(Re)create the volatile directories so a build can start."""
        for volatile in VOLATILE_DIRS:
            os.makedirs(os.path.join(self._srcdir, volatile), exist_ok=True)


def rm_rf(thedir):
    """Recursively delete *thedir* and everything below it (like ``rm -rf``).

    A no-op when *thedir* does not exist (or is not a directory).
    """
    if not os.path.isdir(thedir):
        return

    for root, dirs, files, rootfd in os.fwalk(thedir, topdown=False):
        logger.debug('removing files in directory %s', root)
        for name in files:
            os.unlink(name, dir_fd=rootfd)
        for name in dirs:
            # os.fwalk reports symlinks pointing at directories in `dirs`;
            # those must be unlinked, not rmdir'ed
            st = os.lstat(name, dir_fd=rootfd)
            if stat.S_ISLNK(st.st_mode):
                logger.debug('removing symlink "%s"', name)
                os.unlink(name, dir_fd=rootfd)
            else:
                logger.debug('removing subdirectory "%s" in "%s"', name, root)
                os.rmdir(name, dir_fd=rootfd)

    # BUG FIX: the walk above only empties the tree; remove the now-empty
    # top-level directory too, as the name rm_rf promises.  Callers that
    # need it back (Bcontext.prep) recreate it with makedirs(exist_ok=True).
    os.rmdir(thedir)


class Job:
    """A single package build: runs ``<recipedir>/<name>.sh`` and records
    its exit status and log-file location."""

    def __init__(self, pkgname):
        self._name = pkgname
        self._returncode = None   # recipe exit status, set by run()
        self._logfile = None      # build log path, set by run()
        self._processed = False   # True once run() has finished

    @property
    def returncode(self):
        return self._returncode

    @property
    def name(self):
        return self._name

    @property
    def processed(self):
        return self._processed

    @property
    def logfile(self):
        return self._logfile

    async def run(self, context):
        """Execute this job's recipe, sending stdout+stderr to its log file."""
        recipe = os.path.join(context.recipedir, self._name + '.sh')
        # BUG FIX: this was assigned to self._logpath, so the `logfile`
        # property always returned None
        self._logfile = context.get_logfile(self._name)
        logger.debug('running recipe "%s"', recipe)
        with open(self._logfile, 'wb') as logfile:
            proc = await asyncio.create_subprocess_exec(recipe,
                    stdout=logfile,
                    stderr=asyncio.subprocess.STDOUT
            )
            self._returncode = await proc.wait()
            self._processed = True


async def worker(task_queue, finalized_task_queue, context):
    """Queue consumer: build packages taken from *task_queue* forever,
    reporting each finished Job on *finalized_task_queue*."""
    while True:
        pkgname = await task_queue.get()
        job = Job(pkgname)
        logger.info('starting job %s', job.name)
        await job.run(context)
        if job.returncode == 0:
            logger.info('job "%s" completed OK', job.name)
        else:
            logger.error('job "%s" failed, code %d', job.name, job.returncode)
        await finalized_task_queue.put((pkgname, job))
        task_queue.task_done()


async def status_poller(finalized_task_queue, failed_tasks):
    """Drain *finalized_task_queue* forever, appending the names of
    failed jobs to *failed_tasks*."""
    while True:
        pkgname, finished_job = await finalized_task_queue.get()
        finalized_task_queue.task_done()
        if finished_job.returncode == 0:
            continue
        failed_tasks.append(pkgname)


async def run(graph, context):
    """Build every package in *graph* in dependency order.

    Spawns a pool of worker tasks (2x CPU count), feeds them package
    names as the topological sorter marks them ready, and stops
    submitting new work as soon as one job fails.  Returns the number
    of failed jobs (0 on full success).
    """
    max_jobs = max(mp.cpu_count(), 1) * 2
    task_queue = asyncio.Queue()            # package names waiting to be built
    finalized_task_queue = asyncio.Queue()  # (name, Job) completion records
    failed_jobs = []

    # NOTE: these are asyncio tasks, not OS threads, despite the name
    threads = []
    for _ in range(max_jobs):
        thread = asyncio.create_task(worker(task_queue, finalized_task_queue, context))
        threads.append(thread)

    ts = TopologicalSorter(graph)
    ts.prepare()

    while ts.is_active():
        # enqueue everything whose dependencies are already satisfied ...
        for name in ts.get_ready():
            await task_queue.put(name)
            logger.debug('Enqueued task "%s"', name)
        # ... then block until any one job completes
        name, completed_job = await finalized_task_queue.get()
        finalized_task_queue.task_done()
        if completed_job.returncode != 0:
            # first failure aborts scheduling; jobs already running are
            # allowed to finish below
            failed_jobs.append(name)
            logger.error('job "%s" failed, bailing out', name)
            break
        else:
            ts.done(name)
            logger.debug('Finished task "%s"', completed_job.name)

    if len(failed_jobs) > 0:
        logger.info('Waiting for pending tasks')

    # all jobs have been submitted, however some might be in process;
    # hand the result queue to a poller that keeps collecting completions
    # (recording any further failures) while we drain the queues below
    waiter_thread = asyncio.create_task(status_poller(finalized_task_queue, failed_jobs))
    threads.append(waiter_thread)

    # wait for all enqueued jobs to be picked
    logger.debug('All jobs have been submitted')
    await task_queue.join()

    # wait for jobs to complete
    await finalized_task_queue.join()
    if len(failed_jobs) == 0:
        logger.info('All jobs have completed successfully')
    else:
        logger.error('%d jobs have failed: %s',
                     len(failed_jobs), ', '.join(failed_jobs))
    # tear down the (infinite-loop) workers and the poller
    for t in threads:
        t.cancel()
    await asyncio.gather(*threads, return_exceptions=True)
    return len(failed_jobs)


class MyTopologicalSorter(TopologicalSorter):
    """TopologicalSorter whose ready sets come back in deterministic
    (sorted) order, so scheduling is reproducible run-to-run."""

    # The redundant __init__ that only forwarded to super() was removed;
    # the inherited constructor has the identical signature.

    def get_ready(self):
        """Return the currently ready nodes as a sorted tuple."""
        return tuple(sorted(super().get_ready()))


def graph(packages, skip_cross=False):
    """Print the packages of the *packages* graph in topological (build)
    order, one per line, optionally omitting cross/native toolchain
    packages."""
    def is_cross(pkg):
        return pkg.startswith('cross-') or pkg.endswith('-native')

    for pkg in TopologicalSorter(packages).static_order():
        if not (skip_cross and is_cross(pkg)):
            print(pkg)


def main():
    """Command-line entry point; returns the process exit status."""
    logging.basicConfig(level='DEBUG', format='%(asctime)-15s %(message)s')
    # BUG FIX: the first positional argument of ArgumentParser is `prog`
    # (the program name shown in usage), not the description
    parser = ArgumentParser(description='Build BoringLinux')
    parser.add_argument('-g', '--graph', action='store_true',
        help='only print dependency graph, not build')
    parser.add_argument('packages', nargs='*',
        help='build only specified packages and their dependencies')
    parser.add_argument('-n', '--native', action='store_true',
        default=False, help="don't list cross-tools")

    logger.debug('starting up')
    topdir = MYDIR
    context = Bcontext(topdir)
    args = parser.parse_args()
    # NOTE(review): make_depgraph treats `filenames` entries as recipe file
    # paths -- confirm users are expected to pass paths, not bare package
    # names, on the command line
    packages = make_depgraph(filenames=set(args.packages),
                             recipe_dir=os.path.join(topdir, 'recipes'),
                             suffixes=('sh',))
    if args.graph:
        graph(packages, args.native)
        return 0
    context.clean()
    context.prep()
    return asyncio.run(run(packages, context))


if __name__ == '__main__':
    # propagate main()'s return value as the process exit status
    sys.exit(main())
