#!/usr/bin/env python3
# Copyright (c) 2008-2014 LG Electronics, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import argparse
import errno
import logging
import os
import subprocess
import sys
import re
from time import gmtime, strftime
import shutil
import glob

__version__ = "5.1.0"

logger = logging.getLogger(__name__)

# Module-wide mutable state.  The tables below are filled in by
# read_weboslayers()/parsesubmissions() and consumed by the update and
# configure steps; keys are layer names unless noted otherwise.
submodules = {}             # NOTE(review): not referenced in this chunk -- verify against the rest of the file
CLEAN = False               # set by enable_clean(): wipe local changes/untracked files during update
TRACE = False               # set by enable_trace(): prefix shell commands with 'set -x'
REMOTE = "origin"           # git remote name used for every fetch/checkout/reset
SSTATE_MIRRORS = ''         # SSTATE_MIRRORS conf snippet built by process_sstatemirror_option()
WEBOSLAYERS = []            # layer names, in the order listed in weboslayers.py
LAYERSPRIORITY = {}         # layer -> numeric priority (-1 disables the layer in bblayers.conf)
SUBMISSIONS = {}            # layer -> raw 'branch=..,commit=..,tag=..' spec string
LOCATIONS = {}              # layer -> working directory of its checkout
URLS = {}                   # layer -> git clone URL
PRIORITYORDER = []          # all priorities, sorted highest-first by read_weboslayers()
COLLECTION_NAME = {}        # layer -> BBFILE_COLLECTIONS value parsed from its layer.conf
COLLECTION_PATH = {}        # layer -> layer directory relative to its location's parent
SUMMARYINFO = {}            # layer -> bitmask of local changes (1 stashed, 2 committed patches, 4 untracked)
BRANCHINFONEW = {}          # layer -> branch requested in weboslayers.py ('master' when unset)
BRANCHINFOCURRENT = {}      # layer -> branch currently checked out
COMMITIDSNEW = {}           # layer -> commit id requested in weboslayers.py
COMMITIDSCURRENT = {}       # layer -> commit id currently checked out
TAGSINFONEW = {}            # layer -> tag requested in weboslayers.py
REPOPATCHDIR = {}           # layer -> directory where local patches were backed up
DISTRO = None               # Distribution value imported from weboslayers.py
SUPPORTED_MACHINES = []     # Machines value imported from weboslayers.py

def echo_check_call(todo, verbosity=False):
    """Run *todo* through the shell and return its stdout decoded as UTF-8.

    When *verbosity* is requested (or module-wide tracing is enabled) the
    command is prefixed with ``set -x`` so the shell echoes what it runs.
    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    command = 'set -x; ' + todo if (verbosity or TRACE) else todo

    logger.debug(command)

    raw = subprocess.check_output(command, shell=True)
    return str(raw, encoding='utf-8', errors='strict')

def enable_trace():
    """Turn on shell tracing: subsequent echo_check_call() invocations run
    their commands with ``set -x``."""
    global TRACE
    TRACE = True

def enable_clean():
    """Switch mcf into clean, non-interactive mode.

    Sets the module-wide CLEAN flag; the update steps will then discard
    all local modifications and untracked files in the layer checkouts.
    """
    # logger.warn() has been deprecated since Python 3.3; use warning().
    logger.warning('Running in clean non-interactive mode, all possible local changes and untracked files will be removed')
    global CLEAN
    CLEAN = True

def set_log_level(level):
    """Attach a console handler at *level* to the root logger.

    The module logger itself is opened up to DEBUG, so the handler level
    alone decides what actually reaches the console.
    """
    logging.getLogger(__name__).setLevel(logging.DEBUG)

    formatter = logging.Formatter('%(asctime)s %(levelname)s %(name)s %(message)s', datefmt='%Y-%m-%dT%H:%M:%S')

    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(formatter)

    logging.getLogger('').addHandler(handler)

# Essentially, mcf parses options, creates mcf.status, and runs mcf.status.

def process_file(f, replacements):
    """Copy a template file to its destination, applying substring replacements.

    f            -- (input-path, output-path) pair
    replacements -- iterable of (old, new) substrings, applied in order

    The output directory is created on demand, including intermediate
    directories (the previous os.mkdir() call failed for nested paths).
    """
    (ifile, ofile) = f
    with open(ifile, 'r') as infile:
        status = infile.read()

    for old, new in replacements:
        status = status.replace(old, new)

    odir = os.path.dirname(ofile)
    if odir:
        # makedirs handles nested paths; exist_ok avoids a check-then-create race
        os.makedirs(odir, exist_ok=True)
    with open(ofile, 'w') as outfile:
        outfile.write(status)

def getopts():
    """Parse the mcf command line and return the resulting options namespace.

    The --command value is parsed first with a throw-away parser because it
    decides which option groups the real parser exposes: the configure/build
    options and the positional MACHINE list exist only for the 'configure'
    and 'update+configure' commands.
    """
    mcfcommand_option = '--command'
    mcfcommand_dest = 'mcfcommand'
    # be careful when changing this, jenkins-job.sh is doing
    # grep "mcfcommand_choices = \['configure', 'update', "
    # to detect if it needs to explicitly run --command update after default action
    mcfcommand_choices = ['configure', 'update', 'update+configure']
    mcfcommand_default = 'update+configure'

    # Just parse the --command argument here, so that we can select a parser
    mcfcommand_parser = argparse.ArgumentParser(add_help=False)
    mcfcommand_parser.add_argument(mcfcommand_option, dest=mcfcommand_dest, choices=mcfcommand_choices, default=mcfcommand_default)
    mcfcommand_parser_result = mcfcommand_parser.parse_known_args()
    mcfcommand = mcfcommand_parser_result[0].mcfcommand

    # Put --command back in (as the first option) so that the main parser sees everything
    arglist = [mcfcommand_option, mcfcommand] + mcfcommand_parser_result[1]

    parser = argparse.ArgumentParser()

    general = parser.add_argument_group('General Options')

    verbosity = general.add_mutually_exclusive_group()

    # typo fixed: 'tree times' -> 'three times'
    verbosity.add_argument('-s', '--silent',  action='count', help='work silently, repeat the option twice to hide also the warnings, three times to hide the errors as well')
    verbosity.add_argument('-v', '--verbose', action='count', help='work verbosely, repeat the option twice for more debug output')

    general.add_argument('-c', '--clean', dest='clean', action='store_true', default=False, help='clean checkout - WARN: removes all local changes')
    general.add_argument('-V', '--version', action='version', version='%(prog)s {0}'.format(__version__), help='print version and exit')

    general.add_argument(mcfcommand_option, dest=mcfcommand_dest, choices=mcfcommand_choices, default=mcfcommand_default,
                              help='command to mcf; if update is given, none of the remaining options nor MACHINE can be specified (default: %(default)s)')

    if mcfcommand in ('configure', 'update+configure'):
        variations = parser.add_argument_group('Build Instructions')

        variations.add_argument('-p', '--enable-parallel-make', dest='parallel_make', type=int, default=0,
                                help='maximum number of parallel tasks each submake of bitbake should spawn (default: 0 = 2x the number of processor cores)')

        variations.add_argument('-b', '--enable-bb-number-threads', dest='bb_number_threads', type=int, default=0,
                                help='maximum number of bitbake tasks to spawn (default: 0 = 2x the number of processor cores))')

        icecc = parser.add_argument_group('ICECC Configuration')

        icecc.add_argument('--enable-icecc', dest='enable_icecc', action='store_true', default=True,
                           help='enable build to use ICECC (default: True)')

        icecc.add_argument('--disable-icecc', dest='disable_icecc', action='store_true', default=False,
                           help='disable build from using ICECC (default: False = use ICECC)')

        icecc.add_argument('--enable-icecc-parallel-make', dest='icecc_parallel_make', type=int, default=0,
                           help='Number of parallel threads for ICECC build (default: 0 = 4x the number of processor cores))')

        icecc_advanced = parser.add_argument_group('ICECC Advanced Configuration')

        icecc_advanced.add_argument('--enable-icecc-user-package-blacklist', dest='icecc_user_package_blacklist', action='append',
                           help='Space separated list of components/recipes to be excluded from using ICECC  (default: None)')

        icecc_advanced.add_argument('--enable-icecc-user-class-blacklist', dest='icecc_user_class_blacklist', action='append',
                           help='Space separated list of components/recipes class to be excluded from using ICECC  (default: None)')

        icecc_advanced.add_argument('--enable-icecc-user-package-whitelist', dest='icecc_user_package_whitelist', action='append',
                           help='Space separated list of components/recipes to be forced to use ICECC  (default: None)')

        icecc_advanced.add_argument('--enable-icecc-location', dest='icecc_location', default='',
                           help='location of ICECC tool  (default: None)')

        icecc_advanced.add_argument('--enable-icecc-env-exec', dest='icecc_env_exec', default='',
                           help='location of ICECC environment script  (default: None)')

        # placeholder group, currently without any arguments
        partitions = parser.add_argument_group('Source Identification')

        mirrors = parser.add_argument_group('Networking and Mirrors')

        network = mirrors.add_mutually_exclusive_group()

        network.add_argument('--disable-network', dest='network', action='store_false', default=True,
                             help='disable fetching through the network (default: False)')

        network.add_argument('--enable-network', dest='network', action='store_true', default=True,
                             help='enable fetching through the network (default: True)')

        mirrors.add_argument('--sstatemirror', dest='sstatemirror', action='append',
                             help='set sstatemirror to specified URL, repeat this option if you want multiple sstate mirrors (default: None)')

        premirrorurl = mirrors.add_mutually_exclusive_group()
        default_premirror = 'http://downloads.yoctoproject.org/mirror/sources'
        premirrorurl.add_argument('--enable-default-premirror', dest='premirror', action='store_const', const=default_premirror, default="",
                                  help='enable default premirror URL (default: False)')
        premirrorurl.add_argument('--premirror', '--enable-premirror', dest='premirror', default='',
                                  help='set premirror to specified URL (default: None)')

        premirroronly = mirrors.add_mutually_exclusive_group()
        # help strings fixed: these used to be copy-pasted from --disable/--enable-network
        premirroronly.add_argument('--disable-fetch-premirror-only', dest='fetchpremirroronly', action='store_false', default=False,
                                   help='allow fetching from upstream repositories, not only from premirrors (default: False)')

        premirroronly.add_argument('--enable-fetch-premirror-only', dest='fetchpremirroronly', action='store_true', default=False,
                                   help='fetch only from premirrors, never from upstream repositories (default: False)')

        tarballs = mirrors.add_mutually_exclusive_group()
        # help string fixed: the default really is False (no tarball generation)
        tarballs.add_argument('--disable-generate-mirror-tarballs', dest='generatemirrortarballs', action='store_false', default=False,
                              help='disable tarball generation of fetched components (default: False)')

        tarballs.add_argument('--enable-generate-mirror-tarballs', dest='generatemirrortarballs', action='store_true', default=False,
                              help='generate tarballs suitable for mirroring (default: False)')

        buildhistory = parser.add_argument_group('Buildhistory')

        buildhistory1 = buildhistory.add_mutually_exclusive_group()

        buildhistory1.add_argument('--disable-buildhistory', dest='buildhistory', action='store_false', default=True,
                                  help='disable buildhistory functionality (default: False)')

        buildhistory1.add_argument('--enable-buildhistory', dest='buildhistory', action='store_true', default=True,
                                  help='enable buildhistory functionality (default: True)')

        buildhistory.add_argument('--enable-buildhistoryauthor', dest='buildhistoryauthor', default='', help='specify name and email used in buildhistory git commits (default: none, will use author from git global config)')

        parser.add_argument('MACHINE', nargs='+')

    options = parser.parse_args(arglist)
    if mcfcommand in ('configure', 'update+configure') and options.sstatemirror:
        process_sstatemirror_option(options)
    return options

def process_sstatemirror_option(options):
    """Build the global SSTATE_MIRRORS assignment from options.sstatemirror.

    The '/PATH' suffix is appended automatically to every mirror URL, so
    user-supplied entries must end with neither '/PATH' nor a bare '/';
    an offending entry aborts mcf with exit status 1.
    """
    global SSTATE_MIRRORS
    parts = ["SSTATE_MIRRORS ?= \" \\\n"]
    for mirror in options.sstatemirror:
        if mirror.endswith("/PATH"):
            logger.error("sstatemirror entry '%s', already ends with '/PATH', remove that" % mirror)
            sys.exit(1)
        if mirror.endswith("/"):
            logger.error("sstatemirror entry '%s', ends with '/', remove that" % mirror)
            sys.exit(1)
        parts.append("file://.* %s/PATH \\n \\\n" % mirror)
    parts.append("\"\n")
    SSTATE_MIRRORS = "".join(parts)

def _icecc_installed():
    try:
        # Note that if package is not installed following call will throw an exception
        iceinstallstatus,iceversion = subprocess.check_output("dpkg-query -W icecc" ,
                                                           shell=True,
                                                           universal_newlines=True).split()
        # We are expecting icecc for the name
        if 'icecc' == iceinstallstatus:
             if '1.0.1-1' == iceversion:
                 return True
             else:
                 logger.warn("WARNING: Wrong icecc package version {} is installed, disabling build from using ICECC.\n".format(iceversion) + \
                             "Please check 'How To Install ICECC on Your Workstation (Client)'\n" + \
                             "http://wiki.lgsvl.com/pages/viewpage.action?pageId=96175316")
                 return False
        else:
             logger.warn('WARNING: ICECC package installation check failed, disabling build from using ICECC.')
             return False

    except:
        logger.warn('WARNING: ICECC package installation check failed, disabling build from using ICECC.')
        return False

def location_to_dirname(location):
    """Return the last path component of *location* without its extension,
    e.g. 'git://host/repo.git' -> 'repo'."""
    last_component = location.split('/')[-1]
    return os.path.splitext(last_component)[0]

def read_weboslayers(path):
    """Load the layer configuration from <path>/weboslayers.py into module globals.

    Populates WEBOSLAYERS, PRIORITYORDER, LAYERSPRIORITY, URLS, SUBMISSIONS
    and LOCATIONS (and, via parsesubmissions(), the branch/commit/tag tables),
    then sets DISTRO and SUPPORTED_MACHINES.

    Raises Exception when weboslayers.py is missing or when a layer has
    neither a URL nor an alternative working directory.
    """
    sys.path.insert(0,path)
    if not os.path.isfile(os.path.join(path,'weboslayers.py')):
        raise Exception("Error:" 'Configuration file %s does not exist!' % os.path.join(path,'weboslayers.py'))

    from weboslayers import webos_layers

    for p in webos_layers:
        # p = (name, priority, url, submission-spec, alternative-location)
        WEBOSLAYERS.append(p[0])
        PRIORITYORDER.append(p[1])
        LAYERSPRIORITY[p[0]] = p[1]
        URLS[p[0]] = p[2]
        SUBMISSIONS[p[0]] = p[3]
        parsesubmissions(p[0])
        LOCATIONS[p[0]] = p[4]
        if not URLS[p[0]] and not LOCATIONS[p[0]]:
            # Bug fix: the layer name was never substituted into the message
            # (and the stray ')' inside the string is gone).
            raise Exception("Error:" 'Layer %s does not have either URL or alternative working-dir defined in weboslayers.py' % p[0])
        if not LOCATIONS[p[0]]:
            LOCATIONS[p[0]] = location_to_dirname(URLS[p[0]])

    # highest priority first; consumed by write_bblayers_conf()
    PRIORITYORDER.sort()
    PRIORITYORDER.reverse()

    from weboslayers import Distribution
    global DISTRO
    DISTRO = Distribution

    from weboslayers import Machines
    global SUPPORTED_MACHINES
    SUPPORTED_MACHINES = Machines

def parsesubmissions(layer):
    """Parse SUBMISSIONS[layer] ('branch=..,commit=..,tag=..') into the
    BRANCHINFONEW / COMMITIDSNEW / TAGSINFONEW tables.

    The first 'commit' and 'tag' entries win, the last 'branch' entry wins,
    and the branch falls back to 'master' when none is given.
    """
    branch = ''
    commit = ''
    tag = ''
    for entry in SUBMISSIONS[layer].split(','):
        if not entry:
            continue
        key, value = entry.split('=')
        key = key.lower()
        if key == 'commit' and not commit:
            commit = value
        elif key == 'branch':
            branch = value
        elif key == 'tag' and not tag:
            tag = value

    BRANCHINFONEW[layer] = branch or 'master'
    COMMITIDSNEW[layer] = commit
    TAGSINFONEW[layer] = tag

def downloadrepo(layer):
    """Clone the layer's repository and check out the requested branch,
    commit or tag.

    Uses URLS/LOCATIONS/BRANCHINFONEW/COMMITIDSNEW/TAGSINFONEW filled in by
    read_weboslayers().  The checkout preference order is: branch first,
    then an optional commit (a plain id or a Gerrit 'refs/changes/' ref),
    then an optional tag.  The working directory is restored on return.
    """
    cmd = 'git clone %s %s' % (URLS[layer], LOCATIONS[layer])
    echo_check_call(cmd)

    olddir = os.getcwd()
    os.chdir(LOCATIONS[layer])
    newbranch = BRANCHINFONEW[layer]

    if newbranch:
        # look for the branch among local branches first
        refbranchlist = echo_check_call("git branch")
        refbranch = refbranchlist.splitlines()
        foundbranch = False
        for ibranch in refbranch:
            if newbranch in ibranch:
                foundbranch = True
        if not foundbranch:
            # not local: create it from the matching remote-tracking branch
            # ('git branch -r' output lines look like '  origin/<branch>')
            refbranchlist = echo_check_call("git branch -r")
            refbranch = refbranchlist.splitlines()
            for ibranch in refbranch:
                if ibranch == "  %s/%s" % (REMOTE, newbranch):
                    foundbranch = True
                    logger.info( " found %s " % ibranch )
                    cmd ='git checkout -B %s %s' % (newbranch,ibranch)
                    echo_check_call(cmd)
                    break

    currentbranch = echo_check_call("git rev-parse --abbrev-ref HEAD").rstrip()
    newcommitid = COMMITIDSNEW[layer]
    if newcommitid:
        # 'refs/changes/' refs must be fetched explicitly before checkout
        if newcommitid.startswith('refs/changes/'):
            if newbranch and newbranch != currentbranch:
                # older git doesn't allow to update reference on currently checked out branch
                cmd ='git fetch %s %s && git checkout -B %s FETCH_HEAD' % (REMOTE, newcommitid, newbranch)
            elif newbranch:
                # we're already on requested branch
                cmd ='git fetch %s %s && git reset --hard FETCH_HEAD' % (REMOTE, newcommitid)
            else:
                # we don't have any branch preference use detached
                cmd ='git fetch %s %s && git checkout FETCH_HEAD' % (REMOTE, newcommitid)
            echo_check_call(cmd)
        else:
            if newbranch and newbranch != currentbranch:
                # older git doesn't allow to update reference on currently checked out branch
                cmd ='git checkout -B %s %s' % (newbranch,newcommitid)
            elif newbranch:
                # we're already on requested branch
                cmd ='git reset --hard %s' % newcommitid
            else:
                # we don't have any branch preference use detached
                cmd ='git checkout %s' % newcommitid
            echo_check_call(cmd)

    newtag = TAGSINFONEW[layer]
    if newtag:
        if newbranch and newbranch != currentbranch:
            # older git doesn't allow to update reference on currently checked out branch
            cmd ='git checkout -B %s %s' % (newbranch,newtag)
        elif newbranch:
            # we're already on requested branch
            cmd ='git reset --hard %s' % newtag
        else:
            cmd ='git checkout %s' % newtag
        echo_check_call(cmd)

    os.chdir(olddir)

def parselayerconffile(layer, layerconffile):
    """Extract the BBFILE_COLLECTIONS value from a layer.conf file and record
    it in COLLECTION_NAME[layer].

    If several lines assign the variable, the last one wins.
    """
    with open(layerconffile, 'r') as f:
        lines = f.readlines()
    for line in lines:
        if re.search( 'BBFILE_COLLECTIONS.*=' , line):
            # Split on the first '=' only: the old rsplit('=') without a
            # maxsplit crashed whenever the value itself contained '='.
            (dummy, collectionname) = line.split('=', 1)
            collectionname = collectionname.strip()
            collectionname = collectionname.strip("\"")
            COLLECTION_NAME[layer] = collectionname
            logger.debug("parselayerconffile(%s,%s) -> %s" % (layer, layerconffile, COLLECTION_NAME[layer]))

def traversedir(layer, root):
    """Walk *root* looking for a 'layer.conf' inside a directory whose parent
    directory is named *layer*; record the layer path and parse the file.

    COLLECTION_PATH[layer] becomes the layer directory's path relative to
    the parent of *root*.
    """
    for path, dirs, files in os.walk(root):
        if os.path.basename(os.path.dirname(path)) != layer:
            continue
        if 'layer.conf' not in files:
            continue
        COLLECTION_PATH[layer] = os.path.relpath(os.path.dirname(path), os.path.dirname(root))
        logger.debug("traversedir(%s,%s) -> %s" % (layer, root, COLLECTION_PATH[layer]))
        parselayerconffile(layer, os.path.join(path, 'layer.conf'))

def parse_collections(srcdir):
    """Fill COLLECTION_NAME/COLLECTION_PATH for every configured layer.

    *srcdir* is accepted for interface compatibility but not used here.
    Raises Exception when a layer's working directory does not exist.
    """
    for layer in WEBOSLAYERS:
        location = LOCATIONS[layer]
        if not os.path.exists(location):
            raise Exception("Error:", "directory '%s' does not exist, you probably need to call update" % location)
        traversedir(layer, location)

def write_bblayers_conf(sourcedir):
    """Append the per-layer *_LAYER variables, the BBLAYERS list and the
    BBFILE_PRIORITY entries to <sourcedir>/conf/bblayers.conf.

    Layers are emitted in PRIORITYORDER; layers with priority -1 are
    skipped entirely and only priorities > 0 get a BBFILE_PRIORITY line.
    """
    # 'with' replaces the old bare 'f.close' (missing parentheses), which
    # never actually closed the file.
    with open(os.path.join(sourcedir, "conf", "bblayers.conf"), 'a') as f:
        f.write('\n')
        for layer in _layers_by_priority():
            leftside = layer.replace('-', '_').upper()
            if os.path.isabs(LOCATIONS[layer]):
                line = "%s_LAYER ?= \"%s/%s\"" % (leftside, LOCATIONS[layer], COLLECTION_PATH[layer])
            else:
                line = "%s_LAYER ?= \"${TOPDIR}/%s\"" % (leftside, COLLECTION_PATH[layer])
            f.write(line)
            f.write('\n')
        f.write('\n')
        f.write('BBFILES ?= ""\n')
        f.write('BBLAYERS ?= " \\')
        f.write('\n')
        for layer in _layers_by_priority():
            f.write("   ${%s_LAYER} \\" % layer.replace('-', '_').upper())
            f.write('\n')
        f.write('  "')
        f.write('\n')
        for layer in LAYERSPRIORITY:
            if LAYERSPRIORITY[layer] <= 0:
                continue
            f.write("BBFILE_PRIORITY_%s = \"%s\"" % (COLLECTION_NAME[layer], LAYERSPRIORITY[layer]))
            f.write('\n')

def _layers_by_priority():
    """Yield enabled layers (priority != -1) following PRIORITYORDER, each
    at most once; ties are resolved by LAYERSPRIORITY insertion order."""
    emitted = set()
    for priority in PRIORITYORDER:
        for layer in LAYERSPRIORITY:
            if LAYERSPRIORITY[layer] == -1 or layer in emitted:
                continue
            if LAYERSPRIORITY[layer] == priority:
                emitted.add(layer)
                yield layer
                break

def update_layers(sourcedir):
    """Clone missing layer repositories and update existing ones, then log
    a summary of any local changes that were found."""
    logger.info('MCF-%s: Updating build directory' % __version__)
    dirty_layers = list()
    seen_locations = list()
    for layer in WEBOSLAYERS:
        location = LOCATIONS[layer]
        if location in seen_locations:
            # several layers can share one checkout; process it only once
            continue
        seen_locations.append(location)
        if os.path.exists(os.path.abspath(location)):
            # existing checkout: look for local changes, then bring it up to date
            if reposanitycheck(layer) != 0:
                dirty_layers.append(layer)
            updaterepo(layer)
        else:
            downloadrepo(layer)

    if dirty_layers:
        logger.info('Found local changes for repos(s) %s' % dirty_layers)

    printupdatesummary()

def printupdatesummary():
    """Log a human-readable summary of the local modifications recorded per
    layer in SUMMARYINFO (bit 1: stashed uncommitted changes, bit 2:
    committed patches, bit 4: untracked files)."""
    logger.info('Repo Update Summary')
    logger.info('===================')
    if not SUMMARYINFO:
        logger.info('No local changes found')
    for layer, mstatus in SUMMARYINFO.items():
        status = int(mstatus)
        logger.info('[%s] has the following changes:' % layer)
        if status & 1:
            logger.info('    *) local uncommitted changes, use \'git stash pop\' to retrieve')
        if status & 2:
            logger.info('    *) local committed changes, patches are backed up in %s/' % REPOPATCHDIR[layer])
        if status & 4:
            logger.info('    *) local untracked changes')
        if BRANCHINFONEW[layer] != BRANCHINFOCURRENT[layer]:
            logger.info('    *) switched branches from %s to %s' % (BRANCHINFOCURRENT[layer], BRANCHINFONEW[layer]))

def get_remote_branch(newbranch, second_call=False):
    """Return '<REMOTE>/<newbranch>' when that remote-tracking branch exists,
    otherwise None.

    On the first miss the remote is refreshed with 'git remote update' and
    the lookup is retried exactly once.
    """
    # 'git branch -r' prints each branch indented by two spaces
    wanted = "  %s/%s" % (REMOTE, newbranch)
    for candidate in echo_check_call("git branch -r").splitlines():
        if candidate == wanted:
            return candidate.strip()
    if second_call:
        return None
    # try it again after "git remote update"
    echo_check_call("git remote update")
    return get_remote_branch(newbranch, True)
      
def reposanitycheck(layer):
    """Inspect (and in CLEAN mode, scrub) the layer's checkout before update.

    In CLEAN mode all local changes and untracked files are discarded.
    Otherwise, uncommitted changes are stashed (bit 1 in the result mask),
    untracked files are noted (bit 4), and local commits on the requested
    branch are exported as patches (bit 2) into an MCF-PATCHES_* directory.
    Also verifies the remote and its URL, and makes the requested branch
    exist locally and track its remote counterpart.

    Returns True when any local change was found; the bitmask is stored in
    SUMMARYINFO[layer].  Raises Exception when the remote is missing, the
    branch cannot be resolved, or no branch name is defined.
    The working directory is restored on return.
    """
    olddir = os.getcwd()
    os.chdir(LOCATIONS[layer])

    BRANCHINFOCURRENT[layer] = echo_check_call("git rev-parse --abbrev-ref HEAD").rstrip()

    # res: whether any local change was found; msgs: bitmask for SUMMARYINFO
    res = False
    msgs = 0

    if CLEAN:
        if echo_check_call("git status --porcelain -s"):
            logger.warn('Removing all local changes and untracked files in [%s]' % layer)
            # abort rebase if git pull --rebase from update_layers got stuck on some local commit
            try:
                echo_check_call("git rebase --abort")
            except subprocess.CalledProcessError:
                # we can ignore this one
                pass

            echo_check_call("git stash clear")
            echo_check_call("git clean -fdx")
            echo_check_call("git reset --hard")
    else:
        logger.info('Checking for local changes in [%s]' % layer)
        if echo_check_call("git status --porcelain --u=no -s"):
            logger.warn('Found local uncommited changes in [%s]' % layer)
            msgs += 1
            echo_check_call("git stash")
            res = True

        # untracked files, ignoring earlier MCF-PATCHES_* backup directories
        if echo_check_call("git status --porcelain -s | grep -v '^?? MCF-PATCHES_' || true"):
            logger.warn('Found local untracked changes in [%s]' % layer)
            msgs += 4
            res = True

    try:
        remote = echo_check_call('git remote | grep "^%s$"' % REMOTE)
    except subprocess.CalledProcessError:
        remote = ''

    if not remote:
        logger.error("Layer %s doesn't have the remote '%s'" % (layer, REMOTE))
        raise Exception("Layer %s doesn't have the remote '%s'" % (layer, REMOTE))

    try:
        urlcurrent = echo_check_call("git config remote.%s.url" % REMOTE)
    except subprocess.CalledProcessError:
        # git config returns 1 when the option isn't set
        urlcurrent = ''

    # there is extra newline at the end
    urlcurrent = urlcurrent.strip()

    logger.debug("reposanitycheck(%s) dir %s, branchinfo %s, branchinfonew %s, url %s, urlnew %s" % (layer, LOCATIONS[layer], BRANCHINFOCURRENT[layer], BRANCHINFONEW[layer], URLS[layer], urlcurrent))

    if urlcurrent != URLS[layer]:
        logger.warn("Changing url for remote '%s' from '%s' to '%s'" % (REMOTE, urlcurrent, URLS[layer]))
        echo_check_call("git remote set-url %s %s" % (REMOTE, URLS[layer]))
        # Sync with new remote repo
        try:
            echo_check_call('git remote update')
        except subprocess.CalledProcessError:
            raise Exception('Failed to fetch %s repo' % layer)

    newbranch = BRANCHINFONEW[layer]
    if newbranch:
        # 'git branch' marks the current branch with '* ', others with '  '
        refbranchlist = echo_check_call("git branch")
        refbranch = refbranchlist.splitlines()
        foundlocalbranch = False
        needcheckout = True
        for ibranch in refbranch:
            if ibranch == "  %s" % newbranch:
                foundlocalbranch = True
                break
            if ibranch == "* %s" % newbranch:
                foundlocalbranch = True
                needcheckout = False
                break

        remotebranch = get_remote_branch(newbranch)

        if foundlocalbranch and remotebranch:
            if needcheckout:
                echo_check_call('git checkout %s' % newbranch)

            # back up local commits (relative to the remote branch) as patches
            # NOTE(review): 'timestamp' is not defined in this chunk -- presumably
            # a module-level value set elsewhere in the file; verify.
            head = echo_check_call("git rev-parse --abbrev-ref HEAD").rstrip()
            patchdir = './MCF-PATCHES_%s-%s' % (head.replace('/','_'), timestamp)
            REPOPATCHDIR[layer] = "%s/%s" % (LOCATIONS[layer], patchdir)
            cmd ='git format-patch %s..%s -o %s' % (remotebranch,newbranch,patchdir)
            rawpatches = echo_check_call(cmd)
            patches = rawpatches.splitlines()
            num = len(patches)
            # logger.info( ' info: number of patches: %s ' % num)
            if num > 0:
                msgs += 2
                res = True
            else:
                # remove empty dir if there weren't any patches created by format-patch
                cmd ='rmdir --ignore-fail-on-non-empty %s' % patchdir
                echo_check_call(cmd)

            try:
                trackingbranch = echo_check_call("git config --get branch.%s.merge" % newbranch)
            except subprocess.CalledProcessError:
                # git config returns 1 when the option isn't set
                trackingbranch = ''

            try:
                trackingremote = echo_check_call("git config --get branch.%s.remote" % newbranch)
            except subprocess.CalledProcessError:
                # git config returns 1 when the option isn't set
                trackingremote = ''

            # there is extra newline at the end
            trackingbranch = trackingbranch.strip()
            trackingremote = trackingremote.strip()

            if not trackingbranch or not trackingremote or trackingbranch.replace('refs/heads',trackingremote) != remotebranch:
                logger.warn("layer %s was tracking '%s/%s' changing it to track '%s'" % (layer, trackingremote, trackingbranch, remotebranch))
                # to ensure we are tracking remote
                echo_check_call('git branch --set-upstream %s %s' % (newbranch, remotebranch))

        elif not foundlocalbranch and remotebranch:
            echo_check_call('git checkout -b %s %s' % (newbranch, remotebranch))
        else:
            # anything else is failure
            raise Exception('Could not find local and remote branches for %s' % newbranch)
    else:
            raise Exception('Undefined branch name')

    if res:
        SUMMARYINFO[layer] = msgs

    # os.chdir() returns None; 'newdir' is unused
    newdir = os.chdir(olddir)
    return res

# Taken from bitbake/lib/bb/fetch2/git.py with modifications for mcf usage
def contains_ref(tag):
    """Return True when *tag* resolves to at least one commit in the current
    repository (checked via 'git log ... | wc -l').

    Raises Exception when the command output has more than one token.
    """
    cmd = "git log --pretty=oneline -n 1 %s -- 2>/dev/null | wc -l" % (tag)
    output = echo_check_call(cmd)
    tokens = output.split()
    if len(tokens) > 1:
        raise Exception("Error: '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
    return tokens[0] != "0"

def updaterepo(layer):
    """Bring an existing layer checkout up to the requested revision.

    Compares the current HEAD commit with COMMITIDSNEW[layer]: if a specific
    commit is requested it is checked out (fetching Gerrit 'refs/changes/'
    refs explicitly, and fetching when a plain id is not present locally);
    with no commit preference the branch is pulled (or, in CLEAN mode,
    hard-reset to the remote branch).  The working directory is restored
    on return.
    """
    olddir = os.getcwd()
    os.chdir(LOCATIONS[layer])

    # short hash of the current HEAD
    COMMITIDSCURRENT[layer] = echo_check_call("git  log --pretty=format:%h -1")

    newcommitid = COMMITIDSNEW[layer]
    currentcommitid = COMMITIDSCURRENT[layer]
    newbranch = BRANCHINFONEW[layer]
    currentbranch = BRANCHINFOCURRENT[layer]

    logger.debug("updaterepo(%s) dir %s, id %s, newid %s, branch %s, newbranch %s" % (layer, LOCATIONS[layer], currentcommitid, newcommitid, currentbranch, newbranch))

    if newcommitid != currentcommitid:
        logger.info('Updating [%s]' % layer)
        if newcommitid:
            if newcommitid.startswith('refs/changes/'):
                if newbranch and newbranch != currentbranch:
                    # older git doesn't allow to update reference on currently checked out branch
                    cmd ='git fetch %s %s && git checkout -B %s FETCH_HEAD' % (REMOTE, newcommitid, newbranch)
                elif newbranch:
                    # we're already on requested branch
                    cmd ='git fetch %s %s && git reset --hard FETCH_HEAD' % (REMOTE, newcommitid)
                else:
                    # we don't have any branch preference use detached
                    cmd ='git fetch %s %s && git checkout FETCH_HEAD' % (REMOTE, newcommitid)
                echo_check_call(cmd)
            else:
                # make sure the commit exists locally before checking it out
                if not contains_ref(newcommitid):
                    echo_check_call('git fetch')
                if newbranch and newbranch != currentbranch:
                    # older git doesn't allow to update reference on currently checked out branch
                    cmd ='git checkout -B %s %s' % (newbranch,newcommitid)
                elif newbranch:
                    # we're already on requested branch
                    cmd ='git reset --hard %s' % newcommitid
                else:
                    # we don't have any branch preference use detached
                    cmd ='git checkout %s' % newcommitid
                echo_check_call(cmd)
        else:
            if CLEAN:
                echo_check_call("git remote update")
                echo_check_call('git reset --hard %s/%s' % (REMOTE, newbranch))
            else:
                # current branch always tracks a remote one
                echo_check_call('git pull %s' % REMOTE)
        logger.info('Done updating [%s]' % layer)
    else:
        logger.info(('[%s] is up-to-date.' % layer))

    # os.chdir() returns None; 'newdir' is unused
    newdir = os.chdir(olddir)
    os.getcwd()

def set_verbosity(options):
    """Translate the counted -s/-v options into a console log level.

    -s WARNING, -ss ERROR, -sss (or more) CRITICAL; -v DEBUG, -vv (or more)
    DEBUG plus shell tracing; neither flag means INFO.
    """
    silent = options.silent or 0
    verbose = options.verbose or 0
    if silent >= 3:
        set_log_level('CRITICAL')
    elif silent == 2:
        set_log_level('ERROR')
    elif silent == 1:
        set_log_level('WARNING')
    elif verbose >= 2:
        set_log_level('DEBUG')
        # but also run every system command with set -x
        enable_trace()
    elif verbose == 1:
        set_log_level('DEBUG')
    else:
        set_log_level('INFO')

def recover_current_mcf_state(srcdir, origoptions):
    """Reconstruct the options of the previous mcf run from mcf.status.

    Reads srcdir/mcf.status, collects the argument lines that follow the
    "exec" statement (dropping the '--command' option), replays them
    through getopts(), and then carries over the clean/verbose/silent
    flags from *origoptions* so the current invocation keeps control of
    logging and cleaning.

    Raises Exception when mcf.status does not exist.
    """
    statuspath = os.path.join(srcdir, "mcf.status")
    if not os.path.exists(statuspath):
        raise Exception("mcf.status does not exist.")

    argv = ['ignored-argv-0']
    seen_exec = False
    with open(statuspath, 'r') as status:
        for rawline in status:
            stripped = rawline.strip()
            if not seen_exec:
                # everything up to and including the "exec" line is ignored
                seen_exec = stripped.startswith("exec")
                continue
            if stripped.startswith('--command'):
                # skip --command configure
                continue
            if stripped.startswith('--'):
                option = stripped.rstrip('\\').strip(' ')
                option = option.replace('"', '').replace("'", '')
                argv.append(option)
            else:
                # positional arguments: drop the line continuation and
                # append each whitespace-separated token individually
                for token in stripped.rstrip('\\').strip().split():
                    argv.append(token)

    sys.argv = argv
    options = getopts()
    # always use clean/verbose/silent flags from origoptions not mcf.status
    options.clean = origoptions.clean
    options.verbose = origoptions.verbose
    options.silent = origoptions.silent
    return options

def checkmirror(name, url):
    """Warn when a file:// mirror URL points at a missing or empty directory.

    name -- option name used in the warning message (e.g. 'premirror')
    url  -- mirror URL; only file:// URLs are checked, others are ignored
    """
    if url.startswith('file://'):
        pathstr = url[len('file://'):]
        if not os.path.isdir(pathstr):
            # logging.Logger.warn is a deprecated alias; use warning()
            logger.warning("%s parameter '%s' points to non-existent directory" % (name, url))
        elif not os.listdir(pathstr):
            logger.warning("%s parameter '%s' points to empty directory, did you forget to mount it?" % (name, url))

def sanitycheck(options):
    """Emit warnings for missing github / premirror / sstate mirror setups.

    Looks for a url.<mirror>.insteadof rewrite covering github.com in the
    git configuration, then verifies that any file:// premirror and
    sstatemirror options point at usable directories via checkmirror().
    """
    try:
        # raw string so grep receives a literal '\.' (a plain '\.' in a
        # non-raw string is an invalid escape sequence on modern Python)
        mirror = echo_check_call(r'git config -l | grep "^url\..*insteadof=github.com/"')
    except subprocess.CalledProcessError:
        # git config / grep return 1 when the option isn't set
        mirror = ''
    if not mirror:
        # .warn() is a deprecated alias of .warning()
        logger.warning('No mirror for github.com was detected, please define mirrors in ~/.gitconfig if some are available')
    if options.sstatemirror:
        for m in options.sstatemirror:
            checkmirror('sstatemirror', m)
    if options.premirror:
        checkmirror('premirror', options.premirror)

def configure_build(srcdir, options):
    """Generate the build directory contents from the build-templates.

    Instantiates mcf.status, oe-init-build-env, Makefile,
    conf/bblayers.conf and conf/local.conf by substituting the @token@
    placeholders in the templates under srcdir/build-templates with
    values derived from *options* and module globals (SSTATE_MIRRORS,
    DISTRO, progname, abs_srcdir).

    NOTE(review): relies on helpers defined elsewhere in this file —
    process_file (presumably applies the replacements to one template),
    parse_collections and write_bblayers_conf — confirm their contracts
    against their definitions.
    """
    # [template path, output path] pairs; outputs are relative to the
    # current working directory (the build dir).
    files = [
        [os.path.join(srcdir, 'build-templates', 'mcf-status.in'),        'mcf.status'        ],
        [os.path.join(srcdir, 'build-templates', 'oe-init-build-env.in'), 'oe-init-build-env' ],
        [os.path.join(srcdir, 'build-templates', 'Makefile.in'),          'Makefile'          ],
        [os.path.join(srcdir, 'build-templates', 'bblayers-conf.in'),     'conf/bblayers.conf'],
        [os.path.join(srcdir, 'build-templates', 'local-conf.in'),        'conf/local.conf'   ],
    ]

    # [placeholder, replacement-text] pairs; empty strings effectively
    # delete the placeholder from the generated file.
    replacements = [
        ['@bb_number_threads@', str(options.bb_number_threads)],
        ['@parallel_make@',     str(options.parallel_make)],
        ['@no_network@',               '0'            if options.network                else '1'],
        ['@fetchpremirroronly@',       '1'            if options.fetchpremirroronly     else '0'],
        ['@generatemirrortarballs@',   '1'            if options.generatemirrortarballs else '0'],
        ['@buildhistory_enabled@',     '1'            if options.buildhistory           else '0'],
        ['@buildhistory_class@',       'buildhistory' if options.buildhistory           else '' ],
        ['@buildhistory_author_assignment@', 'BUILDHISTORY_COMMIT_AUTHOR ?= "%s"' % options.buildhistoryauthor if options.buildhistoryauthor else ''],
        ['@premirror_assignment@',           'SOURCE_MIRROR_URL ?= "%s"' % options.premirror   if options.premirror     else ''],
        ['@premirror_inherit@',              'INHERIT += "own-mirrors"'                        if options.premirror     else ''],
        ['@sstatemirror_assignment@',        SSTATE_MIRRORS                                    if options.sstatemirror  else ''],
        ['@premirror@', options.premirror],
        ['@sstatemirror@', ' '.join(options.sstatemirror) if options.sstatemirror else ''],
        ['@buildhistoryauthor@', options.buildhistoryauthor],
        # --enable/--disable spellings are re-emitted into mcf.status so
        # that re-running it reproduces this configuration
        ['@buildhistory@',                         '--%s-buildhistory'                      % ('enable' if options.buildhistory                   else 'disable')],
        ['@network@',                              '--%s-network'                           % ('enable' if options.network                        else 'disable')],
        ['@fetchpremirroronlyoption@',             '--%s-fetch-premirror-only'              % ('enable' if options.fetchpremirroronly             else 'disable')],
        ['@generatemirrortarballsoption@',         '--%s-generate-mirror-tarballs'          % ('enable' if options.generatemirrortarballs         else 'disable')],
        ['@machine@', options.MACHINE[0]],
        ['@machines@', ' '.join(options.MACHINE)],
        ['@distro@', DISTRO],
        ['@prog@', progname],
        ['@srcdir@', srcdir],
        ['@abs_srcdir@', abs_srcdir],
    ]

    # if icecc is not installed, or version does not match requirements, then disabling icecc is the correct action.
    icestate = _icecc_installed()

    icecc_replacements = [
        ['@icecc_disable_enable@',           '1' if not icestate or options.disable_icecc else ''],
        ['@icecc_parallel_make@',            '%s'                             % options.icecc_parallel_make],
        ['@alternative_icecc_installation@', ('ICECC_PATH ?= "%s"'            % options.icecc_location) if options.icecc_location else ''],
        ['@icecc_user_package_blacklist@',   ('ICECC_USER_PACKAGE_BL ?= "%s"' % ' '.join(options.icecc_user_package_blacklist)) if options.icecc_user_package_blacklist else ''],
        ['@icecc_user_class_blacklist@',     ('ICECC_USER_CLASS_BL ?= "%s"'   % ' '.join(options.icecc_user_class_blacklist)) if options.icecc_user_class_blacklist else ''],
        ['@icecc_user_package_whitelist@',   ('ICECC_USER_PACKAGE_WL ?= "%s"' % ' '.join(options.icecc_user_package_whitelist)) if options.icecc_user_package_whitelist else ''],
        ['@icecc_environment_script@',       'ICECC_ENV_EXEC ?= "%s"' % options.icecc_env_exec if options.icecc_location else ''],
        ['@icecc_disable_enable_mcf@',       '--%s-icecc'                     % ('disable' if not icestate or options.disable_icecc else 'enable')],
        ['@alternative_icecc_installation_mcf@', options.icecc_location if options.icecc_location else ''],
        ['@icecc_environment_script_mcf@',       options.icecc_env_exec if options.icecc_location else ''],
        ['@icecc_user_package_blacklist_mcf@',   (' '.join(options.icecc_user_package_blacklist)) if options.icecc_user_package_blacklist else ''],
        ['@icecc_user_class_blacklist_mcf@',     (' '.join(options.icecc_user_class_blacklist)) if options.icecc_user_class_blacklist else ''],
        ['@icecc_user_package_whitelist_mcf@',   (' '.join(options.icecc_user_package_whitelist)) if options.icecc_user_package_whitelist else ''],
    ]

    replacements =  replacements + icecc_replacements

    logger.info('MCF-%s: Configuring build directory BUILD' % __version__)
    for f in files:
        process_file(f, replacements)
    parse_collections(srcdir)
    write_bblayers_conf(srcdir)
    logger.info('MCF-%s: Done configuring build directory BUILD' % __version__)

    # re-running ./mcf.status must be possible, so make it executable
    echo_check_call('/bin/chmod a+x mcf.status', options.verbose)

if __name__ == '__main__':
    # NB. The exec done by mcf.status causes argv[0] to be an absolute pathname
    progname = sys.argv[0]

    # Use the same timestamp for everything created by this invocation of mcf
    timestamp = strftime("%Y%m%d%H%M%S", gmtime())

    options = getopts()

    # mcf is expected to live at the top of the source tree, so the
    # script's own directory is the source directory
    srcdir = os.path.dirname(progname)
    abs_srcdir = os.path.abspath(srcdir)

    if options.mcfcommand == 'update':
        # 'update' replays the arguments recorded in mcf.status instead
        # of the ones given on this command line
        options = recover_current_mcf_state(srcdir, options)

    set_verbosity(options)

    if options.clean:
        enable_clean()

    # populates the module-level layer/machine tables (SUPPORTED_MACHINES
    # among them) from weboslayers.py
    read_weboslayers(srcdir)
    for M in options.MACHINE:
        if M not in SUPPORTED_MACHINES:
            logger.error("MACHINE argument '%s' isn't supported (does not appear in Machines in weboslayers.py '%s')" % (M, SUPPORTED_MACHINES))
            sys.exit(1)

    # 'configure' only regenerates the build directory; every other
    # command also fetches/updates the layer checkouts first
    if options.mcfcommand != 'configure':
        update_layers(srcdir)

    configure_build(srcdir, options)

    sanitycheck(options)
    logger.info('Done.')