#!/usr/bin/python3


# Copyright (c) WanSheng Intelligent Corp. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.


import os
import sys
import time
import getopt
import logging
import json
from pprint import pprint
import hashlib
import zipfile
import tarfile
import shutil


class Manifest_Info:
    """Default-valued holder for manifest fields.

    NOTE(review): instantiated once as module-level manifest_info1, which is
    never read anywhere in this file -- this class appears to be unused.
    """
    product_name = ''
    category = ''
    version = ''
    vender = ''  # NOTE(review): likely a typo for 'vendor'; unused as-is
    description = ''
    cpu = ''
    os = ''  # shadows the imported os module inside the class body only
    components = ''
class Json_Platform:
    """Holder for one (os, cpu) platform pair (no references visible in this file)."""
    os = ''  # shadows the imported os module inside the class body only
    cpu = ''

# --- module-level state shared across the packing steps ---
current_dir = ''  # NOTE(review): shadowed by a local of the same name in main()
g_working_dir = ''  # dir to be packed (temporary working copy next to the input dir)
output_name = ''  # output file name (-o); resolved to a default in main() when empty
manifest_info1 = Manifest_Info()  # NOTE(review): never read after creation
g_build_id = ''  # build id forced by -b, or the value loaded from the build-id file
g_build_id_file = ''  # path of the persistent build-id JSON (-B)

g_platfrom_os_cpus =[]  # one [subdir-name] entry per platform directory (name keeps the 'platfrom' typo; siblings reference it)
silent_mode = False  # -s suppresses print_info() output

def usage():
    """Print command-line usage, option descriptions, and examples."""
    help_lines = (
        "wa_pack.py -f [input dir] -o [output name] -v [version] -b [build_id] -B [build_id file path]",
        "\t[input dir]:     the directory to be packed, default is current directory",
        "\t[output name]:   the output file name, default using the product name",
        "\t[version]:       the version of the package, default is 1.0.0",
        "\t[build_id]:      the build id of the package, default is 1",
        "\t[build_id file path]: the build id file path, default is build_id.json from source directory",
        "Notes: path can be both relative or full directory.",
        "Examples:",
        "\twa_pack.py -f wagent_v1 -o wagent.zip",
        "\twa_pack.py -f /work/wagent_v1 -o /work/wagent.zip",
    )
    for line in help_lines:
        print(line)


def check_file_package_info(input_dir) :
    """Load and return the parsed package.info (JSON) from *input_dir*.

    Exits the process with code 2 when the file is missing, keeping the
    tool's original fail-fast behavior.

    :param input_dir: directory expected to contain package.info
    :return: dict parsed from the JSON file
    """
    packageInfoPath = input_dir + "/package.info"
    if not os.path.exists(packageInfoPath):
        # A missing package.info is an error, so log it as one.
        logging.error("ERR: package.info does not exist in: "+input_dir)
        print(("ERR: package.info does not exist in : "+input_dir))
        sys.exit(2)
    # Context manager closes the handle; the original open().read() leaked it
    # until garbage collection.
    with open(packageInfoPath) as f:
        return json.load(f)

def check_file_platform_info(dir1):
    """Verify that every subdirectory of *dir1* contains ams/version/platform.info.

    Non-directory entries are skipped. Exits the process with code 3 on the
    first subdirectory missing the file.
    """
    for entry in os.listdir(dir1):
        subdir_path = dir1 + "/" + entry
        if not os.path.isdir(subdir_path):
            continue
        info_path = subdir_path + "/ams/version/platform.info"
        if not os.path.exists(info_path):
            logging.info("ERR: ams/version/platform.info missed in subdir: "+entry)
            print(("ERR: ams/version/platform.info missed in subdir: "+entry))
            sys.exit(3)


def parse_and_update_package_info(packageinfo, version):
    """Validate *packageinfo*, resolve version/build_id, and rewrite
    package.info inside the working directory (g_working_dir).

    The build id comes from -b (g_build_id) when given; otherwise it is
    read from, incremented in, and written back to g_build_id_file.

    :param packageinfo: dict parsed from the source package.info (mutated)
    :param version: version string from -v, or "" to auto-resolve
    :return: True on success, False when a mandatory key is missing
    """
    global g_working_dir
    global g_build_id
    global g_build_id_file
    # Mandatory keys; 'subclass' and 'version' are optional and defaulted below.
    keys = ['product_name', 'category', 'vendor' ,'description' ]
    for key in keys:
        if not key in packageinfo:
            # logging.warn() is deprecated since Python 3.3 -- use warning().
            logging.warning('miss key in the package.info: ' + key)
            return False

    if not 'subclass' in packageinfo:
        packageinfo['subclass'] = ""

    # -v not given: fall back to the version already present in package.info.
    if version == "" and 'version' in packageinfo:
          version = packageinfo['version']

    package_path = g_working_dir + "/package.info"
    logging.debug(package_path)
    if (g_build_id != ''):
        # Build id forced on the command line (-b).
        packageinfo['build_id'] = g_build_id
    else:
        # Read (or create) the persistent build-id file and bump the counter.
        if not os.path.exists(g_build_id_file):
            build_JSON = {'build_id': 0, 'version': '1.0.0', 'product_name': packageinfo['product_name']}
            logging.info(f'build_id_file [{g_build_id_file}] does not exist, create a new one')
        else:
            with open(g_build_id_file, 'r') as file1:
                build_JSON = json.loads(file1.read())
        if('build_id' in build_JSON):
            build_JSON['build_id'] = build_JSON['build_id'] + 1
        else:
            build_JSON['build_id'] = 1

        if('version' in build_JSON):
            if version == '':
                version = build_JSON['version']
                logging.info('version from build_id_file: ' + version)
            elif version != build_JSON['version']:
                # A new version restarts the build counter.
                logging.warning('version from build_id_file is different from input version, reset build id to 1')
                build_JSON['build_id'] = 1
        elif version == '':
            version = '1.0.0'
            logging.info('version from build_id_file is empty, use default version: ' + version)

        build_JSON['version'] = version
        packageinfo['build_id'] = str(build_JSON['build_id'])
        g_build_id = str(build_JSON['build_id'])
        # Persist the incremented counter for the next run.
        with open(g_build_id_file, 'w') as file1:
                json.dump(build_JSON, file1, indent=4)

        logging.info('build_id from build_id_file: ' + packageinfo['build_id'])

    if version == "":
        version = "1.0.0"
    packageinfo['version'] = version
    with open(package_path, 'w') as file1:
        json.dump(packageinfo, file1, indent=4)
    print_info("package.info version modified:"+package_path)

    return True

def parse_dir_for_os_cpu(dir1):
    """Record every subdirectory of *dir1* as a platform entry.

    Appends a single-element list [name] per subdirectory to the module-level
    g_platfrom_os_cpus accumulator; non-directory entries are skipped.
    """
    for entry in os.listdir(dir1):
        if os.path.isdir(dir1 + "/" + entry):
            g_platfrom_os_cpus.append([entry])

# Replace every subfolder of input_dir with <prefix><name>.tar.gz, then
# delete the original folder.
def replace_platform_dir_with_tar(input_dir, prefix = "") :
    """Tar-gzip each subdirectory of *input_dir* in place and remove it."""
    for entry in os.listdir(input_dir):
        entry_path = input_dir + "/" + entry
        if not os.path.isdir(entry_path):
            continue
        # Trailing "/" makes make_targz store the folder CONTENTS at the
        # archive root (basename of "x/" is "").
        make_targz(input_dir+"/"+ prefix + entry + ".tar.gz", entry_path+"/")
        shutil.rmtree(entry_path)

def calc_md5_for_file(path_name):
    """Return the hex MD5 digest of the file at *path_name*.

    Reads in 64 KiB chunks; the original 1 KiB block size caused ~64x more
    read calls than necessary on large payloads, for the same digest.

    :param path_name: path of the file to hash
    :return: lowercase hex digest string
    """
    BLOCK_SIZE = 64 * 1024
    hasher = hashlib.md5()
    with open(path_name, 'rb') as afile:
        buf = afile.read(BLOCK_SIZE)
        while buf:
            hasher.update(buf)
            buf = afile.read(BLOCK_SIZE)
    return hasher.hexdigest()


def update_md5_for_single_target(sub_dir1, package_info):
    """Generate the manifest for one platform directory.

    Writes a copy of *package_info* and a manifest file (with an MD5 entry
    for every file under *sub_dir1* except the manifest itself) into
    <sub_dir1>/ams/version, creating that directory when needed.
    """
    global g_working_dir
    progress_msg = 'update_md5 for '+sub_dir1
    logging.debug(progress_msg)
    print_info (progress_msg)
    manifest_dir = sub_dir1 + "/ams/version"
    manifest_file = sub_dir1 + "/ams/version/manifest"

    if not os.path.exists(manifest_dir):
        os.makedirs(manifest_dir)

    # Drop a package.info copy next to the manifest so the target carries
    # its own metadata.
    with open(manifest_dir + "/package.info", 'w') as meta_fp:
        json.dump(package_info, meta_fp, indent=4)

    # Manifest header mirrors the package metadata (stringified).
    manifest = {
        'description': '%s' % package_info['description'],
        'category': '%s' % package_info['category'],
        'version': '%s' % package_info['version'],
        'build_id': '%s' % package_info['build_id'],
        'vendor': '%s' % package_info['vendor'],
        'product_name': '%s' % package_info['product_name'],
        'subclass': '%s' % package_info['subclass'],
    }

    components = []
    for walk_root, _subdirs, walk_files in os.walk(sub_dir1, followlinks=True):
        for name in walk_files:
            full_path = walk_root + "/" + name
            if full_path == manifest_file:
                # The manifest never lists itself.
                continue
            md5_str = calc_md5_for_file(full_path)
            # Strip every occurrence of the platform-dir prefix to get the
            # in-package relative name.
            rel_name = "".join(full_path.rsplit(sub_dir1))
            entry = {'f': '%s' % rel_name,
                     'v': '%s' % package_info['version'],
                     'h': '%s' % md5_str}
            logging.debug("calculated md5 for "+ rel_name+" version: "+package_info['version']+" MD5: "+md5_str)
            logging.debug(entry)
            components.append(entry)
    logging.debug(components)
    manifest['components'] = components
    logging.debug(manifest_dir)
    with open(manifest_file, 'w') as manifest_fp:
        json.dump(manifest, manifest_fp, indent=4)
        print_info("generated manifiest file:" + manifest_file)
    return

def update_md5_for_each_target(array_for_os_and_cpu):
    """Generate a manifest for every platform listed in *array_for_os_and_cpu*.

    Bug fix: the original passed the package.info PATH STRING as the
    package_info argument, but update_md5_for_single_target subscripts it
    with string keys (package_info['description'] etc.), which would raise
    TypeError. Parse the JSON once and pass the dict.

    :param array_for_os_and_cpu: list of [subdir-name] entries under g_working_dir
    """
    global g_working_dir
    with open(g_working_dir + '/package.info') as fp:
        package_info = json.load(fp)
    for sub_dir1 in array_for_os_and_cpu:
        target_dir = g_working_dir + "/" + sub_dir1[0]
        update_md5_for_single_target(target_dir, package_info)

def make_targz(output_filename, source_dir):
    """Create a gzip-compressed tarball of *source_dir*.

    :param output_filename: tar.gz file name; replaced if it already exists
    :param source_dir: directory to archive, stored under its basename
                       (callers pass a trailing "/" so basename is "" and the
                       directory CONTENTS land at the archive root)
    :return: True on success, False when tarring failed
    """
    if os.path.exists(output_filename):
        os.remove(output_filename)
    print_info ("make tar.gz for folder:" + source_dir)

    # Removed the long-dead commented-out os.system("tar -czvf ...") fallback.
    try:
        with tarfile.open(output_filename, "w:gz") as tar:
            tar.add(source_dir, arcname=os.path.basename(source_dir), recursive=True)
        return True
    except Exception as e:
        # Best effort: report and signal failure instead of aborting the pack.
        print(e)
        return False

def make_zip(output_name, input_dir):
    """Zip the whole *input_dir* tree (following symlinks) into *output_name*.

    Archive member names are the file paths with every occurrence of the
    input-dir prefix stripped, i.e. paths relative to *input_dir*.

    :param output_name: path of the .zip file to create
    :param input_dir: directory tree to archive
    :return: True
    """
    # Context manager guarantees the archive is closed and flushed even if
    # os.walk or write() raises (the original leaked the handle on error).
    with zipfile.ZipFile(output_name, 'w', zipfile.ZIP_DEFLATED) as z:
        for dirpath, dirnames, filenames in os.walk(input_dir, followlinks=True):
            for filename in filenames:
                full_file_name = dirpath + "/" + filename
                z.write(os.path.join(dirpath, filename),
                        "".join(full_file_name.rsplit(input_dir)))
    return True

def copydir(indir,outdir):
    """Replace *outdir* with a fresh recursive copy of *indir*.

    :param indir: source directory
    :param outdir: destination directory (removed first if it exists)
    :return: True on success, False when the copy failed
    """
    try:
        if os.path.exists(outdir) :
            shutil.rmtree(outdir)
        shutil.copytree(indir,outdir)
        return True
    # Narrowed from a bare except: so KeyboardInterrupt/SystemExit still
    # propagate; rmtree/copytree failures are OSError (incl. shutil.Error).
    except OSError:
        print('This dir is wrong')
        return False

def print_info(msg):
    """Print *msg* to stdout unless silent mode (-s) is active."""
    global silent_mode
    if silent_mode:
        return
    print(msg)

'''
build id file format:
{
    "version": "1.0.0",
    "build_id": 1
}
'''

def main(argv):
    """Entry point: parse options, stage a working copy of the input dir,
    generate per-platform manifests, and emit the final .zip (plus
    category-specific tar.gz / offline packages).

    :param argv: command-line arguments excluding the program name
    """
    global g_working_dir
    global output_name
    global silent_mode
    global g_build_id
    global g_build_id_file
    version = ""

    # Start each run with a fresh log file.
    if True == os.path.exists('ams_pack.log'):
        os.remove('ams_pack.log')
    logging.basicConfig(filename='ams_pack.log', level=logging.DEBUG)
    logging.debug('packing tool started')
    current_dir = os.getcwd()  # local; shadows the module-level current_dir
    logging.info('current_dir is ' + current_dir)

    try:
        opts, args = getopt.getopt(argv, "f:o:v:b:B:s", ["help"])
        for opt, arg in opts:
            if opt == '-f':
                logging.debug("input folder:" + arg)
                current_dir = arg
            elif opt == '-o':
                output_name = arg
                logging.info('output_file_name:'+output_name)
            elif opt == '-v':
                version = arg
                logging.info('version:'+version)
            elif opt == '-b':
                g_build_id = arg
                logging.info('build_id:' + g_build_id)
            elif opt == '-B':
                g_build_id_file = arg
                logging.info('build_id_file:'+ g_build_id_file)
            elif opt == '-s':
                silent_mode = True
                logging.info('silent_mode: true')
            elif opt == '--help':
                # NOTE(review): --help is a normal request but is logged as
                # "get opt error" and exits with code 3 -- looks unintentional.
                logging.error("get opt error")
                usage()
                sys.exit(3)
    except getopt.GetoptError:
        usage()
        sys.exit(2)

    # -B not given: default to build_id.json inside the source directory.
    if g_build_id_file== '':
        g_build_id_file = f'{current_dir}/build_id.json'
    logging.info('build_id_file:'+ g_build_id_file)

    retval = os.path.exists(current_dir)
    if True == retval:
        # Normalize current_dir to an absolute path via chdir + getcwd.
        os.chdir(current_dir)
        current_dir = os.getcwd()
        os.chdir(current_dir)  # NOTE(review): redundant, already there
        logging.info('input path ['+ current_dir + '] exists')
    else:
        logging.info('input path does not exists')
        print("ams-pack: input path does not exists,return failed")
        return

    orig_package =  check_file_package_info(current_dir)
    product_name = orig_package['product_name'];

    #1. prepare working dir (sibling of the input dir; wiped if left over)
    input_dir_origin = current_dir
    g_working_dir = current_dir + "/../ams_working_temp"
    if os.path.isdir(g_working_dir):
        shutil.rmtree(g_working_dir)

    # managed_app payloads are nested one level deeper, under /app.
    copy_target_dir = g_working_dir
    if orig_package['category'] == "managed_app" :
        os.mkdir(g_working_dir)
        copy_target_dir = g_working_dir + '/app'

    if not copydir(input_dir_origin, copy_target_dir) :
        logging.debug('failed to cp original {} to working dir {}'.format(input_dir_origin, copy_target_dir))
        sys.exit(3)

    if True == os.path.exists(output_name):
        logging.info("output file name already exists,will replace it")

    #2. process and generate package.info in working dir
    if not parse_and_update_package_info(orig_package, version):
        sys.exit(3)

    # prepare the output file name and directory; default name encodes
    # version and build id, e.g. <name>_v1.0.0_b2
    if output_name == '':
        if 'pack_name' in orig_package:
            n = orig_package['pack_name']
        else:
            n = product_name
        output_name = os.path.abspath(os.path.join(current_dir, "..", f'{n}_v{orig_package["version"]}_b{orig_package["build_id"]}'))
        print ("ams-pack: output file name is not specified, use default name: " + output_name)
    pack_base_dir = os.path.dirname(output_name)
    out_base_name = os.path.basename(output_name)
    if pack_base_dir == '':
        pack_base_dir = os.getcwd()
    elif not os.path.isdir(pack_base_dir):
        os.mkdir(pack_base_dir)


    #3. generate manifest or tar.gz, depending on the package category
    if orig_package['category'] == "managed_app" :
        update_md5_for_single_target(g_working_dir+'/app', orig_package)
        if (not silent_mode):
            # Best effort: 'tree' may not exist on the host; failure is ignored.
            os.system("tree " + g_working_dir)
        replace_platform_dir_with_tar(g_working_dir)
        shutil.copyfile(g_working_dir + '/app.tar.gz', pack_base_dir + '/' + out_base_name + '.tar.gz')

    elif orig_package['category'] == "runtime_engine" or orig_package['category'] == "docker" or orig_package['category'] == "plugin":
        check_file_platform_info(g_working_dir)

        # check the platforms included
        parse_dir_for_os_cpu(g_working_dir)

        # generating maninfest for each platform
        for sub_dir1 in g_platfrom_os_cpus:
            target_dir = g_working_dir + "/" +sub_dir1[0]
            if orig_package['category'] == "docker":
                # docker packages must ship the container image tarball
                if not os.path.exists(target_dir + '/wa_docker.tar'):
                    print_info('No wa_docker.tar for platform: ' + sub_dir1[0] + '. exit packing')
                    sys.exit(3)
            update_md5_for_single_target(target_dir, orig_package)

        replace_platform_dir_with_tar(g_working_dir, out_base_name+"_")

        # copy the per-platform tarballs into the <name>-offline directory
        offline_package_dir = pack_base_dir + '/' + out_base_name + '-offline'
        if not os.path.isdir(offline_package_dir):
            os.mkdir(offline_package_dir)
        cmd = 'cp {}/*.tar.gz {}'.format(g_working_dir, offline_package_dir)
        os.system(cmd)

    else :
        # any other category: manifest per platform + one offline tar.gz each
        check_file_platform_info(g_working_dir)
        parse_dir_for_os_cpu(g_working_dir)
        for sub_dir1 in g_platfrom_os_cpus:
            target_dir = g_working_dir + "/" +sub_dir1[0]
            update_md5_for_single_target(target_dir, orig_package)

        # pack the offline packages for each platform
        for sub_dir1 in g_platfrom_os_cpus:
            iagent_offline_package_zip=pack_base_dir + '/' + out_base_name + '_offline_'+sub_dir1[0]+'.tar.gz'
            tar1=tarfile.open(iagent_offline_package_zip,"w:gz")
            input_dir=g_working_dir + '/' + sub_dir1[0]
            print ("gen offline package for " + input_dir)
            for root,dir,files in os.walk(input_dir, followlinks=True):
                for file in files:
                    fullpath=os.path.join(root,file)
                    fl_name = "".join(fullpath.rsplit(input_dir))
                    tar1.add(fullpath,arcname=fl_name)
                    tar1.add  # NOTE(review): no-op attribute access, likely leftover
            tar1.close()
            print ("generated offline package: " + iagent_offline_package_zip)

    make_zip(output_name+'.zip', g_working_dir)
    logging.info("ams_pack success")
    print_info("ams_pack success. " + output_name+'.zip')


    # 4. remove working dir
    shutil.rmtree(g_working_dir)

if __name__ == '__main__':
    main(sys.argv[1:])
