import json
import os
import random
import shutil
from datetime import datetime

def add_row_xmessage_popup ( popup_layout_output, text ):
    """Append a simple text row (textbox + void filler) to the popup layout.

    The new row gets a randomized id prefixed with this function's name.
    Returns (popup_layout_output, row_position) where row_position is the
    index of the freshly appended row.
    """
    row_position = len(popup_layout_output[0]["rows"])

    new_row = {
        "permission": "read_write",
        "id": "add_row_xmessage_popup_" + str(random.randint(0, 10000)),
        "type": "row",
        "fields": [
            {"type": "textbox", "display": text },
            {"type": "void"},
        ],
    }
    popup_layout_output[0]["rows"].append(new_row)

    return popup_layout_output, row_position

def start_xmessage_popup( rows_display, texbox_display ):
    """Create the initial layout for an xmessage-style popup.

    Builds a "rows" container whose id derives from *rows_display*, holding
    a timestamp row (row 0) with a date/time archive field, and a text row
    (row 1) showing *texbox_display*.

    Returns (popup_layout_output, row_index) where row_index is 1, the
    index of the text row.
    """
    container = {
        "type": "rows",
        "display": datetime.today().strftime('%Y.%m.%d') + " " + datetime.today().strftime('%H.%M.%S') + " " + str(random.randint(0, 10000)),
        "id": rows_display.replace(" ","_") + "_" + str(random.randint(0, 10000)),
        "expand": "on",
        "rows": [],
    }

    # Row 0: timestamp row with a date/time archive.
    timestamp_row = {
        "permission": "read_write",
        "id": rows_display.replace(" ","_") + "_" + str(random.randint(0, 10000)),
        "type": "row",
        "fields": [
            {"type": "textbox", "display": "timestamp" },
            {"type": "void"},
            {"type": "archive", "selection": "date", "items": [
                {"display": "date", "data": datetime.today().strftime('%Y-%m-%d') },
                {"display": "time", "data": datetime.today().strftime('%H:%M:%S') },
            ]},
        ],
    }
    container["rows"].append(timestamp_row)

    # Row 1: the caller-supplied text row.
    text_row = {
        "permission": "read_write",
        "id": rows_display.replace(" ","_") + "_" + str(random.randint(0, 10000)),
        "type": "row",
        "fields": [
            {"type": "textbox", "display": texbox_display },
            {"type": "void"},
        ],
    }
    container["rows"].append(text_row)

    return [container], 1


def generate_log_string ( ) :
    """Build the "--logging-conf" command fragment and its log file paths.

    Reads the cache directory from the global main-window layout, recreates
    it, and derives a randomized per-run sub-directory that holds the
    warning/error/info log files.

    Returns (logging_str, logging_info_str, cache_dir, info_log):
      - logging_str:      error + warning log options
      - logging_info_str: logging_str plus the info log option
      - cache_dir:        the per-run cache directory (created on disk)
      - info_log:         info log path (kept with its trailing space, as
                          consumed by the command builder)
    """
    global xrn_main_window_global
    slayout_local = xrn_main_window_global.jlayout_str

    # Single layout lookup (previously fetched twice, once into an unused var).
    obj = get_from_layout( slayout_local, "general_cache_directory" )
    cache_dir = os.path.expanduser(obj["fields"][2]["display"])

    # Recreate the base cache directory without spawning a shell:
    # os.system("rm -rf " + dir) breaks on spaces/special chars and is
    # an injection hazard if the layout value is attacker-influenced.
    shutil.rmtree(cache_dir, ignore_errors=True)
    os.makedirs(cache_dir, exist_ok=True)

    cache_dir = cache_dir + "/" + os.path.basename(__file__).replace(".py","_") + str(random.randint(0,10000))
    warning_log = cache_dir + "/warning_log "
    error_log = cache_dir + "/error_log "
    info_log = cache_dir + "/info_log "
    logging_str = "--logging-conf -force-write "
    logging_str = logging_str + "-lib-error-log " + error_log + " "
    logging_str = logging_str + "-lib-warning-log " + warning_log + " "
    logging_info_str = logging_str + "-lib-info-log " + info_log + " "

    os.makedirs(cache_dir, exist_ok=True)
    return logging_str, logging_info_str, cache_dir, info_log

def _find_log_filename( jcmd_obj, option_name ):
    """Return the value of *option_name* under the "logging-conf" configuration.

    Mirrors the original inline loops: when the option is absent the local
    *filename* is never bound and a NameError is raised.
    """
    for configuration in jcmd_obj["configurations"] :
        if configuration["name"] == "logging-conf" :
            for option in configuration["options"] :
                if option["name"] == option_name :
                    filename = option["value"]
    return filename

def append_logs( popup_obj, cmd_str ):
    """Read the log files referenced by *cmd_str* into the popup's archive.

    *cmd_str* is parsed via raw2json; for each of the lib-info-log,
    lib-error-log and lib-warning-log options present, the named file is
    read and its contents (or "empty") are appended as an archive item to
    the last row of *popup_obj*.

    Returns (popup_obj, error_log) where error_log is 1 iff a lib-error-log
    option was present and its file was non-empty.
    """
    error_log = 0
    row_index = len(popup_obj[0]["rows"]) - 1

    jcmd_obj = json.loads(raw2json(cmd_str))
    jcmd_str = json.dumps(jcmd_obj)

    if "lib-info-log" in jcmd_str :
        with open( _find_log_filename(jcmd_obj, "lib-info-log") , "r" ) as file :
            info_str = file.read()
        if info_str == "" :
            info_str = "empty"
        popup_obj[0]["rows"][row_index]["fields"][2]["items"].append({"display" : "lib-info-log", "data": info_str })

    if "lib-error-log" in jcmd_str :
        with open( _find_log_filename(jcmd_obj, "lib-error-log") , "r" ) as file :
            error_str = file.read()
        if error_str == "" :
            error_str = "empty"
        else :
            error_log = 1
        popup_obj[0]["rows"][row_index]["fields"][2]["items"].append({"display" : "lib-error-log", "data": error_str })

    if "lib-warning-log" in jcmd_str :
        with open( _find_log_filename(jcmd_obj, "lib-warning-log") , "r" ) as file :
            warning_str = file.read()
        if warning_str == "" :
            warning_str = "empty"
        popup_obj[0]["rows"][row_index]["fields"][2]["items"].append({"display" : "lib-warning-log", "data": warning_str })

    return popup_obj, error_log

def append_execution_logs( popup_obj, input_local_check, output_local_check, input_remote_check, output_remote_check ):
    """Append the non-empty execution check strings to the popup's archive.

    Each non-empty check value becomes one archive item on the last row of
    *popup_obj*. Returns the (mutated) popup_obj.
    """
    last_row = len(popup_obj[0]["rows"]) - 1

    checks = (
        ("input local check", input_local_check),
        ("output local check", output_local_check),
        ("input remote check", input_remote_check),
        ("output remote check", output_remote_check),
    )
    for label, value in checks:
        if value != "" :
            popup_obj[0]["rows"][last_row]["fields"][2]["items"].append({"display" : label, "data": value })

    return popup_obj


def append_result_in_archive( popup_layout_output, pass_fail, display_name ):
    """Append an "info" row recording a pass/fail verdict for a command.

    Any *pass_fail* value other than "pass" is recorded as "fail". The
    archive also records the command display name and this script's name.
    Returns the (mutated) popup_layout_output.
    """
    verdict = "pass" if pass_fail == "pass" else "fail"

    new_row = {
        "permission": "read_write",
        "id": "append_result_in_archive_" + str(random.randint(0, 10000)),
        "type": "row",
        "fields": [
            {"type": "textbox", "display": "info" },
            {"type": "void"},
            {"type": "archive", "selection": verdict, "items": [
                {"display": verdict, "data": verdict },
                {"display": "command", "data": display_name },
                {"display": "program", "data": os.path.basename(__file__).replace(".py","") },
            ]},
        ],
    }
    popup_layout_output[0]["rows"].append(new_row)

    return popup_layout_output


def check_passwords( first_password, first_password_display, second_password, second_password_display, row_index, popup_obj ):
    """Validate that the two password entries match and satisfy the policy.

    When the passwords match, check_password() is consulted for policy
    errors. The resulting message is appended to the archive field of row
    *row_index* (the field is created when the row has only two fields).
    Returns (error, popup_obj) with error 0 when the password is valid.
    """
    error = 0
    text = ""

    if first_password != second_password :
        error = 1
        text = "password in \"" + first_password_display + "\" and \"" + second_password_display + "\" do not match"
    else :
        response = check_password(first_password)
        if response != "" :
            error = 1
            text = "password in \"" + first_password_display + "\" and \"" + second_password_display + "\" error\n" + response

    if error == 0 :
        text = "password valid"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": "password", "items":[]})
    fields[2]["items"].append({"display": "password", "data": text })

    return error, popup_obj

def check_if_file_is_writable( file, row_index, popup_obj ):
    """Check whether *file* (after ~ expansion) can be opened for writing.

    The outcome message is appended to the archive field (index 2) of row
    *row_index* in *popup_obj*; the field is created when the row has only
    two fields. An empty path counts as an error and is labelled with a
    randomized "void_" placeholder.

    Returns (error, popup_obj) with error 0 when writable, 1 otherwise.
    """
    error = 0
    rnd = str(random.randint(0, 10000))

    file = os.path.expanduser(file)

    if file == "" :
        text = "no file specified"
        error = 1
        file = "void_" + rnd
    else :
        try:
            # Bug fix: probe with append mode. Mode 'w' truncated an
            # existing file, destroying its contents just to test access.
            with open( file, 'a'):
                pass
            text = file + " is writable"
        except OSError:
            text = file + " is not writable"
            error = 1

    if len(popup_obj[0]["rows"][row_index]["fields"]) <= 2 :
        popup_obj[0]["rows"][row_index]["fields"].append({"type": "archive", "selection": os.path.basename(file) + " write access " + rnd, "items":[]})

    popup_obj[0]["rows"][row_index]["fields"][2]["items"].append({"display": os.path.basename(file) + " write access " + rnd, "data": text })
    return error, popup_obj


def check_if_file_is_readable( file, row_index, popup_obj ):
    """Check whether *file* (after ~ expansion) can be opened for reading.

    The outcome message is appended to the archive field of row *row_index*
    in *popup_obj*; the field is created when the row has only two fields.
    An empty path counts as an error and is labelled with a randomized
    "void_" placeholder. Returns (error, popup_obj), error 0 when readable.
    """
    error = 0
    file = os.path.expanduser(file)
    rnd = str(random.randint(0, 10000))

    if file == "" :
        error = 1
        file = "void_" + rnd
        text = "no file specified"
    else :
        try:
            with open( file, 'r'):
                pass
        except IOError:
            error = 1
        text = file + (" is readable" if error == 0 else " is not readable")

    label = os.path.basename(file) + " read access " + rnd
    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": label, "items":[]})
    fields[2]["items"].append({"display": label, "data": text })
    return error, popup_obj


def check_matrx_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "matrix" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "matrix" )

    outcome = "matrix correctly detected" if error == 0 else "matrix not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_matrx_compressed_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "matrix_compressed" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "matrix_compressed" )

    outcome = "matrx_compressed correctly detected" if error == 0 else "matrx_compressed not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_start_point_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "start" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "start" )

    outcome = "start point correctly detected" if error == 0 else "start point not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_point_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "point" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "point" )

    outcome = "point correctly detected" if error == 0 else "point not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_monovalent_key_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "monovalent_key" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "monovalent_key" )

    outcome = "monovalent_key correctly detected" if error == 0 else "monovalent_key not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_polyvalent_proof_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "polyvalent_proof" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "polyvalent_proof" )

    outcome = "polyvalent_proof correctly detected" if error == 0 else "polyvalent_proof not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_polyvalent_key_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "polyvalent_key" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "polyvalent_key" )

    outcome = "polyvalent_key correctly detected" if error == 0 else "polyvalent_key not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_monomial_key_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "monomial_key" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "monomial_key" )

    outcome = "monomial_key correctly detected" if error == 0 else "monomial_key not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_monomial_commitment_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "monomial_commitment" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "monomial_commitment" )

    outcome = "monomial_commitment correctly detected" if error == 0 else "monomial_commitment not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_monomial_response_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "monomial_response" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "monomial_response" )

    outcome = "monomial_response correctly detected" if error == 0 else "monomial_response not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_monomial_proof_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "monomial_proof" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "monomial_proof" )

    outcome = "monomial_proof correctly detected" if error == 0 else "monomial_proof not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_binomial_key_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "binomial_key" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "binomial_key" )

    outcome = "binomial_key correctly detected" if error == 0 else "binomial_key not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_binomial_commitment_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "binomial_commitment" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "binomial_commitment" )

    outcome = "binomial_commitment correctly detected" if error == 0 else "binomial_commitment not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_hash_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "hash" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "hash" )

    outcome = "hash correctly detected" if error == 0 else "hash not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_signature_sym_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "signature_sym" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "signature_sym" )

    outcome = "signature_sym correctly detected" if error == 0 else "signature_sym not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_signature_asy_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "signature_asy" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "signature_asy" )

    outcome = "signature_asy correctly detected" if error == 0 else "signature_asy not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_ring_signature_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "ring_signature" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "ring_signature" )

    outcome = "ring_signature correctly detected" if error == 0 else "ring_signature not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_certificate_sym_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "certificate_sym" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "certificate_sym" )

    outcome = "certificate_sym correctly detected" if error == 0 else "certificate_sym not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_certificate_asy_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "certificate_asy" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "certificate_asy" )

    outcome = "certificate_asy correctly detected" if error == 0 else "certificate_asy not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_sym_encryption_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "sym_encryption" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "sym_encryption" )

    outcome = "sym_encryption correctly detected" if error == 0 else "sym_encryption not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_asy_encryption_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* is readable and holds an XRN "asy_encryption" header.

    The outcome is appended to the archive field of row *row_index* in
    *popup_obj*. Returns (error, popup_obj), error 0 on success.
    """
    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    tag = os.path.basename(file_path) + " check " + str(random.randint(0, 10000))
    header = load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "asy_encryption" )

    outcome = "asy_encryption correctly detected" if error == 0 else "asy_encryption not correctly detected"

    fields = popup_obj[0]["rows"][row_index]["fields"]
    if len(fields) <= 2 :
        fields.append({"type": "archive", "selection": tag, "items":[]})
    fields[2]["items"].append({"display": tag, "data": outcome })
    return error, popup_obj

def check_challenge_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* holds a challenge XRN header and log the outcome.

    The result is appended to the archive field (index 2) of row *row_index*
    in *popup_obj*, creating that field on first use.  Returns
    (error, popup_obj); error is 0 on success.
    """

    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    rnd = str(random.randint(0, 10000))
    header = load_xrn_json_header( file_path, cache_dir, encoding_param_path, password_str, architecture )
    error = check_xrn_file_type( header, "challenge" )

    text = "challenge correctly detected" if error == 0 else "challenge not correctly detected"

    label = os.path.basename(file_path) + " check " + rnd
    row_fields = popup_obj[0]["rows"][row_index]["fields"]
    # Lazily create the archive field for this row the first time we report.
    if len(row_fields) <= 2 :
        row_fields.append({"type": "archive", "selection": label, "items": []})
    row_fields[2]["items"].append({"display": label, "data": text})

    return error, popup_obj

def check_solution_wrapper( file_path, cache_dir, encoding_param_path, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* holds a solution XRN header and log the outcome.

    The result is appended to the archive field (index 2) of row *row_index*
    in *popup_obj*, creating that field on first use.  Returns
    (error, popup_obj); error is 0 on success.
    """

    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    rnd = str(random.randint(0, 10000))
    # BUGFIX: `architecture` was accepted but never forwarded to
    # load_xrn_json_header, unlike every sibling wrapper in this file.
    error = check_xrn_file_type( load_xrn_json_header ( file_path, cache_dir, encoding_param_path, password_str, architecture ), "solution" )

    if error == 0 :
        text = "solution correctly detected"
    else :
        text = "solution not correctly detected"
    # Lazily create the archive field for this row the first time we report.
    if len(popup_obj[0]["rows"][row_index]["fields"]) <= 2 :
        popup_obj[0]["rows"][row_index]["fields"].append({"type": "archive", "selection": os.path.basename(file_path) + " check " + rnd, "items":[]})
    popup_obj[0]["rows"][row_index]["fields"][2]["items"].append({"display": os.path.basename(file_path) + " check " + rnd, "data": text })
    return error, popup_obj

def check_block_wrapper( file_path, cache_dir, password_str, architecture, popup_obj, row_index ):
    """Check that *file_path* holds a block XRN header and log the outcome.

    No encoding-parameter path applies to block files, so "" is passed for it.
    The result is appended to the archive field (index 2) of row *row_index*
    in *popup_obj*, creating that field on first use.  Returns
    (error, popup_obj); error is 0 on success.
    """

    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    rnd = str(random.randint(0, 10000))
    # BUGFIX: `architecture` was accepted but never forwarded to
    # load_xrn_json_header; check_encoding_param_wrapper shows the intended
    # call shape (empty encoding-param path, architecture forwarded).
    error = check_xrn_file_type( load_xrn_json_header ( file_path, cache_dir, "", password_str, architecture ), "block" )

    if error == 0 :
        text = "block correctly detected"
    else :
        text = "block not correctly detected"
    # Lazily create the archive field for this row the first time we report.
    if len(popup_obj[0]["rows"][row_index]["fields"]) <= 2 :
        popup_obj[0]["rows"][row_index]["fields"].append({"type": "archive", "selection": os.path.basename(file_path) + " check " + rnd, "items":[]})
    popup_obj[0]["rows"][row_index]["fields"][2]["items"].append({"display": os.path.basename(file_path) + " check " + rnd, "data": text })
    return error, popup_obj

def check_encoding_param_wrapper( file_path, cache_dir, password_str , architecture, popup_obj, row_index):
    """Check that *file_path* holds an encoding_parameters XRN header and log the outcome.

    No encoding-parameter path applies here, so "" is passed for it.  The
    result is appended to the archive field (index 2) of row *row_index* in
    *popup_obj*, creating that field on first use.  Returns
    (error, popup_obj); error is 0 on success.
    """

    error, popup_obj = check_if_file_is_readable( file_path, row_index, popup_obj )
    if error == 1 :
        return error, popup_obj

    rnd = str(random.randint(0, 10000))
    header = load_xrn_json_header( file_path, cache_dir, "", password_str, architecture )
    error = check_xrn_file_type( header, "encoding_parameters" )

    text = "encoding_param correctly detected" if error == 0 else "encoding_param not correctly detected"

    label = os.path.basename(file_path) + " check " + rnd
    row_fields = popup_obj[0]["rows"][row_index]["fields"]
    # Lazily create the archive field for this row the first time we report.
    if len(row_fields) <= 2 :
        row_fields.append({"type": "archive", "selection": label, "items": []})
    row_fields[2]["items"].append({"display": label, "data": text})

    return error, popup_obj

def rename_final_popup_panel_name( panel_name, popup_obj, error ):
    """Record a pass/fail archive entry for *panel_name* in *popup_obj*.

    Any leading tag number is stripped from the display name before it is
    recorded.  The verdict is "pass" when *error* is 0, "fail" otherwise.
    Returns the updated *popup_obj*.
    """

    if starts_with_inital_tag_number ( panel_name ):
        display_name = remove_initial_tag_number ( panel_name )
    else :
        display_name = panel_name

    verdict = "pass" if error == 0 else "fail"
    return append_result_in_archive( popup_obj, verdict, display_name )

