import codesecurity.feature.property_graph as property_graph
import codesecurity.feature.objects as code_objects
import codesecurity.feature.api as feature_api
import codesecurity.tasks.common.intelligence_data as intelligence_data
import codesecurity.tasks.common.api_name_word2vec as api_word2vec
import codesecurity.data.caches_manager as caches_manager
import codesecurity.data.api as data_api
import codesecurity.tasks.common.features as task_features

from codesecurity.utils import pretty_print


def get_api_list(file_path: str):
    """Extract the API-call table for a single source file.

    Detects the program language from the file path, parses the file into
    an AST, builds the AST image, and returns the API-call table computed
    by ``JsValueDependency.api_call_table``.
    """
    language = code_objects.ProgramLanguage.match(file_path).value
    source_bytes = feature_api.read_bytes(file_path)
    ast_obj = feature_api.create_ast_obj(source_bytes, language)
    builder = property_graph.AstImageBuilder(ast_obj)
    return property_graph.JsValueDependency.api_call_table(ast_obj, builder)

def get_api_common_words(api_list: dict):
    """Collect candidate vocabulary words from an API-call table.

    Iterates the keys of *api_list*, tokenizes each name with the project
    JS tokenizer, and keeps tokens longer than 2 characters whose first
    character is not a stop character.

    :param api_list: API-call table; only its keys (API name strings) are used.
    :return: set of accepted word tokens.
    """
    words = set()
    for api_name in api_list:
        # Very long keys are almost certainly minified or garbage identifiers.
        if len(api_name) > 64:
            continue
        try:
            # tokenize_js is inside the try so a failing tokenizer call is
            # skipped like a failing token iteration, keeping this best-effort.
            for token in intelligence_data.tokenize_js(api_name):
                if len(token) > 2 and token[0] not in intelligence_data.StopChars:
                    words.add(token)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. Still best-effort — just skip this name.
            continue
    return words


def feature_update_api_list(caches_file: str, include_common_words=True, force_update=True, show_progress=True):
    """Compute and store the API-list feature for every object in a caches file.

    For each cached object, builds the AST image, computes the API-call
    table, and stores it under ``Addon_Api_List``. When *include_common_words*
    is true, also stores the derived word set under ``Addon_Common_Api_Word``.
    Objects that already carry the property are skipped unless *force_update*.
    Each group is written back via ``update_group`` after processing.
    """
    caches = data_api.caches_load(caches_file)
    progress = pretty_print.get_progress_bar(caches.group_number) if show_progress else None
    for group_index, group in enumerate(caches.iter_group()):
        for code_obj in group:
            has_feature = code_obj.property_exists(code_objects.CommonFeatureSet.Addon_Api_List)
            if has_feature and not force_update:
                continue
            builder = property_graph.AstImageBuilder(code_obj.ast_object)
            api_table = property_graph.JsValueDependency.api_call_table(code_obj.ast_object, builder)
            code_obj.add_property(code_objects.CommonFeatureSet.Addon_Api_List, api_table)
            if include_common_words:
                word_set = get_api_common_words(api_table)
                code_obj.add_property(code_objects.CommonFeatureSet.Addon_Common_Api_Word, word_set)
        caches.update_group(group_index, group)
        if progress is not None:
            progress.update()
        else:
            print(f'group {group_index} updated finish.')
        
def unuse_memory_release(caches_file: str):
    """Call ``release()`` on every cached object and write each group back.

    Walks all groups of the caches file, releasing per-object resources,
    then persists the group via ``update_group``.
    """
    caches = data_api.caches_load(caches_file)
    for group_index, group in enumerate(caches.iter_group()):
        for cached_obj in group:
            cached_obj.release()
        caches.update_group(group_index, group)

def default_feature_file_name(caches_file: str, property_names: list) -> str:
    """Derive the default feature-file path from a caches file path.

    The '.pt' marker in *caches_file* is replaced by the underscore-joined
    property names followed by 'feature.pt', e.g.
    ``caches.pt`` + ``['a', 'b']`` -> ``cachesa_b_feature.pt``.

    :param caches_file: path of the caches file (expected to contain '.pt').
    :param property_names: property names to embed in the file name.
    :return: the derived feature file path.
    """
    # ''.join instead of repeated += (quadratic string build); each name
    # keeps its trailing '_' so the result matches the historical format.
    prefix = ''.join(f'{name}_' for name in property_names)
    # NOTE(review): str.replace swaps EVERY '.pt' occurrence, not just a
    # suffix — kept as-is for backward compatibility with existing callers.
    return caches_file.replace('.pt', f'{prefix}feature.pt')

def update_api_name_vocab(caches_file: str, overwrite=False):
    """Incrementally train the API-name word2vec vocabulary from a caches file.

    When *overwrite* is true the existing model is removed first. Each group
    is turned into a list of sentences and fed to ``api_word2vec.train``;
    the model is saved once all groups are processed.
    """
    if overwrite:
        api_word2vec.remove()

    caches = data_api.caches_load(caches_file)
    for group_index, group in enumerate(caches.iter_group()):
        # NOTE(review): assumes each group entry holds its word sequence at
        # index 2 — confirm against the caches group schema.
        sentences = []
        for entry in group:
            sentences.append(list(entry[2]))
        api_word2vec.train(sentences)
        vocab_size = len(api_word2vec.model().wv)
        print(f'group {group_index} done. vocab size: {vocab_size}')

    api_word2vec.save()

def export_feature(caches_file: str, property_names: list, feature_file=None):
    """Export selected properties of every cached object into a feature file.

    Each exported row is ``[obj.path, prop1, prop2, ...]`` in the order of
    *property_names*. When *feature_file* is None a default name is derived
    from the caches file via ``default_feature_file_name``.
    """
    target = feature_file
    if target is None:
        target = default_feature_file_name(caches_file, property_names)

    caches = data_api.caches_load(caches_file)
    features = data_api.caches_new(target)
    for code_obj in caches:
        row = [code_obj.path]
        for prop in property_names:
            row.append(code_obj.get_property(prop))
        features.add(row)

    features.save()
    
def export_mini_feature(caches_file: str, feature_file=None):
    """Export the 'mini' feature of every cached object into a feature file.

    When *feature_file* is None the name is derived by replacing '.pt' in
    *caches_file* with '_mini_feature.pt'. Progress is printed lazily while
    iterating.
    """
    target = caches_file.replace('.pt', '_mini_feature.pt') if feature_file is None else feature_file

    caches = data_api.caches_load(caches_file)
    features = data_api.caches_new(target)
    total = len(caches)
    for index, code_obj in enumerate(caches):
        features.add(task_features.mini_feature(code_obj))
        pretty_print.lazy_rprint(f'process: {index}/{total}', 0.01)

    features.save()