from __future__ import annotations

from codesecurity.feature.tfidf import TfidfModule
from codesecurity.data.api import list_dataset,list_file_in_dir
from codesecurity.tasks.malicious_code_detect.meta import SuperParameter_MCD
from codesecurity.data.api import GroupPipe,pickle_load
from codesecurity.tasks.malicious_code_detect.objects import MaliciousDataset
from codesecurity.tasks.malicious_code_detect.model import BasicModel
from codesecurity.nn.api import train_model,eval_model


import os
import torch
import torch.utils.data as torchdata

def list_2class(dataset_dir):
    """Return (directory, label) pairs for the two-class good/bad layout.

    The dataset root is expected to contain a ``good`` and a ``bad``
    subdirectory; each is paired with its label string.
    """
    return [
        (os.path.join(dataset_dir, label), label)
        for label in ('good', 'bad')
    ]

                                                     
class NetworkModule:
    """Thin training/evaluation wrapper around a :class:`BasicModel`.

    Owns the model, the target device, and a fixed batch size; delegates
    the actual loops to ``train_model`` / ``eval_model``.
    """

    def __init__(self, sp: SuperParameter_MCD, device) -> None:
        self.model = BasicModel(sp, device)
        self.batch_size = 64
        self.device = device

    def train_kfold(self, dataset: torchdata.Dataset, k=10, seed=42):
        """Hold out 1/k of *dataset* for evaluation and train on the rest.

        NOTE: despite the name, this performs a single seeded random split,
        not k rotating folds.
        """
        held_out = len(dataset) // k
        split_sizes = [len(dataset) - held_out, held_out]
        generator = torch.Generator().manual_seed(seed)
        train_part, test_part = torchdata.random_split(
            dataset, split_sizes, generator=generator)
        self.train(train_part, test_part)

    def train(self, training_data, test_data):
        """Train the wrapped model, evaluating against *test_data*."""
        train_loader, test_loader = (
            torchdata.DataLoader(part, batch_size=self.batch_size)
            for part in (training_data, test_data)
        )
        train_model(self.model, train_loader, test_loader, device=self.device)

    def eval(self, dataset: torchdata.Dataset):
        """Run evaluation over *dataset* and return ``eval_model``'s result."""
        loader = torchdata.DataLoader(dataset, batch_size=self.batch_size)
        return eval_model(self.model, loader, self.device)
        
class PreprocessingModule:
    """Feature-extraction pipeline for the malicious-code-detection task.

    Three staged builders, each reading/writing disk caches:
      * :meth:`build_raw_features` — parse dataset files into raw features
        stored via ``GroupPipe``.
      * :meth:`build_submodules` — fit TF-IDF submodules over the cached
        raw features and pickle them.
      * :meth:`build_features` — combine raw features and submodules into a
        ``MaliciousDataset``, saved where :meth:`load` finds it.
    """

    def __init__(self, caches_dir, sp: SuperParameter_MCD, module_name="") -> None:
        # An empty module_name falls back to the default cache namespace.
        if module_name == "":
            module_name = 'data'

        # NOTE(review): MaliciousFeatureBuilder is not imported anywhere in
        # this file, so this line raises NameError as written — confirm the
        # defining module and add the import.
        self.feature_builder = MaliciousFeatureBuilder(sp)
        self.module_name = module_name
        self.caches_dir = caches_dir

        # Bidirectional registry of fitted submodules (object <-> name).
        self.submodules2name = {}
        self.name2submodules = {}

    def load(self):
        """Load the cached ``MaliciousDataset`` written by :meth:`build_features`."""
        return torch.load(self.get_features_file())

    def build_features(self, *dataset_dirs):
        """Turn cached raw features from *dataset_dirs* into a dataset.

        Returns the ``MaliciousDataset`` and also saves it to
        :meth:`get_features_file` for later :meth:`load` calls.
        """
        features = []
        for dataset_dir in dataset_dirs:
            caches_head = self.get_dataset_caches_head_file(dataset_dir)
            raw_feature_pipe: GroupPipe = pickle_load(caches_head)
            for group in raw_feature_pipe.iter_group():
                for raw_feature in group:
                    features.append(
                        self.feature_builder.build_basic_feature(
                            raw_feature, self.lexical_submodule))

        dataset = MaliciousDataset(features)
        torch.save(dataset, self.get_features_file())

        return dataset

    def build_raw_features(self, *dataset_dirs):
        """Parse every file under *dataset_dirs* into on-disk raw features."""
        for dataset_dir in dataset_dirs:
            caches_head = self.get_dataset_caches_head_file(dataset_dir)

            pipe = GroupPipe(caches_head, 250)  # 250 raw features per group

            file_pairs = list_dataset(dataset_dir, list_2class)
            for files, label in file_pairs:
                for file in files:
                    raw_feature = self.feature_builder.build_raw_basic_feature(file, label)
                    pipe.add_group(raw_feature)

            pipe.save(self.feature_builder)

    def build_submodules(self, *dataset_dirs):
        """Fit lexical/syntactic TF-IDF submodules over cached raw features.

        Both submodules are registered on this instance and pickled to their
        cache files for lazy reloading via the properties below.
        """
        lexical_submodule = TfidfModule()
        syntactic_submodule = TfidfModule()
        for dataset_dir in dataset_dirs:
            caches_head = self.get_dataset_caches_head_file(dataset_dir)
            pipe: GroupPipe = pickle_load(caches_head)

            for group in pipe.iter_group():
                for raw_feature in group:
                    lexical_submodule.add_documents(raw_feature.tokens)
                    syntactic_submodule.add_documents(
                        self.feature_builder.ast2bag(raw_feature.ast))

        lexical_submodule.build()
        syntactic_submodule.build()

        self.add_submodule(lexical_submodule, 'lexical_submodule')
        self.add_submodule(syntactic_submodule, 'syntactic_submodule')

        lexical_submodule.save(self.get_submodule_caches_file('lexical_submodule'))
        syntactic_submodule.save(self.get_submodule_caches_file('syntactic_submodule'))

    def add_submodule(self, submodule, name):
        """Register *submodule* under *name* in both lookup directions."""
        self.submodules2name[submodule] = name
        self.name2submodules[name] = submodule

    def get_dataset_caches_dir(self, dataset_dir):
        """Per-dataset cache directory, created on demand."""
        caches_dir = os.path.join(dataset_dir, 'caches', self.module_name)
        os.makedirs(caches_dir, exist_ok=True)
        return caches_dir

    def get_submodule_caches_file(self, name):
        """Cache file path for the submodule registered under *name*."""
        return os.path.join(self.caches_dir, f'submodule_{name}.pt')

    def get_features_file(self):
        """Cache file path for the built ``MaliciousDataset``.

        Bug fix: the original joined ``caches_dir`` with ``''``, yielding a
        bare directory path that ``torch.save``/``torch.load`` cannot use as
        a file; a concrete file name is required.
        """
        return os.path.join(self.caches_dir, 'features.pt')

    def get_dataset_caches_head_file(self, dataset_dir):
        """Head file of the ``GroupPipe`` cache for *dataset_dir*."""
        dataset_caches_dir = self.get_dataset_caches_dir(dataset_dir)

        return os.path.join(dataset_caches_dir, f'{self.module_name}.pt')

    def _get_or_load_submodule(self, name):
        # Lazily un-pickle a submodule from its cache file on first access,
        # then serve it from the registry on every later access.
        if name not in self.name2submodules:
            path = self.get_submodule_caches_file(name)
            self.add_submodule(pickle_load(path), name)

        return self.name2submodules[name]

    @property
    def lexical_submodule(self):
        """TF-IDF submodule over token streams (lazy-loaded from cache)."""
        return self._get_or_load_submodule('lexical_submodule')

    @property
    def syntactic_submodule(self):
        """TF-IDF submodule over AST bags (lazy-loaded from cache)."""
        return self._get_or_load_submodule('syntactic_submodule')