from typing import Iterable
import codesecurity.tasks.crawler.npm_crawler as npm_crawler
from codesecurity.data.history import HistoryFile 

from dataclasses import dataclass

import os
import shutil

@dataclass
class NpmCrawlerMeta:
    """Filesystem layout used by NpmCrawler.

    Each field is a directory path; ``default`` derives the conventional
    layout from a single base directory.
    """
    caches_dir: str = ""          # cache area for downloaded archives
    origin_dir: str = ""          # extracted package trees
    tgz_dir: str = ""             # raw .tgz tarballs
    preprocess_dir: str = ""      # preprocessed (code-only) trees
    hot_package_dir: str = ""     # root for the "hot packages" crawl

    @property
    def hot_package_origin(self):
        """Directory holding extracted trees of the hot-package crawl."""
        return os.path.join(self.hot_package_dir, 'origin')

    @property
    def hot_package_preprocess(self):
        """Directory holding preprocessed trees of the hot-package crawl."""
        return os.path.join(self.hot_package_dir, 'preprocess')

    @staticmethod
    def default(base_dir):
        """Build the standard layout rooted at *base_dir*.

        NOTE(review): caches_dir and tgz_dir both resolve to
        '<base_dir>/tgz' — presumably intentional aliasing; confirm.
        """
        join = os.path.join
        return NpmCrawlerMeta(
            caches_dir=join(base_dir, 'tgz'),
            origin_dir=join(base_dir, 'origin'),
            tgz_dir=join(base_dir, 'tgz'),
            preprocess_dir=join(base_dir, 'preprocess'),
            hot_package_dir=join(base_dir, 'hot_package'),
        )

class NpmCrawler:
    """Downloads npm packages (as .tgz archives or extracted trees) and
    preprocesses them into per-package/per-version directories laid out
    by an NpmCrawlerMeta.
    """

    def __init__(self, crawler_meta: "NpmCrawlerMeta"):
        # npm_db is never assigned elsewhere in this class; kept for
        # backward compatibility with external users -- TODO confirm.
        self.npm_db = None
        self.crawler_meta = crawler_meta

    @staticmethod
    def _parse_package(package):
        """Normalize a package spec into a [name, version] pair.

        Accepts either a '<name>@<version>' string (a leading '@' of a
        scoped package is tolerated, since empty split fragments are
        dropped) or an already-split two-item sequence.
        """
        if isinstance(package, str):
            package = [e for e in package.split('@') if e != '']
        assert len(package) == 2
        return package

    def check_dir(self):
        """Create every working directory of the layout if missing."""
        meta = self.crawler_meta
        for directory in (meta.caches_dir, meta.origin_dir,
                          meta.preprocess_dir, meta.hot_package_dir,
                          meta.tgz_dir):
            os.makedirs(directory, exist_ok=True)

    def crawl_hot_packages(self, k):
        """Crawl the *k* hottest npm packages into the hot-package dirs."""
        self.check_dir()
        npm_crawler.dep_hot_npm_packages(
            k,
            self.crawler_meta.hot_package_origin,
            self.crawler_meta.hot_package_preprocess)

    def crawl_packages(self, packages: Iterable):
        """Download, extract and preprocess each '<name>@<version>' spec."""
        self.check_dir()
        for package in packages:
            package_name, version = self._parse_package(package)
            # NOTE(review): 'downlad_...' is the helper's actual
            # (misspelled) name in npm_crawler.
            package_dir = npm_crawler.downlad_and_extract_npm_package(
                package_name, self.crawler_meta.origin_dir, version)
            if package_dir:
                npm_crawler.extract_npm_code_to_dir(
                    package_dir,
                    f'{self.crawler_meta.preprocess_dir}/{package_name}/{version}')

    def crawl_tgzs(self, packages: Iterable, history_file=None):
        """Download each spec's tarball into tgz_dir.

        When *history_file* is given, per-package progress is persisted
        through HistoryFile and already-successful (or corrupt) entries
        are skipped on re-runs.

        Returns the list of '<name>@<version>' specs that failed.
        """
        # Materialize once: the specs are iterated both by init_handle
        # and by the download loop below; a generator argument would
        # otherwise be exhausted after the first pass.
        packages = list(packages)

        def init_handle():
            # Seed the history file with one Wait entry per spec.
            return [[f'{name}@{version}', HistoryFile.Status_Wait]
                    for name, version in map(self._parse_package, packages)]

        self.check_dir()

        history = None
        if history_file:
            history = HistoryFile.regular(history_file, init_handle,
                                          auto_update=True)

        error_packages = []
        for i, package in enumerate(packages):
            if history and history.get_status(i) in [
                    HistoryFile.Status_Success, HistoryFile.Status_Corrupt]:
                continue

            package_name, version = self._parse_package(package)

            if npm_crawler.download_npm_package(
                    package_name, self.crawler_meta.tgz_dir, version):
                if history:
                    history.set_status(i, HistoryFile.Status_Success)
            else:
                error_packages.append(f'{package_name}@{version}')

        return error_packages

    def remove_dir(self, dir):
        """Recursively delete *dir*; return True if it existed.

        Read-only entries are chmod'ed writable on demand via rmtree's
        error hook, replacing the previous os.system('chmod -R 777 ...')
        which interpolated the raw path into a shell command (shell
        injection / quoting hazard).
        """
        def _make_writable_and_retry(func, path, exc_info):
            os.chmod(path, 0o777)
            func(path)

        if os.path.exists(dir):
            shutil.rmtree(dir, onerror=_make_writable_and_retry)
            return True

        return False

    def preprocess_tgzs(self, remove_tgz=False, remove_origin=False):
        """Rebuild origin/ and preprocess/ from every cached tarball.

        Existing origin and preprocess trees are wiped first. When
        *remove_tgz* is set the tarball is deleted after extraction;
        when *remove_origin* is set the extracted tree is deleted after
        preprocessing.
        """
        self.remove_dir(self.crawler_meta.preprocess_dir)
        self.remove_dir(self.crawler_meta.origin_dir)
        self.check_dir()
        for package_tgz in self.iter_tgzs():
            package_name, version = npm_crawler.identify_npm_tgz(package_tgz)

            dest_origin_dir = f'{self.crawler_meta.origin_dir}/{package_name}/{version}'
            dest_preprocess_dir = f'{self.crawler_meta.preprocess_dir}/{package_name}/{version}'

            print(f'preprocess {package_name}@{version} to {dest_preprocess_dir}')

            npm_crawler.extract_npm_package(package_tgz, dest_origin_dir,
                                            remove_after_extract=remove_tgz)
            npm_crawler.extract_npm_code_to_dir(dest_origin_dir,
                                                dest_preprocess_dir)

            if remove_origin:
                self.remove_dir(dest_origin_dir)

    def preprocess_packages(self):
        """Rebuild preprocess/ from every extracted tree under origin/."""
        self.remove_dir(self.crawler_meta.preprocess_dir)
        self.check_dir()

        for package_name, version, version_dir in self.iter_packages_origin():
            print(f'preprocess {package_name}@{version} to {self.crawler_meta.preprocess_dir}/{package_name}/{version}')
            npm_crawler.extract_npm_code_to_dir(
                version_dir,
                f'{self.crawler_meta.preprocess_dir}/{package_name}/{version}')

    def iter_packages_origin(self):
        """Iterate (name, version, dir) over the origin tree."""
        self.check_dir()
        return self.iter_packages(self.crawler_meta.origin_dir)

    def iter_packages_preprocess(self):
        """Iterate (name, version, dir) over the preprocess tree."""
        self.check_dir()
        return self.iter_packages(self.crawler_meta.preprocess_dir)

    def iter_tgzs(self):
        """Yield the absolute path of every entry in tgz_dir."""
        for package_tgz in os.listdir(self.crawler_meta.tgz_dir):
            yield os.path.join(self.crawler_meta.tgz_dir, package_tgz)

    def iter_packages(self, target_dir):
        """Yield (package_name, version, version_dir) under *target_dir*.

        Scoped packages live one directory deeper ('@scope/name', or
        'Ascope/name' for directories written with '@' mangled to 'A').
        """
        package_dirs = []
        for package_name in os.listdir(target_dir):
            package_dir = os.path.join(target_dir, package_name)
            if package_name.startswith('@') or package_name.startswith('A'):
                # Restore the npm scope prefix: only the leading 'A' is
                # the mangled '@'. (The previous str.replace('A', '@')
                # also corrupted any other capital 'A' in the name.)
                if package_name.startswith('A'):
                    scope = '@' + package_name[1:]
                else:
                    scope = package_name
                for sub_package_name in os.listdir(package_dir):
                    sub_package_dir = os.path.join(package_dir, sub_package_name)
                    package_dirs.append((f'{scope}/{sub_package_name}',
                                         sub_package_dir))
            else:
                package_dirs.append((package_name, package_dir))

        for package_name, package_dir in package_dirs:
            for version, version_dir in self.scan_versions(package_dir):
                yield package_name, version, version_dir

    def scan_versions(self, package_dir):
        """Yield (version, version_dir) for every subdirectory entry."""
        for version in os.listdir(package_dir):
            yield version, os.path.join(package_dir, version)

    @staticmethod
    def get_hot_package_list(k=5000):
        """Return metadata for the *k* hottest npm packages.

        *k* defaults to 5000, the previously hard-coded value, so
        existing callers are unaffected.
        """
        return npm_crawler.get_hot_npm_packages(k)