import sys
import json


sys.path.append(r'./')

from codesecurity.tasks.crawler.npm_crawler import *
from codesecurity.data.couchdb_connector import *
from codesecurity.data.package_extract import *
from codesecurity.tasks.crawler.crawler_api import NpmCrawler, NpmCrawlerMeta

def test_npm_package_download():
    """Download the newest published version of lodash into the local cache.

    Resolves the full version list first; falls back to ``None`` (let the
    downloader pick) when no versions are found.
    """
    cache_directory = 'test_data'
    target = "lodash"

    versions = get_package_all_versions(target)
    latest = None if versions is None else versions[-1]

    download_npm_package(target, cache_directory, latest)
    
def test_couchdb_operation():
    """Fetch one package's metadata from the default CouchDB and print
    its name plus the created/modified/per-version timestamps."""
    db = NpmMetaDB.default()
    meta = db.get('@116common/shared')

    for value in (
        meta.package_name,
        meta.time_meta.created,
        meta.time_meta.modified,
        meta.time_meta.versions_created,
    ):
        print(value)
    
def test_npm_extract():
    """Download and unpack a pinned lodash release into the test cache."""
    target_package = 'lodash'
    downlad_and_extract_npm_package(target_package, 'test_data', version='4.17.21')
        

def test_build_npm_dataset():
    """Build a small JS dataset from the top-ranked packages.

    For each of the first 3 packages (of the top 500 listed by the meta
    DB) this downloads every recorded version, extracts it, and copies
    the code into the preprocessing directory tree.
    """
    npm_db = NpmMetaDB.default()
    metas = npm_db.list_top_k(500)
    processed = 0
    for meta in metas:
        package_name = meta.package_name
        # BUG FIX: guard versions_meta *before* calling .keys().
        # The original called .keys() first — which raises AttributeError
        # when versions_meta is None — and then tested the keys view for
        # None, which a dict view never is (dead check).
        if meta.versions_meta is None:
            continue
        versions = meta.versions_meta.keys()
        print(package_name)
        print(versions)
        for version in versions:
            package_dir = downlad_and_extract_npm_package(
                package_name, 'data/js/test_package/origin', version=version)
            if package_dir:
                extract_npm_code_to_dir(
                    package_dir,
                    f'data/js/test_package/preprocessing/{package_name}/{version}')
            print(package_name)

        processed += 1
        if processed >= 3:
            break
        
def test_crawler_package_list():
    """Write '<name>@<version> <timestamp>' lines for every version
    published between 2023-12-20 and 2023-12-27, logging progress every
    1000 packages scanned."""
    list_file = 'test_data/package_between_20231220_20231227.txt'
    npm_db = NpmMetaDB.default()
    # BUG FIX: count started at -1, so the printed total undercounted
    # the number of written versions by one.
    count = 0
    search_number = 0
    with open(list_file, 'w') as f:
        for meta in npm_db.lastest_docs('2023-12-20', '2023-12-27'):
            search_number += 1
            if meta.time_meta is None:
                print(f'{meta.package_name} has no time meta')
                continue
            # renamed from `iter` to avoid shadowing the builtin
            version_times = meta.time_meta.time_between('2023-12-20', '2023-12-27')
            for v, t in version_times:
                f.write(f'{meta.package_name}@{v} {t}\n')
                count += 1

            if search_number % 1000 == 0 and search_number > 1:
                print(f'In {search_number} packages, find {count} in range versions.')

def test_hot_package_crawler():
    """Fetch the 100 most popular npm packages into the origin directory."""
    top_k = 100
    output_dir = '/mnt/XiaoweiGuo/data/npm_hot_packages/origin'
    download_hot_npm_packages(top_k, output_dir)

def test_npm_crwaler():
    """Crawl the 5000 hottest packages via the NpmCrawler API.

    NOTE: the 'crwaler' typo in this function's name is kept so existing
    call sites are unaffected.
    """
    meta = NpmCrawlerMeta.default('/mnt/XiaoweiGuo/data/npm_packages')
    NpmCrawler(meta).crawl_hot_packages(5000)
    
    
def test_download_npm_package_list():
    file='test_data/package_between_20231201_20231231.txt'
    caches_dir='data'
    download_npm_package(package_name,caches_dir,version=version)

def test_latest_view():
    """Print name and modification time for docs updated 2024-01-01..05."""
    db = NpmMetaDB.default()
    for doc in db.lastest_docs('2024-01-01', '2024-01-05'):
        print(doc.package_name, doc.time_meta.modified)
        
# Manual test driver: exactly one scenario is enabled at a time by
# (un)commenting the calls below. Each call hits live resources
# (network / CouchDB / local data dirs), so they are run one by one.
test_latest_view()
#test_npm_extract()
#test_build_npm_dataset()
#test_crawler_package_list()
#test_couchdb_operation()   

#test_hot_package_crawler()

#test_npm_crwaler()