#!/usr/bin/env python
# coding=utf-8

from sys import argv
from cve.DebianPackageCrawler import DebianPackageCrawler
from multiprocessing.dummy import Pool as thread_pool
from cve.log_helper import logger
from cve.CveUtils import ParseCommitLog
import time

def parse_arg():
    """Parse sys.argv into a dict describing the requested action.

    Returns:
        A dict with a "target" key naming the action; value-taking options
        additionally carry a "package" or "repo" key.  Returns None when no
        recognized option (or a required option value) is present.
    """
    # Options that take no value map directly to a target name.
    flag_options = ("--DebianCrawler", "--insertUrlData", "--extractGitCloneUrl")
    # Options that consume the following argv entry, keyed to the dict
    # field the value is stored under.
    value_options = {
        "--CrawlGitUrl": "package",
        "--extractCveFromCommitLog": "repo",
        "--getRepoName": "repo",
    }

    k = 1
    while k < len(argv):
        arg = argv[k]
        if arg in flag_options:
            return dict(target=arg[2:])  # strip the leading "--"
        if arg in value_options:
            if k + 1 >= len(argv):
                # BUG FIX: the original indexed argv[k+1] unconditionally and
                # raised IndexError when the value was missing.
                return None
            return {"target": arg[2:], value_options[arg]: argv[k + 1]}
        # BUG FIX: the original never advanced k for unrecognized arguments,
        # so an unknown first option spun forever in this loop.
        k += 1
    return None


def DebianCrawler():
    """Construct a DebianPackageCrawler and kick off its crawl."""
    DebianPackageCrawler().start()

def _crawl_git_url(pkg):
    """Crawl the git URLs for *pkg* and cache the result on disk.

    Writes output/DebianCrawler/<pkg>: the first line is the elapsed time
    (or the literal string "timeout"), followed by one crawled URL per line.
    Does nothing when the output file already exists.
    """
    import os.path as path

    if path.isfile("output/DebianCrawler/{}".format(pkg)):
        return  # already crawled; keep the cached result

    dpc = DebianPackageCrawler()
    timeout = False
    # BUG FIX: `result` was unbound when crawlGitUrl raised StopIteration,
    # causing a NameError at the `result is not None` check below.
    result = None
    # NOTE(review): time.clock() measures CPU time on Unix (and was removed
    # in Python 3.8); wall-clock time is likely what was intended -- confirm.
    start = time.clock()
    try:
        result = dpc.crawlGitUrl(pkg)
    except StopIteration:
        # The crawler signals a timeout by raising StopIteration.
        timeout = True
    elapsed = (time.clock() - start)

    with open("output/DebianCrawler/{}".format(pkg), "w") as f:
        if not timeout:
            f.write("{}\n".format(elapsed))
        else:
            f.write("timeout\n")

        if result is not None:
            for url in result:
                f.write("{}\n".format(url))


def main():
    """Dispatch to the action selected on the command line.

    Reads the parsed arguments from parse_arg() and runs the matching
    target; unknown or missing options are logged and ignored.
    """
    args = parse_arg()
    if args is None:
        # BUG FIX: parse_arg() returns None for unrecognized/missing options;
        # the original crashed with a TypeError on the subscription below.
        logger.error("No recognized command line option given")
        return

    if args["target"] == "insertUrlData":
        from UrlData import insertUrlDataIntoDb
        insertUrlDataIntoDb()
    elif args["target"] == "extractGitCloneUrl":
        from UrlData import extractGitCloneUrl
        extractGitCloneUrl()
    elif args["target"] == "DebianCrawler":
        DebianCrawler()
    elif args["target"] == "CrawlGitUrl":
        pkg = args["package"]
        logger.info("Crawling {}".format(pkg))
        _crawl_git_url(pkg)
    elif args["target"] == "extractCveFromCommitLog":
        cve_list = ParseCommitLog(args["repo"])
        for x in cve_list:
            print(x)
    elif args["target"] == "getRepoName":
        from UrlData import getRepoName
        print(getRepoName(args["repo"]))

        
        

# Run the CLI dispatcher only when executed as a script (not on import).
if __name__=="__main__":
    main()