#!/usr/bin/env python3

import sys
import os.path
import threading

from args import parse_args
from utils import *
from modules import *
from constants import *



def run(options):
    """Run the spider: crawl from options.url until the core reports done.

    Sets up logging, wires the core/downloader/pipeline trio, feeds a
    thread pool with download tasks from a daemon thread, and drives the
    pipeline loop on the main thread.

    options: parsed command-line namespace (see args.parse_args) expected
        to provide at least: level, logfile, depth, dbfile, key,
        nthreads, url.
    """
    logger = Logger('knownsec-spider', options.level)
    logger.add_stream()
    if options.logfile:
        # Log files are resolved relative to the project DIR constant.
        logfile = os.path.join(DIR, options.logfile)
        logger.add_stream(logfile)
    # All SpiderBase subclasses share this logger by default.
    SpiderBase._defaultlogger = logger

    core = Core(options.depth)
    dld = Downloader(core, TIME_OUT)
    ppl = Pipeline(core, options.dbfile, options.key)

    # Keep submitting download tasks from a daemon thread so the main
    # thread stays free for the pipeline loop below. NOTE(review):
    # presumably pool.submit blocks when its queue (8 * nthreads) is
    # full, which is what throttles this otherwise-busy loop — confirm
    # against the ThreadPool implementation.
    pool = ThreadPool(options.nthreads, 8 * options.nthreads)
    def _submit():
        while True:
            pool.submit(dld.work)
    st = threading.Thread(target=_submit)
    st.daemon = True  # don't block interpreter exit on this loop
    st.start()

    # Seed the crawl with the start URL at depth 1.
    core.put_requests([(1, options.url)])
    core.add_timer(AUTO_LOG_INTERVAL)
    logger.debug('start point added up %s', options.url)
    while True:
        ppl.pipeline()
        if core.is_finished():
            break
    # Lazy %-args, consistent with the debug call above (was an eager
    # '%' format of the message string).
    logger.info('processed %d urls in total', len(core.done))


def testself(options):
    pass



if __name__ == '__main__':
    # Parse CLI arguments (excluding the program name) and start the spider.
    options = parse_args(sys.argv[1:])
    try:
        run(options)
    except KeyboardInterrupt:
        # Ctrl-C: overwrite the '^C' echo with a friendly goodbye.
        print('\rBye~')
        sys.exit(0)
